gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: versions.proto
package org.tensorflow.framework;
/**
* <pre>
* Version information for a piece of serialized data
* There are different types of versions for each type of data
* (GraphDef, etc.), but they all have the same common shape
* described here.
* Each consumer has "consumer" and "min_producer" versions (specified
* elsewhere). A consumer is allowed to consume this data if
* producer >= min_producer
* consumer >= min_consumer
* consumer not in bad_consumers
* </pre>
*
* Protobuf type {@code tensorflow.VersionDef}
*/
public final class VersionDef extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.VersionDef)
VersionDefOrBuilder {
// Use VersionDef.newBuilder() to construct.
private VersionDef(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance and by the parsing
// constructor: initializes every field to its proto3 default
// (0 for int32, immutable empty list for the repeated field).
private VersionDef() {
producer_ = 0;
minConsumer_ = 0;
badConsumers_ = java.util.Collections.emptyList();
}
// NOTE(review): always returns the empty UnknownFieldSet, so any unknown
// fields read off the wire are dropped rather than round-tripped.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Wire-format parsing constructor: consumes tags from `input` until
// end-of-stream (tag 0), populating this instance's fields. Any parse
// failure is rethrown with this partially-built message attached.
private VersionDef(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
// Bit 0x4 records that badConsumers_ has been swapped for a mutable list.
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Note: `default` appears before the numbered cases; switch matching is
// by value, not position, and every case breaks, so order is harmless.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 8: {
// Field 1 (producer), varint.
producer_ = input.readInt32();
break;
}
case 16: {
// Field 2 (min_consumer), varint.
minConsumer_ = input.readInt32();
break;
}
case 24: {
// Field 3 (bad_consumers), unpacked encoding: one varint per tag.
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
badConsumers_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000004;
}
badConsumers_.add(input.readInt32());
break;
}
case 26: {
// Field 3 (bad_consumers), packed encoding: a length-delimited
// run of varints, read under a pushed limit.
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
badConsumers_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000004;
}
while (input.getBytesUntilLimit() > 0) {
badConsumers_.add(input.readInt32());
}
input.popLimit(limit);
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field if parsing made it mutable, so the built
// message is immutable even when an exception escaped above.
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
badConsumers_ = java.util.Collections.unmodifiableList(badConsumers_);
}
makeExtensionsImmutable();
}
}
// Returns the static descriptor for tensorflow.VersionDef, registered in
// the generated VersionsProtos holder class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.VersionsProtos.internal_static_tensorflow_VersionDef_descriptor;
}
// Reflection plumbing: maps descriptor fields onto this class's accessors.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.VersionsProtos.internal_static_tensorflow_VersionDef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.VersionDef.class, org.tensorflow.framework.VersionDef.Builder.class);
}
// Presence bits; always written as 0 by buildPartial (vestigial here).
private int bitField0_;
public static final int PRODUCER_FIELD_NUMBER = 1;
private int producer_;
/**
 * <pre>
 * The version of the code that produced this data.
 * </pre>
 *
 * <code>optional int32 producer = 1;</code>
 */
public int getProducer() {
return producer_;
}
public static final int MIN_CONSUMER_FIELD_NUMBER = 2;
private int minConsumer_;
/**
 * <pre>
 * Any consumer below this version is not allowed to consume this data.
 * </pre>
 *
 * <code>optional int32 min_consumer = 2;</code>
 */
public int getMinConsumer() {
return minConsumer_;
}
public static final int BAD_CONSUMERS_FIELD_NUMBER = 3;
private java.util.List<java.lang.Integer> badConsumers_;
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
// Returns the internal list directly; safe because construction leaves it
// either Collections.emptyList() or wrapped in unmodifiableList().
public java.util.List<java.lang.Integer>
getBadConsumersList() {
return badConsumers_;
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public int getBadConsumersCount() {
return badConsumers_.size();
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public int getBadConsumers(int index) {
return badConsumers_.get(index);
}
// Packed payload size of field 3, computed by getSerializedSize() and
// consumed by writeTo(); -1 until first computed.
private int badConsumersMemoizedSerializedSize = -1;
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Reports whether all required data is present. This message has nothing
// that can be uninitialized, so the answer is always true; the result is
// cached in memoizedIsInitialized after the first call.
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to `output` in proto wire format. Fields with
// default values are skipped (proto3 semantics).
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Must run first: populates badConsumersMemoizedSerializedSize below.
getSerializedSize();
if (producer_ != 0) {
output.writeInt32(1, producer_);
}
if (minConsumer_ != 0) {
output.writeInt32(2, minConsumer_);
}
// bad_consumers is written packed: tag 26 (field 3, length-delimited),
// then the byte length, then the raw varints.
if (getBadConsumersList().size() > 0) {
output.writeUInt32NoTag(26);
output.writeUInt32NoTag(badConsumersMemoizedSerializedSize);
}
for (int i = 0; i < badConsumers_.size(); i++) {
output.writeInt32NoTag(badConsumers_.get(i));
}
}
// Computes (and memoizes) the serialized byte size of this message.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (producer_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, producer_);
}
if (minConsumer_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, minConsumer_);
}
{
int dataSize = 0;
for (int i = 0; i < badConsumers_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeInt32SizeNoTag(badConsumers_.get(i));
}
size += dataSize;
if (!getBadConsumersList().isEmpty()) {
// +1 for the single-byte tag of packed field 3, plus the varint
// encoding of the packed payload length.
size += 1;
size += com.google.protobuf.CodedOutputStream
.computeInt32SizeNoTag(dataSize);
}
// Cache the packed payload size for writeTo().
badConsumersMemoizedSerializedSize = dataSize;
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Two VersionDefs are equal when producer, min_consumer and the
// bad_consumers list all match. Non-VersionDef arguments fall back to
// the superclass comparison.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.tensorflow.framework.VersionDef)) {
    return super.equals(obj);
  }
  org.tensorflow.framework.VersionDef that = (org.tensorflow.framework.VersionDef) obj;
  return getProducer() == that.getProducer()
      && getMinConsumer() == that.getMinConsumer()
      && getBadConsumersList().equals(that.getBadConsumersList());
}
// Hash mixes the descriptor, both scalar fields, the (non-empty) repeated
// field, and the unknown-field set; the result is memoized. Consistent
// with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + PRODUCER_FIELD_NUMBER;
hash = (53 * hash) + getProducer();
hash = (37 * hash) + MIN_CONSUMER_FIELD_NUMBER;
hash = (53 * hash) + getMinConsumer();
// Repeated field only contributes when non-empty, so an empty list and
// an absent list hash identically.
if (getBadConsumersCount() > 0) {
hash = (37 * hash) + BAD_CONSUMERS_FIELD_NUMBER;
hash = (53 * hash) + getBadConsumersList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. ByteString/byte[] overloads throw
// InvalidProtocolBufferException; stream overloads surface IOException.
// Each has a variant taking an ExtensionRegistryLite for extension lookup.
public static org.tensorflow.framework.VersionDef parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.VersionDef parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.VersionDef parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.VersionDef parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.VersionDef parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.VersionDef parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants expect a varint length prefix before the message.
public static org.tensorflow.framework.VersionDef parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.VersionDef parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.VersionDef parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.VersionDef parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder with all fields at their defaults.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with `prototype`'s field values.
public static Builder newBuilder(org.tensorflow.framework.VersionDef prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance skips the mergeFrom to avoid copying empty state.
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Version information for a piece of serialized data
* There are different types of versions for each type of data
* (GraphDef, etc.), but they all have the same common shape
* described here.
* Each consumer has "consumer" and "min_producer" versions (specified
* elsewhere). A consumer is allowed to consume this data if
* producer >= min_producer
* consumer >= min_consumer
* consumer not in bad_consumers
* </pre>
*
* Protobuf type {@code tensorflow.VersionDef}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.VersionDef)
org.tensorflow.framework.VersionDefOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.VersionsProtos.internal_static_tensorflow_VersionDef_descriptor;
}
// Reflection plumbing shared with the message class.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.VersionsProtos.internal_static_tensorflow_VersionDef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.VersionDef.class, org.tensorflow.framework.VersionDef.Builder.class);
}
// Construct using org.tensorflow.framework.VersionDef.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields here, so there are no nested builders to force.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its default and drops the mutable-list bit.
public Builder clear() {
super.clear();
producer_ = 0;
minConsumer_ = 0;
badConsumers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.framework.VersionsProtos.internal_static_tensorflow_VersionDef_descriptor;
}
public org.tensorflow.framework.VersionDef getDefaultInstanceForType() {
return org.tensorflow.framework.VersionDef.getDefaultInstance();
}
// Builds the message, failing if it is uninitialized (never the case for
// this message type, see isInitialized()).
public org.tensorflow.framework.VersionDef build() {
  org.tensorflow.framework.VersionDef result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Copies the builder's state into a new message without an initialization
// check. Removed the dead locals `from_bitField0_` (never read) and
// `to_bitField0_` (always 0 — proto3 fields carry no presence bits here);
// result.bitField0_ is written as the constant 0 directly.
public org.tensorflow.framework.VersionDef buildPartial() {
  org.tensorflow.framework.VersionDef result = new org.tensorflow.framework.VersionDef(this);
  result.producer_ = producer_;
  result.minConsumer_ = minConsumer_;
  // Hand the list to the message frozen; the builder keeps an alias but
  // clears the mutable bit, so its next mutation copies-on-write via
  // ensureBadConsumersIsMutable().
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    badConsumers_ = java.util.Collections.unmodifiableList(badConsumers_);
    bitField0_ = (bitField0_ & ~0x00000004);
  }
  result.badConsumers_ = badConsumers_;
  result.bitField0_ = 0;
  onBuilt();
  return result;
}
// Covariant-return delegations: each narrows the superclass return type
// to this Builder so call chains stay fluent.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, otherwise falls back to
// the reflective superclass merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.tensorflow.framework.VersionDef) {
return mergeFrom((org.tensorflow.framework.VersionDef)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: proto3 semantics — scalars are copied only when the
// source value is non-default; repeated values are appended.
public Builder mergeFrom(org.tensorflow.framework.VersionDef other) {
if (other == org.tensorflow.framework.VersionDef.getDefaultInstance()) return this;
if (other.getProducer() != 0) {
setProducer(other.getProducer());
}
if (other.getMinConsumer() != 0) {
setMinConsumer(other.getMinConsumer());
}
if (!other.badConsumers_.isEmpty()) {
if (badConsumers_.isEmpty()) {
// Our list is empty: alias the other message's (immutable) list and
// leave the mutable bit clear, so a later mutation copies first.
badConsumers_ = other.badConsumers_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureBadConsumersIsMutable();
badConsumers_.addAll(other.badConsumers_);
}
onChanged();
}
// Unconditional onChanged(); redundant with the one above when the
// repeated branch ran, but harmless.
onChanged();
return this;
}
// This message type has no required fields, so a builder is always
// buildable.
public final boolean isInitialized() {
return true;
}
// Parses a message from the stream and merges it into this builder. On
// parse failure the partially-parsed message is still merged (finally
// block) before the error propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.framework.VersionDef parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.framework.VersionDef) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x4 tracks whether badConsumers_ currently points at a private,
// mutable ArrayList (vs. a shared/immutable list).
private int bitField0_;
private int producer_ ;
/**
 * <pre>
 * The version of the code that produced this data.
 * </pre>
 *
 * <code>optional int32 producer = 1;</code>
 */
public int getProducer() {
return producer_;
}
/**
 * <pre>
 * The version of the code that produced this data.
 * </pre>
 *
 * <code>optional int32 producer = 1;</code>
 */
public Builder setProducer(int value) {
producer_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The version of the code that produced this data.
 * </pre>
 *
 * <code>optional int32 producer = 1;</code>
 */
public Builder clearProducer() {
producer_ = 0;
onChanged();
return this;
}
private int minConsumer_ ;
/**
 * <pre>
 * Any consumer below this version is not allowed to consume this data.
 * </pre>
 *
 * <code>optional int32 min_consumer = 2;</code>
 */
public int getMinConsumer() {
return minConsumer_;
}
/**
 * <pre>
 * Any consumer below this version is not allowed to consume this data.
 * </pre>
 *
 * <code>optional int32 min_consumer = 2;</code>
 */
public Builder setMinConsumer(int value) {
minConsumer_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * Any consumer below this version is not allowed to consume this data.
 * </pre>
 *
 * <code>optional int32 min_consumer = 2;</code>
 */
public Builder clearMinConsumer() {
minConsumer_ = 0;
onChanged();
return this;
}
private java.util.List<java.lang.Integer> badConsumers_ = java.util.Collections.emptyList();
// Copy-on-write guard: replaces a shared/immutable list with a private
// ArrayList copy before the first mutation, then sets the mutable bit.
private void ensureBadConsumersIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
badConsumers_ = new java.util.ArrayList<java.lang.Integer>(badConsumers_);
bitField0_ |= 0x00000004;
}
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
// Wrapped in unmodifiableList so callers cannot mutate builder state
// behind its back.
public java.util.List<java.lang.Integer>
getBadConsumersList() {
return java.util.Collections.unmodifiableList(badConsumers_);
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public int getBadConsumersCount() {
return badConsumers_.size();
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public int getBadConsumers(int index) {
return badConsumers_.get(index);
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public Builder setBadConsumers(
int index, int value) {
ensureBadConsumersIsMutable();
badConsumers_.set(index, value);
onChanged();
return this;
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
public Builder addBadConsumers(int value) {
ensureBadConsumersIsMutable();
badConsumers_.add(value);
onChanged();
return this;
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
// Bulk append; AbstractMessageLite.Builder.addAll also null-checks the
// iterable and its elements.
public Builder addAllBadConsumers(
java.lang.Iterable<? extends java.lang.Integer> values) {
ensureBadConsumersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, badConsumers_);
onChanged();
return this;
}
/**
 * <pre>
 * Specific consumer versions which are disallowed (e.g. due to bugs).
 * </pre>
 *
 * <code>repeated int32 bad_consumers = 3;</code>
 */
// Resets to the shared empty list and clears the mutable bit.
public Builder clearBadConsumers() {
badConsumers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
// NOTE(review): both overrides are deliberate no-ops, so unknown fields
// are discarded by this builder rather than preserved.
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:tensorflow.VersionDef)
}
// @@protoc_insertion_point(class_scope:tensorflow.VersionDef)
// Singleton default instance; all fields at proto3 defaults.
private static final org.tensorflow.framework.VersionDef DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.VersionDef();
}
public static org.tensorflow.framework.VersionDef getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser delegates to the wire-format parsing constructor above.
private static final com.google.protobuf.Parser<VersionDef>
PARSER = new com.google.protobuf.AbstractParser<VersionDef>() {
public VersionDef parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new VersionDef(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<VersionDef> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<VersionDef> getParserForType() {
return PARSER;
}
public org.tensorflow.framework.VersionDef getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appintegrations;
import javax.annotation.Generated;
import com.amazonaws.services.appintegrations.model.*;
/**
* Abstract implementation of {@code AmazonAppIntegrationsAsync}. Convenient method forms pass through to the
* corresponding overload that takes a request object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonAppIntegrationsAsync extends AbstractAmazonAppIntegrations implements AmazonAppIntegrationsAsync {
// Generated skeleton. Every operation follows the same pattern: the
// single-argument overload delegates to the two-argument overload with a
// null AsyncHandler, and the two-argument overload throws
// UnsupportedOperationException. Subclasses override only the operations
// they support.
protected AbstractAmazonAppIntegrationsAsync() {
}
@Override
public java.util.concurrent.Future<CreateDataIntegrationResult> createDataIntegrationAsync(CreateDataIntegrationRequest request) {
return createDataIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateDataIntegrationResult> createDataIntegrationAsync(CreateDataIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<CreateDataIntegrationRequest, CreateDataIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateEventIntegrationResult> createEventIntegrationAsync(CreateEventIntegrationRequest request) {
return createEventIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateEventIntegrationResult> createEventIntegrationAsync(CreateEventIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<CreateEventIntegrationRequest, CreateEventIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteDataIntegrationResult> deleteDataIntegrationAsync(DeleteDataIntegrationRequest request) {
return deleteDataIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteDataIntegrationResult> deleteDataIntegrationAsync(DeleteDataIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteDataIntegrationRequest, DeleteDataIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteEventIntegrationResult> deleteEventIntegrationAsync(DeleteEventIntegrationRequest request) {
return deleteEventIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteEventIntegrationResult> deleteEventIntegrationAsync(DeleteEventIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteEventIntegrationRequest, DeleteEventIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDataIntegrationResult> getDataIntegrationAsync(GetDataIntegrationRequest request) {
return getDataIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDataIntegrationResult> getDataIntegrationAsync(GetDataIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<GetDataIntegrationRequest, GetDataIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetEventIntegrationResult> getEventIntegrationAsync(GetEventIntegrationRequest request) {
return getEventIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetEventIntegrationResult> getEventIntegrationAsync(GetEventIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<GetEventIntegrationRequest, GetEventIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListDataIntegrationAssociationsResult> listDataIntegrationAssociationsAsync(
ListDataIntegrationAssociationsRequest request) {
return listDataIntegrationAssociationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListDataIntegrationAssociationsResult> listDataIntegrationAssociationsAsync(
ListDataIntegrationAssociationsRequest request,
com.amazonaws.handlers.AsyncHandler<ListDataIntegrationAssociationsRequest, ListDataIntegrationAssociationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListDataIntegrationsResult> listDataIntegrationsAsync(ListDataIntegrationsRequest request) {
return listDataIntegrationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListDataIntegrationsResult> listDataIntegrationsAsync(ListDataIntegrationsRequest request,
com.amazonaws.handlers.AsyncHandler<ListDataIntegrationsRequest, ListDataIntegrationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListEventIntegrationAssociationsResult> listEventIntegrationAssociationsAsync(
ListEventIntegrationAssociationsRequest request) {
return listEventIntegrationAssociationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListEventIntegrationAssociationsResult> listEventIntegrationAssociationsAsync(
ListEventIntegrationAssociationsRequest request,
com.amazonaws.handlers.AsyncHandler<ListEventIntegrationAssociationsRequest, ListEventIntegrationAssociationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListEventIntegrationsResult> listEventIntegrationsAsync(ListEventIntegrationsRequest request) {
return listEventIntegrationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListEventIntegrationsResult> listEventIntegrationsAsync(ListEventIntegrationsRequest request,
com.amazonaws.handlers.AsyncHandler<ListEventIntegrationsRequest, ListEventIntegrationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) {
return listTagsForResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request,
com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) {
return tagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) {
return untagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UpdateDataIntegrationResult> updateDataIntegrationAsync(UpdateDataIntegrationRequest request) {
return updateDataIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateDataIntegrationResult> updateDataIntegrationAsync(UpdateDataIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<UpdateDataIntegrationRequest, UpdateDataIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UpdateEventIntegrationResult> updateEventIntegrationAsync(UpdateEventIntegrationRequest request) {
return updateEventIntegrationAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateEventIntegrationResult> updateEventIntegrationAsync(UpdateEventIntegrationRequest request,
com.amazonaws.handlers.AsyncHandler<UpdateEventIntegrationRequest, UpdateEventIntegrationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;
/**
* The beanstalk component is used for job retrieval and post-processing of
* Beanstalk jobs.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface BeanstalkEndpointBuilderFactory {
/**
* Builder for endpoint consumers for the Beanstalk component.
*/
public interface BeanstalkEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default AdvancedBeanstalkEndpointConsumerBuilder advanced() {
return (AdvancedBeanstalkEndpointConsumerBuilder) this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option is a:
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointConsumerBuilder command(
BeanstalkCommand command) {
doSetProperty("command", command);
return this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option will be converted to a
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointConsumerBuilder command(String command) {
doSetProperty("command", command);
return this;
}
/**
* Job delay in seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobDelay(int jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job delay in seconds.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobDelay(String jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobPriority(long jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobPriority(String jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option is a: <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobTimeToRun(int jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointConsumerBuilder jobTimeToRun(
String jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
/**
* Whether to wait for job to complete before ack the job from
* beanstalk.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder awaitJob(boolean awaitJob) {
doSetProperty("awaitJob", awaitJob);
return this;
}
/**
* Whether to wait for job to complete before ack the job from
* beanstalk.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder awaitJob(String awaitJob) {
doSetProperty("awaitJob", awaitJob);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder bridgeErrorHandler(
String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Command to use when processing failed.
*
* The option is a:
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder onFailure(
BeanstalkCommand onFailure) {
doSetProperty("onFailure", onFailure);
return this;
}
/**
* Command to use when processing failed.
*
* The option will be converted to a
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder onFailure(String onFailure) {
doSetProperty("onFailure", onFailure);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder sendEmptyMessageWhenIdle(
boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder sendEmptyMessageWhenIdle(
String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* Whether to use blockIO.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder useBlockIO(boolean useBlockIO) {
doSetProperty("useBlockIO", useBlockIO);
return this;
}
/**
* Whether to use blockIO.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*/
default BeanstalkEndpointConsumerBuilder useBlockIO(String useBlockIO) {
doSetProperty("useBlockIO", useBlockIO);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffErrorThreshold(
int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffErrorThreshold(
String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffIdleThreshold(
int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffIdleThreshold(
String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffMultiplier(
int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder backoffMultiplier(
String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder initialDelay(
String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder runLoggingLevel(
LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder runLoggingLevel(
String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder scheduledExecutorService(
ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder scheduledExecutorService(
String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: none
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder schedulerProperties(
Map<String, Object> schedulerProperties) {
doSetProperty("schedulerProperties", schedulerProperties);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option will be converted to a
* <code>java.util.Map<java.lang.String, java.lang.Object></code>
* type.
*
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder schedulerProperties(
String schedulerProperties) {
doSetProperty("schedulerProperties", schedulerProperties);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder startScheduler(
boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder startScheduler(
String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder useFixedDelay(
boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default BeanstalkEndpointConsumerBuilder useFixedDelay(
String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Beanstalk component.
*/
public interface AdvancedBeanstalkEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default BeanstalkEndpointConsumerBuilder basic() {
return (BeanstalkEndpointConsumerBuilder) this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder exceptionHandler(
ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder exceptionHandler(
String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder exchangePattern(
ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder exchangePattern(
String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option is a:
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder pollStrategy(
PollingConsumerPollStrategy pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option will be converted to a
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedBeanstalkEndpointConsumerBuilder pollStrategy(
String pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* Whether the endpoint should use basic property binding (Camel 2.x) or
* the newer property binding with additional capabilities.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointConsumerBuilder basicPropertyBinding(
boolean basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* Whether the endpoint should use basic property binding (Camel 2.x) or
* the newer property binding with additional capabilities.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointConsumerBuilder basicPropertyBinding(
String basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointConsumerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointConsumerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint producers for the Beanstalk component.
*/
public interface BeanstalkEndpointProducerBuilder
extends
EndpointProducerBuilder {
default AdvancedBeanstalkEndpointProducerBuilder advanced() {
return (AdvancedBeanstalkEndpointProducerBuilder) this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option is a:
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointProducerBuilder command(
BeanstalkCommand command) {
doSetProperty("command", command);
return this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option will be converted to a
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointProducerBuilder command(String command) {
doSetProperty("command", command);
return this;
}
/**
* Job delay in seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobDelay(int jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job delay in seconds.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobDelay(String jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobPriority(long jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobPriority(String jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option is a: <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobTimeToRun(int jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointProducerBuilder jobTimeToRun(
String jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default BeanstalkEndpointProducerBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default BeanstalkEndpointProducerBuilder lazyStartProducer(
String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Advanced builder for endpoint producers for the Beanstalk component.
*/
public interface AdvancedBeanstalkEndpointProducerBuilder
extends
EndpointProducerBuilder {
default BeanstalkEndpointProducerBuilder basic() {
return (BeanstalkEndpointProducerBuilder) this;
}
/**
* Whether the endpoint should use basic property binding (Camel 2.x) or
* the newer property binding with additional capabilities.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointProducerBuilder basicPropertyBinding(
boolean basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* Whether the endpoint should use basic property binding (Camel 2.x) or
* the newer property binding with additional capabilities.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointProducerBuilder basicPropertyBinding(
String basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointProducerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedBeanstalkEndpointProducerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint for the Beanstalk component.
*/
public interface BeanstalkEndpointBuilder
extends
BeanstalkEndpointConsumerBuilder,
BeanstalkEndpointProducerBuilder {
default AdvancedBeanstalkEndpointBuilder advanced() {
return (AdvancedBeanstalkEndpointBuilder) this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option is a:
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointBuilder command(BeanstalkCommand command) {
doSetProperty("command", command);
return this;
}
/**
* put means to put the job into Beanstalk. Job body is specified in the
* Camel message body. Job ID will be returned in beanstalk.jobId
* message header. delete, release, touch or bury expect Job ID in the
* message header beanstalk.jobId. Result of the operation is returned
* in beanstalk.result message header kick expects the number of jobs to
* kick in the message body and returns the number of jobs actually
* kicked out in the message header beanstalk.result.
*
* The option will be converted to a
* <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code>
* type.
*
* Group: common
*/
default BeanstalkEndpointBuilder command(String command) {
doSetProperty("command", command);
return this;
}
/**
* Job delay in seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointBuilder jobDelay(int jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job delay in seconds.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 0
* Group: common
*/
default BeanstalkEndpointBuilder jobDelay(String jobDelay) {
doSetProperty("jobDelay", jobDelay);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointBuilder jobPriority(long jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job priority. (0 is the highest, see Beanstalk protocol).
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: common
*/
default BeanstalkEndpointBuilder jobPriority(String jobPriority) {
doSetProperty("jobPriority", jobPriority);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option is a: <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointBuilder jobTimeToRun(int jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
/**
* Job time to run in seconds. (when 0, the beanstalkd daemon raises it
* to 1 automatically, see Beanstalk protocol).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 60
* Group: common
*/
default BeanstalkEndpointBuilder jobTimeToRun(String jobTimeToRun) {
doSetProperty("jobTimeToRun", jobTimeToRun);
return this;
}
}
/**
 * Advanced builder for endpoint for the Beanstalk component.
 */
public interface AdvancedBeanstalkEndpointBuilder
    extends
        AdvancedBeanstalkEndpointConsumerBuilder,
        AdvancedBeanstalkEndpointProducerBuilder {
    /**
     * Switches back to the basic (non-advanced) view of this builder.
     *
     * @return this builder, viewed through the basic builder interface
     */
    default BeanstalkEndpointBuilder basic() {
        return (BeanstalkEndpointBuilder) this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     *
     * @param basicPropertyBinding true to use legacy Camel 2.x property binding
     * @return this builder, for fluent method chaining
     */
    default AdvancedBeanstalkEndpointBuilder basicPropertyBinding(
            boolean basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     *
     * @param basicPropertyBinding flag as a string, converted to boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedBeanstalkEndpointBuilder basicPropertyBinding(
            String basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     *
     * @param synchronous true to force strictly synchronous processing
     * @return this builder, for fluent method chaining
     */
    default AdvancedBeanstalkEndpointBuilder synchronous(boolean synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     *
     * @param synchronous flag as a string, converted to boolean at runtime
     * @return this builder, for fluent method chaining
     */
    default AdvancedBeanstalkEndpointBuilder synchronous(String synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
}
/**
 * Proxy enum for
 * <code>org.apache.camel.component.beanstalk.BeanstalkCommand</code> enum.
 *
 * The lower-case constant names mirror the wrapped enum's constants;
 * presumably each corresponds to the Beanstalk protocol command of the
 * same name (bury, release, put, touch, delete, kick).
 */
enum BeanstalkCommand {
    bury,
    release,
    put,
    touch,
    delete,
    kick;
}
public interface BeanstalkBuilders {
    /**
     * Beanstalk (camel-beanstalk)
     * The beanstalk component is used for job retrieval and post-processing
     * of Beanstalk jobs.
     *
     * Category: messaging
     * Since: 2.15
     * Maven coordinates: org.apache.camel:camel-beanstalk
     *
     * Syntax: <code>beanstalk:connectionSettings</code>
     *
     * Path parameter: connectionSettings
     * Connection settings host:port/tube
     *
     * @param path the endpoint path (connectionSettings, i.e. host:port/tube)
     * @return a new endpoint builder for the Beanstalk component
     */
    default BeanstalkEndpointBuilder beanstalk(String path) {
        return BeanstalkEndpointBuilderFactory.beanstalk(path);
    }
}
/**
 * Creates an endpoint builder for the Beanstalk component (camel-beanstalk),
 * which retrieves and post-processes Beanstalk jobs.
 *
 * Category: messaging
 * Since: 2.15
 * Maven coordinates: org.apache.camel:camel-beanstalk
 *
 * Syntax: <code>beanstalk:connectionSettings</code>
 *
 * Path parameter: connectionSettings
 * Connection settings host:port/tube
 *
 * @param path the connectionSettings path (host:port/tube)
 * @return a builder exposing both the basic and the advanced endpoint options
 */
static BeanstalkEndpointBuilder beanstalk(String path) {
    // A named local class is required here: an anonymous class cannot
    // implement the basic and the advanced builder interfaces at once.
    class CombinedBeanstalkEndpointBuilder extends AbstractEndpointBuilder implements BeanstalkEndpointBuilder, AdvancedBeanstalkEndpointBuilder {
        public CombinedBeanstalkEndpointBuilder(String path) {
            super("beanstalk", path);
        }
    }
    return new CombinedBeanstalkEndpointBuilder(path);
}
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.webkit;
import android.annotation.SystemApi;
import android.app.ActivityManagerInternal;
import android.app.AppGlobals;
import android.app.Application;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Process;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.StrictMode;
import android.os.SystemProperties;
import android.os.Trace;
import android.text.TextUtils;
import android.util.AndroidRuntimeException;
import android.util.Log;
import com.android.server.LocalServices;
import dalvik.system.VMRuntime;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
 * Top level factory, used for creating all the main WebView implementation classes.
 *
 * @hide
 */
@SystemApi
public final class WebViewFactory {
    // Class names of the real (Chromium) provider and of the stub provider used on
    // devices without WebView support; both are loaded reflectively.
    private static final String CHROMIUM_WEBVIEW_FACTORY =
            "com.android.webview.chromium.WebViewChromiumFactoryProvider";
    private static final String NULL_WEBVIEW_FACTORY =
            "com.android.webview.nullwebview.NullWebViewFactoryProvider";
    // On-disk locations of the shared RELRO sections created by the RelroFileCreator.
    private static final String CHROMIUM_WEBVIEW_NATIVE_RELRO_32 =
            "/data/misc/shared_relro/libwebviewchromium32.relro";
    private static final String CHROMIUM_WEBVIEW_NATIVE_RELRO_64 =
            "/data/misc/shared_relro/libwebviewchromium64.relro";
    // System property holding how many bytes of address space the zygote should
    // reserve for the WebView native library, and the default when unset.
    public static final String CHROMIUM_WEBVIEW_VMSIZE_SIZE_PROPERTY =
            "persist.sys.webview.vmsize";
    private static final long CHROMIUM_WEBVIEW_DEFAULT_VMSIZE_BYTES = 100 * 1024 * 1024;
    private static final String LOGTAG = "WebViewFactory";
    private static final boolean DEBUG = false;
    // Cache the factory both for efficiency, and ensure any one process gets all webviews from the
    // same provider.
    private static WebViewFactoryProvider sProviderInstance;
    private static final Object sProviderLock = new Object();
    // Set in prepareWebViewInZygote(); native loading refuses to run until it is true.
    private static boolean sAddressSpaceReserved = false;
    private static PackageInfo sPackageInfo;
    // Error codes for loadWebViewNativeLibraryFromPackage
    public static final int LIBLOAD_SUCCESS = 0;
    public static final int LIBLOAD_WRONG_PACKAGE_NAME = 1;
    public static final int LIBLOAD_ADDRESS_SPACE_NOT_RESERVED = 2;
    public static final int LIBLOAD_FAILED_WAITING_FOR_RELRO = 3;
    public static final int LIBLOAD_FAILED_LISTING_WEBVIEW_PACKAGES = 4;
    // native relro loading error codes
    public static final int LIBLOAD_FAILED_TO_OPEN_RELRO_FILE = 5;
    public static final int LIBLOAD_FAILED_TO_LOAD_LIBRARY = 6;
    public static final int LIBLOAD_FAILED_JNI_CALL = 7;
    // Thrown when the configured WebView package cannot be found or inspected.
    private static class MissingWebViewPackageException extends AndroidRuntimeException {
        public MissingWebViewPackageException(String message) { super(message); }
        public MissingWebViewPackageException(Exception e) { super(e); }
    }
    /**
     * Returns the package name providing the WebView implementation, preferring
     * the configured alternate package when it is installed.
     */
    public static String getWebViewPackageName() {
        Application initialApp = AppGlobals.getInitialApplication();
        String pkg = initialApp.getString(
                com.android.internal.R.string.config_alternateWebViewPackageName);
        /* Attempt to use alternate WebView package first */
        if (isPackageInstalled(initialApp, pkg)) {
            return pkg;
        }
        return initialApp.getString(
                com.android.internal.R.string.config_webViewPackageName);
    }
    // Fetches fresh PackageInfo (including manifest meta-data) for the WebView package.
    // Throws MissingWebViewPackageException when the package is not installed.
    private static PackageInfo fetchPackageInfo() {
        PackageManager pm = AppGlobals.getInitialApplication().getPackageManager();
        try {
            return pm.getPackageInfo(getWebViewPackageName(), PackageManager.GET_META_DATA);
        } catch (PackageManager.NameNotFoundException e) {
            throw new MissingWebViewPackageException(e);
        }
    }
    // throws MissingWebViewPackageException
    // Uses the cached sPackageInfo when available, otherwise queries the package manager.
    private static ApplicationInfo getWebViewApplicationInfo() {
        if (sPackageInfo == null) {
            return fetchPackageInfo().applicationInfo;
        } else {
            return sPackageInfo.applicationInfo;
        }
    }
    // Reads the native library file name from the WebView package's manifest
    // meta-data; returns null when the meta-data is absent.
    private static String getWebViewLibrary(ApplicationInfo ai) {
        if (ai.metaData != null)
            return ai.metaData.getString("com.android.webview.WebViewLibrary");
        return null;
    }
    // May return null if no WebView package info has been fetched in this process yet.
    public static PackageInfo getLoadedPackageInfo() {
        return sPackageInfo;
    }
    /**
     * Load the native library for the given package name iff that package
     * name is the same as the one providing the webview.
     *
     * @return LIBLOAD_SUCCESS or one of the other LIBLOAD_* error codes above
     */
    public static int loadWebViewNativeLibraryFromPackage(String packageName) {
        sPackageInfo = fetchPackageInfo();
        if (packageName != null && packageName.equals(sPackageInfo.packageName)) {
            return loadNativeLibrary();
        }
        return LIBLOAD_WRONG_PACKAGE_NAME;
    }
    // Lazily creates and caches the process-wide WebViewFactoryProvider.
    static WebViewFactoryProvider getProvider() {
        synchronized (sProviderLock) {
            // For now the main purpose of this function (and the factory abstraction) is to keep
            // us honest and minimize usage of WebView internals when binding the proxy.
            if (sProviderInstance != null) return sProviderInstance;
            final int uid = android.os.Process.myUid();
            if (uid == android.os.Process.ROOT_UID || uid == android.os.Process.SYSTEM_UID) {
                throw new UnsupportedOperationException(
                        "For security reasons, WebView is not allowed in privileged processes");
            }
            Trace.traceBegin(Trace.TRACE_TAG_WEBVIEW, "WebViewFactory.getProvider()");
            try {
                Class<WebViewFactoryProvider> providerClass = getProviderClass();
                // Disk reads are expected while loading the provider; relax StrictMode
                // for the duration and restore the old policy in the finally below.
                StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
                Trace.traceBegin(Trace.TRACE_TAG_WEBVIEW, "providerClass.newInstance()");
                try {
                    sProviderInstance = providerClass.getConstructor(WebViewDelegate.class)
                            .newInstance(new WebViewDelegate());
                    if (DEBUG) Log.v(LOGTAG, "Loaded provider: " + sProviderInstance);
                    return sProviderInstance;
                } catch (Exception e) {
                    Log.e(LOGTAG, "error instantiating provider", e);
                    throw new AndroidRuntimeException(e);
                } finally {
                    Trace.traceEnd(Trace.TRACE_TAG_WEBVIEW);
                    StrictMode.setThreadPolicy(oldPolicy);
                }
            } finally {
                Trace.traceEnd(Trace.TRACE_TAG_WEBVIEW);
            }
        }
    }
    // Resolves the provider class, loading the native library first and falling
    // back to the null WebView provider when the WebView package is missing.
    private static Class<WebViewFactoryProvider> getProviderClass() {
        try {
            // First fetch the package info so we can log the webview package version.
            sPackageInfo = fetchPackageInfo();
            Log.i(LOGTAG, "Loading " + sPackageInfo.packageName + " version " +
                    sPackageInfo.versionName + " (code " + sPackageInfo.versionCode + ")");
            Trace.traceBegin(Trace.TRACE_TAG_WEBVIEW, "WebViewFactory.loadNativeLibrary()");
            loadNativeLibrary();
            Trace.traceEnd(Trace.TRACE_TAG_WEBVIEW);
            Trace.traceBegin(Trace.TRACE_TAG_WEBVIEW, "WebViewFactory.getChromiumProviderClass()");
            try {
                return getChromiumProviderClass();
            } catch (ClassNotFoundException e) {
                Log.e(LOGTAG, "error loading provider", e);
                throw new AndroidRuntimeException(e);
            } finally {
                Trace.traceEnd(Trace.TRACE_TAG_WEBVIEW);
            }
        } catch (MissingWebViewPackageException e) {
            // If the package doesn't exist, then try loading the null WebView instead.
            // If that succeeds, then this is a device without WebView support; if it fails then
            // swallow the failure, complain that the real WebView is missing and rethrow the
            // original exception.
            try {
                return (Class<WebViewFactoryProvider>) Class.forName(NULL_WEBVIEW_FACTORY);
            } catch (ClassNotFoundException e2) {
                // Ignore.
            }
            Log.e(LOGTAG, "Chromium WebView package does not exist", e);
            throw new AndroidRuntimeException(e);
        }
    }
    // throws MissingWebViewPackageException
    // Loads the Chromium provider's Java code from the WebView package into this app.
    private static Class<WebViewFactoryProvider> getChromiumProviderClass()
            throws ClassNotFoundException {
        Application initialApplication = AppGlobals.getInitialApplication();
        try {
            // Construct a package context to load the Java code into the current app.
            Context webViewContext = initialApplication.createPackageContext(
                    sPackageInfo.packageName,
                    Context.CONTEXT_INCLUDE_CODE | Context.CONTEXT_IGNORE_SECURITY);
            // Also make the provider's assets visible to this application.
            initialApplication.getAssets().addAssetPath(
                    webViewContext.getApplicationInfo().sourceDir);
            ClassLoader clazzLoader = webViewContext.getClassLoader();
            Trace.traceBegin(Trace.TRACE_TAG_WEBVIEW, "Class.forName()");
            try {
                return (Class<WebViewFactoryProvider>) Class.forName(CHROMIUM_WEBVIEW_FACTORY, true,
                        clazzLoader);
            } finally {
                Trace.traceEnd(Trace.TRACE_TAG_WEBVIEW);
            }
        } catch (PackageManager.NameNotFoundException e) {
            throw new MissingWebViewPackageException(e);
        }
    }
    /**
     * Perform any WebView loading preparations that must happen in the zygote.
     * Currently, this means allocating address space to load the real JNI library later.
     */
    public static void prepareWebViewInZygote() {
        try {
            System.loadLibrary("webviewchromium_loader");
            long addressSpaceToReserve =
                    SystemProperties.getLong(CHROMIUM_WEBVIEW_VMSIZE_SIZE_PROPERTY,
                    CHROMIUM_WEBVIEW_DEFAULT_VMSIZE_BYTES);
            sAddressSpaceReserved = nativeReserveAddressSpace(addressSpaceToReserve);
            if (sAddressSpaceReserved) {
                if (DEBUG) {
                    Log.v(LOGTAG, "address space reserved: " + addressSpaceToReserve + " bytes");
                }
            } else {
                Log.e(LOGTAG, "reserving " + addressSpaceToReserve +
                        " bytes of address space failed");
            }
        } catch (Throwable t) {
            // Log and discard errors at this stage as we must not crash the zygote.
            Log.e(LOGTAG, "error preparing native loader", t);
        }
    }
    /**
     * Perform any WebView loading preparations that must happen at boot from the system server,
     * after the package manager has started or after an update to the webview is installed.
     * This must be called in the system server.
     * Currently, this means spawning the child processes which will create the relro files.
     */
    public static void prepareWebViewInSystemServer() {
        String[] nativePaths = null;
        try {
            nativePaths = getWebViewNativeLibraryPaths();
        } catch (Throwable t) {
            // Log and discard errors at this stage as we must not crash the system server.
            Log.e(LOGTAG, "error preparing webview native library", t);
        }
        prepareWebViewInSystemServer(nativePaths);
    }
    // Spawns a relro creator process per supported address size (32/64 bit).
    private static void prepareWebViewInSystemServer(String[] nativeLibraryPaths) {
        if (DEBUG) Log.v(LOGTAG, "creating relro files");
        // We must always trigger createRelRo regardless of the value of nativeLibraryPaths. Any
        // unexpected values will be handled there to ensure that we trigger notifying any process
        // waiting on relro creation.
        if (Build.SUPPORTED_32_BIT_ABIS.length > 0) {
            if (DEBUG) Log.v(LOGTAG, "Create 32 bit relro");
            createRelroFile(false /* is64Bit */, nativeLibraryPaths);
        }
        if (Build.SUPPORTED_64_BIT_ABIS.length > 0) {
            if (DEBUG) Log.v(LOGTAG, "Create 64 bit relro");
            createRelroFile(true /* is64Bit */, nativeLibraryPaths);
        }
    }
    /**
     * Called when a WebView update is installed: measures the new library's size,
     * stores an enlarged address-space reservation in the system property for future
     * zygote forks, then regenerates the relro files.
     */
    public static void onWebViewUpdateInstalled() {
        String[] nativeLibs = null;
        try {
            nativeLibs = WebViewFactory.getWebViewNativeLibraryPaths();
            if (nativeLibs != null) {
                long newVmSize = 0L;
                for (String path : nativeLibs) {
                    if (path == null || TextUtils.isEmpty(path)) continue;
                    if (DEBUG) Log.d(LOGTAG, "Checking file size of " + path);
                    File f = new File(path);
                    if (f.exists()) {
                        newVmSize = Math.max(newVmSize, f.length());
                        continue;
                    }
                    // "apk!/lib/..." paths denote a library stored uncompressed inside an APK;
                    // size it via its zip entry instead of the (non-existent) extracted file.
                    if (path.contains("!/")) {
                        String[] split = TextUtils.split(path, "!/");
                        if (split.length == 2) {
                            try (ZipFile z = new ZipFile(split[0])) {
                                ZipEntry e = z.getEntry(split[1]);
                                if (e != null && e.getMethod() == ZipEntry.STORED) {
                                    newVmSize = Math.max(newVmSize, e.getSize());
                                    continue;
                                }
                            }
                            catch (IOException e) {
                                Log.e(LOGTAG, "error reading APK file " + split[0] + ", ", e);
                            }
                        }
                    }
                    Log.e(LOGTAG, "error sizing load for " + path);
                }
                if (DEBUG) {
                    Log.v(LOGTAG, "Based on library size, need " + newVmSize +
                            " bytes of address space.");
                }
                // The required memory can be larger than the file on disk (due to .bss), and an
                // upgraded version of the library will likely be larger, so always attempt to
                // reserve twice as much as we think to allow for the library to grow during this
                // boot cycle.
                newVmSize = Math.max(2 * newVmSize, CHROMIUM_WEBVIEW_DEFAULT_VMSIZE_BYTES);
                Log.d(LOGTAG, "Setting new address space to " + newVmSize);
                SystemProperties.set(CHROMIUM_WEBVIEW_VMSIZE_SIZE_PROPERTY,
                        Long.toString(newVmSize));
            }
        } catch (Throwable t) {
            // Log and discard errors at this stage as we must not crash the system server.
            Log.e(LOGTAG, "error preparing webview native library", t);
        }
        prepareWebViewInSystemServer(nativeLibs);
    }
    // throws MissingWebViewPackageException
    // Returns an "apkPath!/lib/<abi>/<lib>" dlopen()-style path for the first listed
    // ABI whose library is stored uncompressed in the APK, or "" when none matches.
    private static String getLoadFromApkPath(String apkPath,
                                             String[] abiList,
                                             String nativeLibFileName) {
        // Search the APK for a native library conforming to a listed ABI.
        try (ZipFile z = new ZipFile(apkPath)) {
            for (String abi : abiList) {
                final String entry = "lib/" + abi + "/" + nativeLibFileName;
                ZipEntry e = z.getEntry(entry);
                if (e != null && e.getMethod() == ZipEntry.STORED) {
                    // Return a path formatted for dlopen() load from APK.
                    return apkPath + "!/" + entry;
                }
            }
        } catch (IOException e) {
            throw new MissingWebViewPackageException(e);
        }
        return "";
    }
    // throws MissingWebViewPackageException
    // Returns { 32-bit path, 64-bit path }; an element is "" when that address
    // size is not provided by the WebView package.
    private static String[] getWebViewNativeLibraryPaths() {
        ApplicationInfo ai = getWebViewApplicationInfo();
        final String NATIVE_LIB_FILE_NAME = getWebViewLibrary(ai);
        String path32;
        String path64;
        boolean primaryArchIs64bit = VMRuntime.is64BitAbi(ai.primaryCpuAbi);
        if (!TextUtils.isEmpty(ai.secondaryCpuAbi)) {
            // Multi-arch case.
            if (primaryArchIs64bit) {
                // Primary arch: 64-bit, secondary: 32-bit.
                path64 = ai.nativeLibraryDir;
                path32 = ai.secondaryNativeLibraryDir;
            } else {
                // Primary arch: 32-bit, secondary: 64-bit.
                path64 = ai.secondaryNativeLibraryDir;
                path32 = ai.nativeLibraryDir;
            }
        } else if (primaryArchIs64bit) {
            // Single-arch 64-bit.
            path64 = ai.nativeLibraryDir;
            path32 = "";
        } else {
            // Single-arch 32-bit.
            path32 = ai.nativeLibraryDir;
            path64 = "";
        }
        // Form the full paths to the extracted native libraries.
        // If libraries were not extracted, try load from APK paths instead.
        if (!TextUtils.isEmpty(path32)) {
            path32 += "/" + NATIVE_LIB_FILE_NAME;
            File f = new File(path32);
            if (!f.exists()) {
                path32 = getLoadFromApkPath(ai.sourceDir,
                                            Build.SUPPORTED_32_BIT_ABIS,
                                            NATIVE_LIB_FILE_NAME);
            }
        }
        if (!TextUtils.isEmpty(path64)) {
            path64 += "/" + NATIVE_LIB_FILE_NAME;
            File f = new File(path64);
            if (!f.exists()) {
                path64 = getLoadFromApkPath(ai.sourceDir,
                                            Build.SUPPORTED_64_BIT_ABIS,
                                            NATIVE_LIB_FILE_NAME);
            }
        }
        if (DEBUG) Log.v(LOGTAG, "Native 32-bit lib: " + path32 + ", 64-bit lib: " + path64);
        return new String[] { path32, path64 };
    }
    // Starts an isolated RelroFileCreator process for the given address size; on any
    // failure the crash handler still notifies the update service so waiters unblock.
    private static void createRelroFile(final boolean is64Bit, String[] nativeLibraryPaths) {
        final String abi =
                is64Bit ? Build.SUPPORTED_64_BIT_ABIS[0] : Build.SUPPORTED_32_BIT_ABIS[0];
        // crashHandler is invoked by the ActivityManagerService when the isolated process crashes.
        Runnable crashHandler = new Runnable() {
            @Override
            public void run() {
                try {
                    Log.e(LOGTAG, "relro file creator for " + abi + " crashed. Proceeding without");
                    getUpdateService().notifyRelroCreationCompleted(is64Bit, false);
                } catch (RemoteException e) {
                    Log.e(LOGTAG, "Cannot reach WebViewUpdateService. " + e.getMessage());
                }
            }
        };
        try {
            if (nativeLibraryPaths == null
                    || nativeLibraryPaths[0] == null || nativeLibraryPaths[1] == null) {
                throw new IllegalArgumentException(
                        "Native library paths to the WebView RelRo process must not be null!");
            }
            int pid = LocalServices.getService(ActivityManagerInternal.class).startIsolatedProcess(
                    RelroFileCreator.class.getName(), nativeLibraryPaths, "WebViewLoader-" + abi, abi,
                    Process.SHARED_RELRO_UID, crashHandler);
            if (pid <= 0) throw new Exception("Failed to start the relro file creator process");
        } catch (Throwable t) {
            // Log and discard errors as we must not crash the system server.
            Log.e(LOGTAG, "error starting relro file creator for abi " + abi, t);
            crashHandler.run();
        }
    }
    private static class RelroFileCreator {
        // Called in an unprivileged child process to create the relro file.
        // args[0] is the 32-bit library path, args[1] the 64-bit library path.
        public static void main(String[] args) {
            boolean result = false;
            boolean is64Bit = VMRuntime.getRuntime().is64Bit();
            try{
                if (args.length != 2 || args[0] == null || args[1] == null) {
                    Log.e(LOGTAG, "Invalid RelroFileCreator args: " + Arrays.toString(args));
                    return;
                }
                Log.v(LOGTAG, "RelroFileCreator (64bit = " + is64Bit + "), " +
                        " 32-bit lib: " + args[0] + ", 64-bit lib: " + args[1]);
                if (!sAddressSpaceReserved) {
                    Log.e(LOGTAG, "can't create relro file; address space not reserved");
                    return;
                }
                result = nativeCreateRelroFile(args[0] /* path32 */,
                                               args[1] /* path64 */,
                                               CHROMIUM_WEBVIEW_NATIVE_RELRO_32,
                                               CHROMIUM_WEBVIEW_NATIVE_RELRO_64);
                if (result && DEBUG) Log.v(LOGTAG, "created relro file");
            } finally {
                // We must do our best to always notify the update service, even if something fails.
                try {
                    getUpdateService().notifyRelroCreationCompleted(is64Bit, result);
                } catch (RemoteException e) {
                    Log.e(LOGTAG, "error notifying update service", e);
                }
                if (!result) Log.e(LOGTAG, "failed to create relro file");
                // Must explicitly exit or else this process will just sit around after we return.
                System.exit(0);
            }
        }
    }
    // Waits for relro creation, then loads the native library using the relro file.
    // Returns LIBLOAD_SUCCESS or one of the LIBLOAD_* error codes.
    private static int loadNativeLibrary() {
        if (!sAddressSpaceReserved) {
            Log.e(LOGTAG, "can't load with relro file; address space not reserved");
            return LIBLOAD_ADDRESS_SPACE_NOT_RESERVED;
        }
        try {
            getUpdateService().waitForRelroCreationCompleted(VMRuntime.getRuntime().is64Bit());
        } catch (RemoteException e) {
            Log.e(LOGTAG, "error waiting for relro creation, proceeding without", e);
            return LIBLOAD_FAILED_WAITING_FOR_RELRO;
        }
        try {
            String[] args = getWebViewNativeLibraryPaths();
            int result = nativeLoadWithRelroFile(args[0] /* path32 */,
                                                 args[1] /* path64 */,
                                                 CHROMIUM_WEBVIEW_NATIVE_RELRO_32,
                                                 CHROMIUM_WEBVIEW_NATIVE_RELRO_64);
            if (result != LIBLOAD_SUCCESS) {
                Log.w(LOGTAG, "failed to load with relro file, proceeding without");
            } else if (DEBUG) {
                Log.v(LOGTAG, "loaded with relro file");
            }
            return result;
        } catch (MissingWebViewPackageException e) {
            Log.e(LOGTAG, "Failed to list WebView package libraries for loadNativeLibrary", e);
            return LIBLOAD_FAILED_LISTING_WEBVIEW_PACKAGES;
        }
    }
    // Binder interface to the WebViewUpdateService running in the system server.
    private static IWebViewUpdateService getUpdateService() {
        return IWebViewUpdateService.Stub.asInterface(ServiceManager.getService("webviewupdate"));
    }
    private static boolean isPackageInstalled(Context context, String packageName) {
        try {
            return context.getPackageManager().getPackageInfo(packageName, 0) != null;
        } catch (PackageManager.NameNotFoundException e) {
            return false;
        }
    }
    private static native boolean nativeReserveAddressSpace(long addressSpaceToReserve);
    private static native boolean nativeCreateRelroFile(String lib32, String lib64,
                                                        String relro32, String relro64);
    private static native int nativeLoadWithRelroFile(String lib32, String lib64,
                                                      String relro32, String relro64);
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.builder.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.compiler.TypeDeclarationError;
import org.drools.compiler.lang.descr.AbstractClassTypeDeclarationDescr;
import org.drools.compiler.lang.descr.EnumDeclarationDescr;
import org.drools.compiler.lang.descr.TypeDeclarationDescr;
import org.drools.compiler.lang.descr.TypeFieldDescr;
import org.drools.core.factmodel.FieldDefinition;
import org.drools.core.factmodel.GeneratedFact;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.util.asm.ClassFieldInspector;
import org.kie.api.definition.type.FactField;
import org.kie.api.definition.type.PropertyChangeSupport;
import org.kie.api.definition.type.Role;
import org.kie.api.definition.type.TypeSafe;
public class TypeDeclarationFactory {
protected KnowledgeBuilderImpl kbuilder;
    /**
     * @param kbuilder the knowledge builder this factory reads existing type
     *                 declarations from and reports build errors to
     */
    public TypeDeclarationFactory( KnowledgeBuilderImpl kbuilder ) {
        this.kbuilder = kbuilder;
    }
    /**
     * Returns the TypeDeclaration already registered for the descriptor's full
     * type name, or builds a fresh one when none exists yet; in both cases the
     * descriptor's type-level annotations are (re)applied before returning.
     */
    public TypeDeclaration processTypeDeclaration( PackageRegistry pkgRegistry,
                                                   AbstractClassTypeDeclarationDescr typeDescr ) {
        TypeDeclaration type = kbuilder.getTypeBuilder().getExistingTypeDeclaration( typeDescr.getFullTypeName() );
        if (type == null) {
            type = new TypeDeclaration( typeDescr.getTypeName() );
            type.setResource( typeDescr.getResource() );
            // Decide whether a class must be generated: if a class with this name
            // already exists on the classpath the declaration merely annotates it
            // (DECLARATION, not novel); otherwise it defines a new type (DEFINITION).
            Class<?> existingClass = TypeDeclarationUtils.getExistingDeclarationClass( typeDescr, pkgRegistry );
            type.setTypeClass( existingClass );
            type.setNovel( existingClass == null );
            type.setNature( existingClass == null ? TypeDeclaration.Nature.DEFINITION : TypeDeclaration.Nature.DECLARATION );
        }
        processTypeAnnotations(typeDescr, type);
        return type;
    }
private void processTypeAnnotations( AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type ) {
try {
processAnnotations( typeDescr, type );
} catch (Exception e) {
kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, e.getMessage() ) );
}
}
public static void processAnnotations( AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type ) {
Role role = typeDescr.getTypedAnnotation(Role.class);
if (role != null) {
type.setRole(role.value());
}
TypeSafe typeSafe = typeDescr.getTypedAnnotation(TypeSafe.class);
if (typeSafe != null) {
type.setTypesafe(typeSafe.value());
}
if (typeDescr instanceof EnumDeclarationDescr ) {
type.setKind(TypeDeclaration.Kind.ENUM);
} else if (typeDescr instanceof TypeDeclarationDescr && ((TypeDeclarationDescr)typeDescr).isTrait()) {
type.setKind(TypeDeclaration.Kind.TRAIT);
}
type.setDynamic( typeDescr.hasAnnotation(PropertyChangeSupport.class) );
}
    /**
     * Validates a redeclaration of an already-known type against its previous
     * version; incompatibilities mark the type invalid and are reported to the
     * builder as TypeDeclarationError results rather than thrown.
     */
    protected void checkRedeclaration( AbstractClassTypeDeclarationDescr typeDescr, TypeDeclaration type, PackageRegistry pkgRegistry ) {
        TypeDeclaration previousTypeDeclaration = kbuilder.getPackageRegistry( typeDescr.getNamespace() ).getPackage().getTypeDeclaration( typeDescr.getTypeName() );
        try {
            // A non-DEFINITION type means the declaration refers to an already
            // existing class (a POJO) rather than a model-generated one.
            if ( !type.isDefinition() ) {
                // new declarations of a POJO can't declare new fields,
                // except if the POJO was previously generated/compiled and saved into the kjar
                Class<?> existingDeclarationClass = TypeDeclarationUtils.getExistingDeclarationClass( typeDescr, pkgRegistry );
                // NOTE(review): isAssignableFrom throws NPE when existingDeclarationClass is
                // null; presumably a non-DEFINITION type always resolves to a class — confirm.
                if ( ! kbuilder.getBuilderConfiguration().isPreCompiled() &&
                     ! GeneratedFact.class.isAssignableFrom( existingDeclarationClass ) &&
                     ! type.getTypeClassDef().getFields().isEmpty()
                        ) {
                    try {
                        // Inspect the existing class and check every bean property it
                        // exposes against the fields of the new declaration.
                        Class existingClass = pkgRegistry.getPackage().getTypeResolver().resolveType( typeDescr.getType().getFullName() );
                        ClassFieldInspector cfi = new ClassFieldInspector( existingClass );
                        int fieldCount = 0;
                        for ( String existingFieldName : cfi.getFieldTypesField().keySet() ) {
                            // Only consider genuine bean properties (getter + setter, not "class").
                            if ( ! cfi.isNonGetter( existingFieldName )
                                 && ! "class".equals( existingFieldName )
                                 && cfi.getSetterMethods().containsKey( existingFieldName )
                                 && cfi.getGetterMethods().containsKey( existingFieldName )
                                    ) {
                                if ( ! typeDescr.getFields().containsKey( existingFieldName ) ) {
                                    // Existing property missing from the redeclaration -> invalid.
                                    type.setValid(false);
                                    kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, "New declaration of "+typeDescr.getType().getFullName() +
                                                                                                  " does not include field " + existingFieldName ) );
                                } else {
                                    // Property present: its (buildable) type must match exactly.
                                    String fldType = cfi.getFieldType( existingFieldName ).getName();
                                    fldType = TypeDeclarationUtils.toBuildableType( fldType, kbuilder.getRootClassLoader() );
                                    TypeFieldDescr declaredField = typeDescr.getFields().get( existingFieldName );
                                    if ( ! fldType.equals( type.getTypeClassDef().getField( existingFieldName ).getTypeName() ) ) {
                                        type.setValid(false);
                                        kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, "New declaration of "+typeDescr.getType().getFullName() +
                                                                                                      " redeclared field " + existingFieldName + " : \n" +
                                                                                                      "existing : " + fldType + " vs declared : " + declaredField.getPattern().getObjectType() ) );
                                    } else {
                                        fieldCount++;
                                    }
                                }
                            }
                        }
                        // The redeclaration must not add fields beyond the matched ones.
                        if ( fieldCount != typeDescr.getFields().size() ) {
                            kbuilder.addBuilderResult( reportDeclarationDiff( cfi, typeDescr ) );
                        }
                    } catch ( IOException e ) {
                        e.printStackTrace();
                        type.setValid(false);
                        kbuilder.addBuilderResult( new TypeDeclarationError( typeDescr, "Unable to redeclare " + typeDescr.getType().getFullName() + " : " + e.getMessage() ) );
                    } catch ( ClassNotFoundException e ) {
                        type.setValid(false);
                        kbuilder.addBuilderResult( new TypeDeclarationError( typeDescr, "Unable to redeclare " + typeDescr.getType().getFullName() + " : " + e.getMessage() ) );
                    }
                }
            } else if (previousTypeDeclaration != null) { // previous declaration can be null during an incremental compilation
                int typeComparisonResult = this.compareTypeDeclarations(previousTypeDeclaration, type);
                if (typeComparisonResult < 0) {
                    //oldDeclaration is "less" than newDeclaration -> error
                    kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, typeDescr.getType().getFullName()
                                                                                 + " declares more fields than the already existing version"));
                    type.setValid(false);
                } else if (typeComparisonResult > 0 && !type.getTypeClassDef().getFields().isEmpty()) {
                    //oldDeclaration is "greater" than newDeclaration -> error
                    kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, typeDescr.getType().getFullName()
                                                                                 + " declares less fields than the already existing version"));
                    type.setValid(false);
                }
                //if they are "equal" -> no problem
                // in the case of a declaration, we need to copy all the
                // fields present in the previous declaration
                if (type.getNature() == TypeDeclaration.Nature.DECLARATION) {
                    mergeTypeDeclarations(previousTypeDeclaration, type);
                }
            }
        } catch (IncompatibleClassChangeError error) {
            //if the types are incompatible -> error
            kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr, error.getMessage()));
        }
    }
/**
* Merges all the missing FactFields from oldDefinition into newDeclaration.
*/
protected void mergeTypeDeclarations(TypeDeclaration oldDeclaration,
TypeDeclaration newDeclaration) {
if (oldDeclaration == null) {
return;
}
//add the missing fields (if any) to newDeclaration
for (FieldDefinition oldFactField : oldDeclaration.getTypeClassDef().getFieldsDefinitions()) {
FieldDefinition newFactField = newDeclaration.getTypeClassDef().getField(oldFactField.getName());
if (newFactField == null) {
newDeclaration.getTypeClassDef().addField(oldFactField);
}
}
//copy the defined class
newDeclaration.setTypeClass( oldDeclaration.getTypeClass() );
}
/**
 * Compares an existing type declaration with a redeclaration of the same type.
 * <p>
 * Returns a negative value when the old declaration has fewer fields than the
 * new one, a positive value when it has more, and 0 when both declare exactly
 * the same fields with the same types. Any semantic incompatibility (format,
 * superclass, duration attribute, set mask, field type change, or a renamed
 * field) is reported by throwing {@link IncompatibleClassChangeError}.
 *
 * @param oldDeclaration the previously registered declaration
 * @param newDeclaration the newly parsed declaration for the same type
 * @return -1, 0 or 1 as described above
 * @throws IncompatibleClassChangeError if the two declarations are incompatible
 */
protected int compareTypeDeclarations(TypeDeclaration oldDeclaration,
                                      TypeDeclaration newDeclaration) throws IncompatibleClassChangeError {

    //different formats -> incompatible
    if (!oldDeclaration.getFormat().equals(newDeclaration.getFormat())) {
        throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
                + " format that its previous definition: " + newDeclaration.getFormat() + "!=" + oldDeclaration.getFormat());
    }

    //different superclasses -> Incompatible (TODO: check for hierarchy)
    if (!oldDeclaration.getTypeClassDef().getSuperClass().equals(newDeclaration.getTypeClassDef().getSuperClass())) {
        if (oldDeclaration.getNature() == TypeDeclaration.Nature.DEFINITION
                && newDeclaration.getNature() == TypeDeclaration.Nature.DECLARATION
                && Object.class.getName().equals(newDeclaration.getTypeClassDef().getSuperClass())) {
            // actually do nothing. The new declaration just recalls the previous definition, probably to extend it.
        } else {
            throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
                    + " superclass that its previous definition: " + newDeclaration.getTypeClassDef().getSuperClass()
                    + " != " + oldDeclaration.getTypeClassDef().getSuperClass());
        }
    }

    //different duration -> Incompatible
    if (!nullSafeEqualityComparison(oldDeclaration.getDurationAttribute(), newDeclaration.getDurationAttribute())) {
        throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
                + " duration: " + newDeclaration.getDurationAttribute()
                + " != " + oldDeclaration.getDurationAttribute());
    }

    // //different masks -> incompatible
    if (newDeclaration.getNature().equals(TypeDeclaration.Nature.DEFINITION)) {
        if (oldDeclaration.getSetMask() != newDeclaration.getSetMask()) {
            throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " is incompatible with"
                    + " the previous definition: " + newDeclaration
                    + " != " + oldDeclaration);
        }
    }

    //TODO: further comparison?

    //Field comparison
    List<FactField> oldFields = oldDeclaration.getTypeClassDef().getFields();
    Map<String, FactField> newFieldsMap = new HashMap<String, FactField>();
    for (FactField factField : newDeclaration.getTypeClassDef().getFields()) {
        newFieldsMap.put(factField.getName(), factField);
    }

    //each of the fields in the old definition that are also present in the
    //new definition must have the same type. If not -> Incompatible
    boolean allFieldsInOldDeclarationAreStillPresent = true;
    for (FactField oldFactField : oldFields) {
        FactField newFactField = newFieldsMap.get(oldFactField.getName());

        if (newFactField != null) {
            //we can't use newFactField.getType() since it throws a NPE at this point.
            String newFactType = ((FieldDefinition) newFactField).getTypeName();

            if (!newFactType.equals( ((FieldDefinition) oldFactField).getTypeName())) {
                // NOTE: the old side must also go through getTypeName() — calling
                // oldFactField.getType() here used to NPE while building the error
                // message, for the same reason documented above for the new field.
                throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + "." + newFactField.getName() + " has a different"
                        + " type that its previous definition: " + newFactType
                        + " != " + ((FieldDefinition) oldFactField).getTypeName());
            }
        } else {
            allFieldsInOldDeclarationAreStillPresent = false;
        }
    }

    //If the old declaration has less fields than the new declaration, oldDefinition < newDefinition
    if (oldFields.size() < newFieldsMap.size()) {
        return -1;
    }

    //If the old declaration has more fields than the new declaration, oldDefinition > newDefinition
    if (oldFields.size() > newFieldsMap.size()) {
        return 1;
    }

    //If the old declaration has the same fields as the new declaration,
    //and all the fieds present in the old declaration are also present in
    //the new declaration, then they are considered "equal", otherwise
    //they are incompatible
    if (allFieldsInOldDeclarationAreStillPresent) {
        return 0;
    }

    //Both declarations have the same number of fields, but not all the
    //fields in the old declaration are present in the new declaration.
    throw new IncompatibleClassChangeError(newDeclaration.getTypeName() + " introduces"
            + " fields that are not present in its previous version.");
}
/**
 * Null-tolerant equality based on {@link Comparable#compareTo}: two nulls are
 * equal, a single null never equals a non-null value, and two non-null values
 * are equal when compareTo returns 0.
 */
protected boolean nullSafeEqualityComparison(Comparable c1,
                                             Comparable c2) {
    if (c1 == null || c2 == null) {
        // equal only when BOTH references are null
        return c1 == c2;
    }
    return c1.compareTo(c2) == 0;
}
/**
 * Builds an error describing the field-set difference between an existing
 * class (as seen by the inspector) and a new type declaration. Fields present
 * only on the existing class are tagged "--", fields present only in the new
 * declaration are tagged "++".
 */
private TypeDeclarationError reportDeclarationDiff( ClassFieldInspector cfi, AbstractClassTypeDeclarationDescr typeDescr) {
    // Properties of the existing class: must have a setter, be a real getter-backed
    // field, and not be the synthetic "class" property.
    List<String> existing = new ArrayList<String>();
    for ( String fieldName : cfi.getFieldTypesField().keySet() ) {
        boolean isProperty = ! cfi.isNonGetter( fieldName )
                && ! "class".equals( fieldName )
                && cfi.getSetterMethods().containsKey( fieldName );
        if ( isProperty ) {
            existing.add( fieldName );
        }
    }
    Collections.sort( existing );

    List<String> declared = new ArrayList<String>( typeDescr.getFields().keySet() );
    Collections.sort( declared );

    List<String> deltas = new ArrayList<String>();
    for ( String name : existing ) {
        if ( ! declared.contains( name ) ) {
            deltas.add( "--" + name );
        }
    }
    for ( String name : declared ) {
        if ( ! existing.contains( name ) ) {
            deltas.add( "++" + name );
        }
    }

    return new TypeDeclarationError( typeDescr, "New declaration of " + typeDescr.getType().getFullName() +
                                                " can't declare a different set of fields \n" +
                                                "existing : " + existing + "\n" +
                                                "declared : " + declared + "\n" +
                                                "diff : " + deltas );
}
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacknetworking.routing;
import org.onlab.packet.Ethernet;
import org.onlab.packet.IPv4;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onlab.packet.TCP;
import org.onlab.packet.TpPort;
import org.onlab.packet.UDP;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.behaviour.ExtensionTreatmentResolver;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.DefaultDriverData;
import org.onosproject.net.driver.DefaultDriverHandler;
import org.onosproject.net.driver.Driver;
import org.onosproject.net.driver.DriverHandler;
import org.onosproject.net.driver.DriverService;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.instructions.ExtensionPropertyException;
import org.onosproject.net.flow.instructions.ExtensionTreatment;
import org.onosproject.net.flow.instructions.ExtensionTreatmentType;
import org.onosproject.net.flowobjective.DefaultForwardingObjective;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.packet.InboundPacket;
import org.onosproject.openstackinterface.OpenstackInterfaceService;
import org.onosproject.openstackinterface.OpenstackPort;
import org.onosproject.openstackinterface.OpenstackRouterInterface;
import org.onosproject.openstackinterface.OpenstackSubnet;
import org.onosproject.openstackinterface.OpenstackFloatingIP;
import org.onosproject.openstacknetworking.OpenstackNetworkingConfig;
import org.onosproject.openstacknetworking.OpenstackPortInfo;
import org.onosproject.openstacknetworking.OpenstackRoutingService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.stream.StreamSupport;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.onlab.osgi.DefaultServiceDirectory.getService;
/**
* Populates Routing Flow Rules.
*/
/**
 * Populates Routing Flow Rules.
 */
public class OpenstackRoutingRulePopulator {

    private final Logger log = LoggerFactory.getLogger(getClass());

    private final ApplicationId appId;
    private final FlowObjectiveService flowObjectiveService;
    private final OpenstackInterfaceService openstackService;
    private final DeviceService deviceService;
    private final DriverService driverService;
    private final OpenstackNetworkingConfig config;

    private static final String PORTNAME_PREFIX_TUNNEL = "vxlan";
    private static final String PORTNAME = "portName";
    private static final String PORTNAME_PREFIX_VM = "tap";
    private static final String PORTNOTNULL = "Port can not be null";
    private static final String DEVICENOTNULL = "Device can not be null";
    private static final String TUNNEL_DESTINATION = "tunnelDst";
    private static final int ROUTING_RULE_PRIORITY = 25000;
    private static final int FLOATING_RULE_PRIORITY = 42000;
    private static final int PNAT_RULE_PRIORITY = 26000;
    private static final int PNAT_TIMEOUT = 120;
    private static final int PREFIX_LENGTH = 32;
    private static final MacAddress GATEWAYMAC = MacAddress.valueOf("1f:1f:1f:1f:1f:1f");

    // Per-PNAT-session state: set by populatePnatFlowRules() and read by the
    // populatePnatIncoming/OutgoingFlowRules() helpers.
    private InboundPacket inboundPacket;
    private OpenstackPort openstackPort;
    private int portNum;
    private MacAddress externalInterface;
    private MacAddress externalRouter;

    /**
     * The constructor of openstackRoutingRulePopulator.
     *
     * @param appId Caller`s appId
     * @param openstackService Openstack REST request handler
     * @param flowObjectiveService FlowObjectiveService
     * @param deviceService DeviceService
     * @param driverService DriverService
     * @param config Configuration for openstack environment
     */
    public OpenstackRoutingRulePopulator(ApplicationId appId, OpenstackInterfaceService openstackService,
                                         FlowObjectiveService flowObjectiveService, DeviceService deviceService,
                                         DriverService driverService, OpenstackNetworkingConfig config) {
        this.appId = appId;
        this.flowObjectiveService = flowObjectiveService;
        this.openstackService = checkNotNull(openstackService);
        this.deviceService = deviceService;
        this.driverService = driverService;
        this.config = config;
    }

    /**
     * Populates flow rules for Pnat configurations.
     *
     * @param inboundPacket Packet-in event packet
     * @param openstackPort Target VM information
     * @param portNum Pnat port number
     * @param externalIp external ip address
     * @param externalInterfaceMacAddress Gateway external interface macaddress
     * @param externalRouterMacAddress Outer(physical) router`s macaddress
     */
    public void populatePnatFlowRules(InboundPacket inboundPacket, OpenstackPort openstackPort, int portNum,
                                      Ip4Address externalIp, MacAddress externalInterfaceMacAddress,
                                      MacAddress externalRouterMacAddress) {
        this.inboundPacket = inboundPacket;
        this.openstackPort = openstackPort;
        this.portNum = portNum;
        this.externalInterface = externalInterfaceMacAddress;
        this.externalRouter = externalRouterMacAddress;

        long vni = getVni(openstackPort.networkId());

        populatePnatIncomingFlowRules(vni, externalIp);
        populatePnatOutgoingFlowRules(vni, externalIp);
    }

    // VM -> external network direction: rewrite the source to the gateway's
    // external identity and NAT the L4 source port to the allocated PNAT port.
    private void populatePnatOutgoingFlowRules(long vni, Ip4Address externalIp) {
        IPv4 iPacket = (IPv4) inboundPacket.parsed().getPayload();

        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchIPProtocol(iPacket.getProtocol())
                .matchTunnelId(vni)
                .matchIPSrc(IpPrefix.valueOf(iPacket.getSourceAddress(), PREFIX_LENGTH))
                .matchIPDst(IpPrefix.valueOf(iPacket.getDestinationAddress(), PREFIX_LENGTH));

        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();
        tBuilder.setEthSrc(externalInterface)
                .setEthDst(externalRouter)
                .setIpSrc(externalIp);

        switch (iPacket.getProtocol()) {
            case IPv4.PROTOCOL_TCP:
                TCP tcpPacket = (TCP) iPacket.getPayload();
                sBuilder.matchTcpSrc(TpPort.tpPort(tcpPacket.getSourcePort()))
                        .matchTcpDst(TpPort.tpPort(tcpPacket.getDestinationPort()));
                tBuilder.setTcpSrc(TpPort.tpPort(portNum));
                break;
            case IPv4.PROTOCOL_UDP:
                UDP udpPacket = (UDP) iPacket.getPayload();
                sBuilder.matchUdpSrc(TpPort.tpPort(udpPacket.getSourcePort()))
                        .matchUdpDst(TpPort.tpPort(udpPacket.getDestinationPort()));
                tBuilder.setUdpSrc(TpPort.tpPort(portNum));
                break;
            default:
                // FIX: the original placeholder had no argument to fill it.
                log.debug("Unsupported IPv4 protocol {}", iPacket.getProtocol());
                break;
        }

        Port port = checkNotNull(getPortOfExternalInterface(), PORTNOTNULL);
        tBuilder.setOutput(port.number());

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.VERSATILE)
                .withPriority(PNAT_RULE_PRIORITY)
                .makeTemporary(PNAT_TIMEOUT)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(inboundPacket.receivedFrom().deviceId(), fo);
    }

    // Finds the gateway port whose "portName" annotation matches the configured
    // external interface name; null when no such port exists.
    private Port getPortOfExternalInterface() {
        // equals() is invoked on the configured name so ports lacking the
        // "portName" annotation do not cause an NPE.
        return deviceService.getPorts(getGatewayNode().id()).stream()
                .filter(p -> config.gatewayExternalInterfaceName()
                        .equals(p.annotations().value(PORTNAME)))
                .findAny().orElse(null);
    }

    // External network -> VM direction: undo the PNAT translation and tunnel
    // the packet back to the compute node hosting the VM.
    private void populatePnatIncomingFlowRules(long vni, Ip4Address externalIp) {
        IPv4 iPacket = (IPv4) inboundPacket.parsed().getPayload();
        DeviceId deviceId = inboundPacket.receivedFrom().deviceId();

        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchIPProtocol(iPacket.getProtocol())
                .matchIPDst(IpPrefix.valueOf(externalIp, PREFIX_LENGTH))
                .matchIPSrc(IpPrefix.valueOf(iPacket.getDestinationAddress(), PREFIX_LENGTH));

        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();
        tBuilder.setTunnelId(vni)
                .setEthDst(inboundPacket.parsed().getSourceMAC())
                .setIpDst(IpAddress.valueOf(iPacket.getSourceAddress()));

        switch (iPacket.getProtocol()) {
            case IPv4.PROTOCOL_TCP:
                TCP tcpPacket = (TCP) iPacket.getPayload();
                sBuilder.matchTcpSrc(TpPort.tpPort(tcpPacket.getDestinationPort()))
                        .matchTcpDst(TpPort.tpPort(portNum));
                tBuilder.setTcpDst(TpPort.tpPort(tcpPacket.getSourcePort()));
                break;
            case IPv4.PROTOCOL_UDP:
                UDP udpPacket = (UDP) iPacket.getPayload();
                sBuilder.matchUdpSrc(TpPort.tpPort(udpPacket.getDestinationPort()))
                        .matchUdpDst(TpPort.tpPort(portNum));
                tBuilder.setUdpDst(TpPort.tpPort(udpPacket.getSourcePort()));
                break;
            default:
                break;
        }

        tBuilder.extension(buildNiciraExtenstion(deviceId, getHostIpfromOpenstackPort(openstackPort)), deviceId)
                .setOutput(getTunnelPort(deviceId));

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.VERSATILE)
                .withPriority(PNAT_RULE_PRIORITY)
                .makeTemporary(PNAT_TIMEOUT)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(inboundPacket.receivedFrom().deviceId(), fo);
    }

    // Resolves the data-plane IP of the compute node hosting the given VM port.
    private Ip4Address getHostIpfromOpenstackPort(OpenstackPort openstackPort) {
        Device device = getDevicefromOpenstackPort(openstackPort);
        return config.nodes().get(device.id());
    }

    // Finds the device exposing the VM's tap interface. The tap port name is
    // derived from the first 11 characters of the Neutron port id.
    private Device getDevicefromOpenstackPort(OpenstackPort openstackPort) {
        String openstackPortName = PORTNAME_PREFIX_VM + openstackPort.id().substring(0, 11);
        // FIX: iterator().next() threw NoSuchElementException when no device
        // matched, so the checkNotNull below could never fire. findFirst()
        // yields null in that case and lets checkNotNull report DEVICENOTNULL.
        Device device = StreamSupport.stream(deviceService.getDevices().spliterator(), false)
                .filter(d -> findPortinDevice(d, openstackPortName))
                .findFirst()
                .orElse(null);
        checkNotNull(device, DEVICENOTNULL);
        return device;
    }

    // True when the device has an enabled port annotated with the given name.
    private boolean findPortinDevice(Device d, String openstackPortName) {
        // openstackPortName is never null; comparing in this direction avoids
        // an NPE for ports without a "portName" annotation.
        Port port = deviceService.getPorts(d.id())
                .stream()
                .filter(p -> p.isEnabled() && openstackPortName.equals(p.annotations().value(PORTNAME)))
                .findAny()
                .orElse(null);
        return port != null;
    }

    /**
     * Builds Nicira extension for tagging remoteIp of vxlan.
     *
     * @param id Device Id of vxlan source device
     * @param hostIp Remote Ip of vxlan destination device
     * @return NiciraExtension Treatment
     */
    public ExtensionTreatment buildNiciraExtenstion(DeviceId id, Ip4Address hostIp) {
        Driver driver = driverService.getDriver(id);
        DriverHandler driverHandler = new DefaultDriverHandler(new DefaultDriverData(driver, id));
        ExtensionTreatmentResolver resolver = driverHandler.behaviour(ExtensionTreatmentResolver.class);

        ExtensionTreatment extensionInstruction =
                resolver.getExtensionInstruction(
                        ExtensionTreatmentType.ExtensionTreatmentTypes.NICIRA_SET_TUNNEL_DST.type());

        try {
            extensionInstruction.setPropertyValue(TUNNEL_DESTINATION, hostIp);
        } catch (ExtensionPropertyException e) {
            // FIX: pass the throwable as the last argument (SLF4J logs the
            // stack trace) instead of feeding it to a message placeholder.
            log.error("Error setting Nicira extension setting", e);
        }

        return extensionInstruction;
    }

    /**
     * Returns port number of vxlan tunnel.
     *
     * @param deviceId Target Device Id
     * @return PortNumber
     */
    public PortNumber getTunnelPort(DeviceId deviceId) {
        // Constant-first equals avoids an NPE on unannotated ports.
        Port port = deviceService.getPorts(deviceId).stream()
                .filter(p -> PORTNAME_PREFIX_TUNNEL.equals(p.annotations().value(PORTNAME)))
                .findAny().orElse(null);

        if (port == null) {
            log.error("No TunnelPort was created.");
            return null;
        }
        return port.number();
    }

    /**
     * Populates flow rules from openstackComputeNode to GatewayNode.
     *
     * @param vni Target network
     */
    public void populateExternalRules(long vni) {
        // 1. computeNode to gateway
        populateComputeNodeRules(vni);
        // 2. gatewayNode to controller
        populateRuleGatewaytoController(vni);
    }

    // Punts gateway-bound traffic (matched by VNI + gateway MAC) to the controller.
    private void populateRuleGatewaytoController(long vni) {
        Device gatewayDevice = getGatewayNode();

        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(vni)
                .matchEthDst(GATEWAYMAC);
        tBuilder.setOutput(PortNumber.CONTROLLER);

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.VERSATILE)
                .withPriority(ROUTING_RULE_PRIORITY)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(gatewayDevice.id(), fo);
    }

    // Installs the gateway-bound rule on every compute (non-gateway) node.
    private void populateComputeNodeRules(long vni) {
        Device gatewayDevice = getGatewayNode();

        StreamSupport.stream(deviceService.getDevices().spliterator(), false)
                .filter(d -> !checkGatewayNode(d.id()))
                .forEach(d -> populateRuleToGateway(d, gatewayDevice, vni));
    }

    // On a compute node, tunnels gateway-MAC traffic for this VNI to the gateway.
    private void populateRuleToGateway(Device d, Device gatewayDevice, long vni) {
        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(vni)
                .matchEthDst(GATEWAYMAC);
        tBuilder.extension(buildNiciraExtenstion(d.id(), config.nodes().get(gatewayDevice.id())), d.id())
                .setOutput(getTunnelPort(d.id()));

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.SPECIFIC)
                .withPriority(ROUTING_RULE_PRIORITY)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(d.id(), fo);
    }

    // Looks up the gateway bridge device configured for this deployment.
    private Device getGatewayNode() {
        return checkNotNull(deviceService.getDevice(DeviceId.deviceId(config.gatewayBridgeId())));
    }

    // True when the given device id is the configured gateway bridge.
    private boolean checkGatewayNode(DeviceId deviceId) {
        return deviceId.toString().equals(config.gatewayBridgeId());
    }

    // Resolves a network's segment id (used as the VXLAN VNI) from OpenStack.
    private long getVni(String netId) {
        return Long.parseLong(openstackService.network(netId).segmentId());
    }

    /**
     * Remove flow rules for external connection.
     *
     * @param routerInterface Corresponding routerInterface
     */
    public void removeExternalRules(OpenstackRouterInterface routerInterface) {
        OpenstackSubnet openstackSubnet = openstackService.subnet(routerInterface.subnetId());
        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(getVni(openstackSubnet.networkId()))
                .matchEthDst(GATEWAYMAC);

        StreamSupport.stream(deviceService.getDevices().spliterator(), false)
                .forEach(d -> {
                    // The flag must mirror the one used at installation time:
                    // VERSATILE on the gateway, SPECIFIC on compute nodes.
                    ForwardingObjective.Flag flag = checkGatewayNode(d.id()) ?
                            ForwardingObjective.Flag.VERSATILE :
                            ForwardingObjective.Flag.SPECIFIC;
                    removeRule(d.id(), sBuilder, flag, ROUTING_RULE_PRIORITY);
                });
    }

    // Emits a remove() forwarding objective mirroring a previously added rule.
    private void removeRule(DeviceId id, TrafficSelector.Builder sBuilder,
                            ForwardingObjective.Flag flag, int priority) {
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(flag)
                .withPriority(priority)
                .fromApp(appId)
                .remove();

        flowObjectiveService.forward(id, fo);
    }

    /**
     * Populates flow rules for floatingIp configuration.
     *
     * @param floatingIP Corresponding floating ip information
     */
    public void populateFloatingIpRules(OpenstackFloatingIP floatingIP) {
        OpenstackPort port = openstackService.port(floatingIP.portId());
        //1. incoming rules
        populateFloatingIpIncomingRules(floatingIP, port);
        //2. outgoing rules
        populateFloatingIpOutgoingRules(floatingIP, port);
    }

    // External -> VM: on the gateway, rewrite the floating IP to the fixed IP
    // and tunnel the packet to the compute node hosting the VM.
    private void populateFloatingIpIncomingRules(OpenstackFloatingIP floatingIP, OpenstackPort port) {
        DeviceId portDeviceId = getDevicefromOpenstackPort(port).id();
        Device gatewayNode = getGatewayNode();
        Device portNode = deviceService.getDevice(portDeviceId);

        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchIPDst(IpPrefix.valueOf(floatingIP.floatingIpAddress(), PREFIX_LENGTH));

        tBuilder.setEthSrc(GATEWAYMAC)
                .setEthDst(port.macAddress())
                .setIpDst(floatingIP.fixedIpAddress())
                .setTunnelId(getVni(port.networkId()))
                .extension(buildNiciraExtenstion(gatewayNode.id(),
                        config.nodes().get(portNode.id())), gatewayNode.id())
                .setOutput(getTunnelPort(gatewayNode.id()));

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.VERSATILE)
                .withPriority(FLOATING_RULE_PRIORITY)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(getGatewayNode().id(), fo);
    }

    // VM -> external: on the gateway, rewrite the fixed IP to the floating IP
    // and forward out of the external interface toward the physical router.
    private void populateFloatingIpOutgoingRules(OpenstackFloatingIP floatingIP, OpenstackPort port) {
        Port outputPort = checkNotNull(getPortOfExternalInterface(), PORTNOTNULL);

        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(getVni(port.networkId()))
                .matchIPSrc(IpPrefix.valueOf(floatingIP.fixedIpAddress(), PREFIX_LENGTH));

        tBuilder.setIpSrc(floatingIP.floatingIpAddress())
                .setEthSrc(MacAddress.valueOf(config.gatewayExternalInterfaceMac()))
                .setEthDst(MacAddress.valueOf(config.physicalRouterMac()))
                .setOutput(outputPort.number());

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withFlag(ForwardingObjective.Flag.VERSATILE)
                .withPriority(FLOATING_RULE_PRIORITY)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(getGatewayNode().id(), fo);
    }

    /**
     * Removes flow rules for floating ip configuration.
     *
     * @param floatingIP Corresponding floating ip information
     * @param portInfo stored information about deleted vm
     */
    public void removeFloatingIpRules(OpenstackFloatingIP floatingIP, OpenstackPortInfo portInfo) {
        TrafficSelector.Builder sOutgoingBuilder = DefaultTrafficSelector.builder();
        TrafficSelector.Builder sIncomingBuilder = DefaultTrafficSelector.builder();

        sOutgoingBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(portInfo.vni())
                .matchIPSrc(IpPrefix.valueOf(portInfo.ip(), PREFIX_LENGTH));

        sIncomingBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchIPDst(IpPrefix.valueOf(floatingIP.floatingIpAddress(), PREFIX_LENGTH));

        removeRule(getGatewayNode().id(), sOutgoingBuilder, ForwardingObjective.Flag.VERSATILE, FLOATING_RULE_PRIORITY);
        removeRule(getGatewayNode().id(), sIncomingBuilder, ForwardingObjective.Flag.VERSATILE, FLOATING_RULE_PRIORITY);
    }

    /**
     * Populates L3 rules for east to west traffic.
     *
     * @param p target VM
     * @param targetList target openstackRouterInterfaces
     */
    public void populateL3Rules(OpenstackPort p, List<OpenstackRouterInterface> targetList) {
        Device device = getDevicefromOpenstackPort(p);
        Port port = getPortFromOpenstackPort(device, p);
        Ip4Address vmIp = (Ip4Address) p.fixedIps().values().iterator().next();

        if (port == null) {
            return;
        }

        targetList.forEach(routerInterface -> {
            OpenstackPort openstackPort = openstackService.port(routerInterface.portId());
            long vni = getVni(openstackPort.networkId());

            if (vmIp == null) {
                return;
            }

            populateL3RulestoSameNode(vmIp, p, port, device, vni);

            deviceService.getAvailableDevices().forEach(d -> {
                if (!d.equals(device) && !d.equals(getGatewayNode())) {
                    populateL3RulestoDifferentNode(vmIp, vni, d.id(), getHostIpfromOpenstackPort(p));
                }
            });
        });
    }

    // East-west rule on a remote node: tunnel traffic for vmIp to its host.
    private void populateL3RulestoDifferentNode(Ip4Address vmIp, long vni, DeviceId deviceId, Ip4Address hostIp) {
        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchTunnelId(vni)
                .matchIPDst(vmIp.toIpPrefix());
        tBuilder.extension(buildNiciraExtenstion(deviceId, hostIp), deviceId)
                .setOutput(getTunnelPort(deviceId));

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withPriority(ROUTING_RULE_PRIORITY)
                .withFlag(ForwardingObjective.Flag.SPECIFIC)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(deviceId, fo);
    }

    // East-west rule on the VM's own node: rewrite dst MAC and deliver locally.
    private void populateL3RulestoSameNode(Ip4Address vmIp, OpenstackPort p, Port port, Device device, long vni) {
        TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
        TrafficTreatment.Builder tBuilder = DefaultTrafficTreatment.builder();

        sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                .matchIPDst(vmIp.toIpPrefix())
                .matchTunnelId(vni);
        tBuilder.setEthDst(p.macAddress())
                .setOutput(port.number());

        ForwardingObjective fo = DefaultForwardingObjective.builder()
                .withSelector(sBuilder.build())
                .withTreatment(tBuilder.build())
                .withPriority(ROUTING_RULE_PRIORITY)
                .withFlag(ForwardingObjective.Flag.SPECIFIC)
                .fromApp(appId)
                .add();

        flowObjectiveService.forward(device.id(), fo);
    }

    // Finds the tap port on the device matching the Neutron port id prefix.
    private Port getPortFromOpenstackPort(Device device, OpenstackPort p) {
        String openstackPortName = PORTNAME_PREFIX_VM + p.id().substring(0, 11);
        // Constructed-name-first equals avoids an NPE on unannotated ports.
        return deviceService.getPorts(device.id())
                .stream()
                .filter(pt -> openstackPortName.equals(pt.annotations().value(PORTNAME)))
                .findAny()
                .orElse(null);
    }

    /**
     * Removes L3 rules for routerInterface events.
     *
     * @param vmIp Corresponding Vm ip
     * @param routerInterfaces Corresponding routerInterfaces
     */
    public void removeL3Rules(Ip4Address vmIp, List<OpenstackRouterInterface> routerInterfaces) {
        if (vmIp == null) {
            return;
        }

        OpenstackRoutingService routingService = getService(OpenstackRoutingService.class);

        deviceService.getAvailableDevices().forEach(d -> {
            if (!d.equals(getGatewayNode())) {
                routerInterfaces.forEach(routerInterface -> {
                    String networkId = routingService.networkIdForRouterInterface(routerInterface.portId());
                    long vni = getVni(networkId);

                    TrafficSelector.Builder sBuilder = DefaultTrafficSelector.builder();
                    sBuilder.matchEthType(Ethernet.TYPE_IPV4)
                            .matchIPDst(vmIp.toIpPrefix())
                            .matchTunnelId(vni);

                    removeRule(d.id(), sBuilder, ForwardingObjective.Flag.SPECIFIC, ROUTING_RULE_PRIORITY);
                });
            }
        });
    }
}
| |
package android.hardware;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class Camera
{
// Classes
  // Analysis stub of Camera.CameraInfo: data holder only, no real behavior.
  public static class CameraInfo
  {
    // Fields
    public static final int CAMERA_FACING_BACK = 0;
    public static final int CAMERA_FACING_FRONT = 1;
    // Presumably one of the CAMERA_FACING_* constants above — stub does not enforce it.
    public int facing;
    public int orientation;
    // Constructors
    public CameraInfo(){
    }
  }
  // Callback for preview frames; @CallBack marks the entry point for the analyzer.
  public static interface PreviewCallback
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onPreview")
    public void onPreviewFrame(byte [] arg1, Camera arg2);
  }
  // Callback for auto-focus completion; @CallBack marks the entry point for the analyzer.
  public static interface AutoFocusCallback
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onAutoFocus")
    public void onAutoFocus(boolean arg1, Camera arg2);
  }
  // Callback for the shutter event; @CallBack marks the entry point for the analyzer.
  public static interface ShutterCallback
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onShutter")
    public void onShutter();
  }
  // Callback for captured picture data; @CallBack marks the entry point for the analyzer.
  public static interface PictureCallback
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onPictureTaken")
    public void onPictureTaken(byte [] arg1, Camera arg2);
  }
  // Listener for zoom changes; @CallBack marks the entry point for the analyzer.
  public static interface OnZoomChangeListener
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onZoomChange")
    public void onZoomChange(int arg1, boolean arg2, Camera arg3);
  }
  // Listener for detected faces; @CallBack marks the entry point for the analyzer.
  public static interface FaceDetectionListener
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onFaceDetection")
    public void onFaceDetection(Camera.Face [] arg1, Camera arg2);
  }
  // Analysis stub of Camera.Face: plain data holder for face-detection results.
  public static class Face
  {
    // Fields
    public android.graphics.Rect rect;
    public int score;
    public int id;
    public android.graphics.Point leftEye;
    public android.graphics.Point rightEye;
    public android.graphics.Point mouth;
    // Constructors
    public Face(){
    }
  }
  // Callback for camera errors; @CallBack marks the entry point for the analyzer.
  public static interface ErrorCallback
  {
    // Methods
    @com.francetelecom.rd.stubs.annotation.CallBack("onError")
    public void onError(int arg1, Camera arg2);
  }
public class Size
{
// Fields
public int width;
public int height;
// Constructors
public Size(int arg1, int arg2){
}
// Methods
public boolean equals(java.lang.Object arg1){
return false;
}
public int hashCode(){
return 0;
}
}
  // Analysis stub of Camera.Area (metering/focus area). NOTE(stub): equals()
  // returns a constant and deliberately ignores the equals contract.
  public static class Area
  {
    // Fields
    public android.graphics.Rect rect;
    public int weight;
    // Constructors
    public Area(android.graphics.Rect arg1, int arg2){
    }
    // Methods
    public boolean equals(java.lang.Object arg1){
      return false;
    }
  }
public class Parameters
{
// Fields
public static final java.lang.String WHITE_BALANCE_AUTO = "auto";
public static final java.lang.String WHITE_BALANCE_INCANDESCENT = "incandescent";
public static final java.lang.String WHITE_BALANCE_FLUORESCENT = "fluorescent";
public static final java.lang.String WHITE_BALANCE_WARM_FLUORESCENT = "warm-fluorescent";
public static final java.lang.String WHITE_BALANCE_DAYLIGHT = "daylight";
public static final java.lang.String WHITE_BALANCE_CLOUDY_DAYLIGHT = "cloudy-daylight";
public static final java.lang.String WHITE_BALANCE_TWILIGHT = "twilight";
public static final java.lang.String WHITE_BALANCE_SHADE = "shade";
public static final java.lang.String EFFECT_NONE = "none";
public static final java.lang.String EFFECT_MONO = "mono";
public static final java.lang.String EFFECT_NEGATIVE = "negative";
public static final java.lang.String EFFECT_SOLARIZE = "solarize";
public static final java.lang.String EFFECT_SEPIA = "sepia";
public static final java.lang.String EFFECT_POSTERIZE = "posterize";
public static final java.lang.String EFFECT_WHITEBOARD = "whiteboard";
public static final java.lang.String EFFECT_BLACKBOARD = "blackboard";
public static final java.lang.String EFFECT_AQUA = "aqua";
public static final java.lang.String ANTIBANDING_AUTO = "auto";
public static final java.lang.String ANTIBANDING_50HZ = "50hz";
public static final java.lang.String ANTIBANDING_60HZ = "60hz";
public static final java.lang.String ANTIBANDING_OFF = "off";
public static final java.lang.String FLASH_MODE_OFF = "off";
public static final java.lang.String FLASH_MODE_AUTO = "auto";
public static final java.lang.String FLASH_MODE_ON = "on";
public static final java.lang.String FLASH_MODE_RED_EYE = "red-eye";
public static final java.lang.String FLASH_MODE_TORCH = "torch";
public static final java.lang.String SCENE_MODE_AUTO = "auto";
public static final java.lang.String SCENE_MODE_ACTION = "action";
public static final java.lang.String SCENE_MODE_PORTRAIT = "portrait";
public static final java.lang.String SCENE_MODE_LANDSCAPE = "landscape";
public static final java.lang.String SCENE_MODE_NIGHT = "night";
public static final java.lang.String SCENE_MODE_NIGHT_PORTRAIT = "night-portrait";
public static final java.lang.String SCENE_MODE_THEATRE = "theatre";
public static final java.lang.String SCENE_MODE_BEACH = "beach";
public static final java.lang.String SCENE_MODE_SNOW = "snow";
public static final java.lang.String SCENE_MODE_SUNSET = "sunset";
public static final java.lang.String SCENE_MODE_STEADYPHOTO = "steadyphoto";
public static final java.lang.String SCENE_MODE_FIREWORKS = "fireworks";
public static final java.lang.String SCENE_MODE_SPORTS = "sports";
public static final java.lang.String SCENE_MODE_PARTY = "party";
public static final java.lang.String SCENE_MODE_CANDLELIGHT = "candlelight";
public static final java.lang.String SCENE_MODE_BARCODE = "barcode";
public static final java.lang.String FOCUS_MODE_AUTO = "auto";
public static final java.lang.String FOCUS_MODE_INFINITY = "infinity";
public static final java.lang.String FOCUS_MODE_MACRO = "macro";
public static final java.lang.String FOCUS_MODE_FIXED = "fixed";
public static final java.lang.String FOCUS_MODE_EDOF = "edof";
public static final java.lang.String FOCUS_MODE_CONTINUOUS_VIDEO = "continuous-video";
public static final java.lang.String FOCUS_MODE_CONTINUOUS_PICTURE = "continuous-picture";
public static final int FOCUS_DISTANCE_NEAR_INDEX = 0;
public static final int FOCUS_DISTANCE_OPTIMAL_INDEX = 1;
public static final int FOCUS_DISTANCE_FAR_INDEX = 2;
public static final int PREVIEW_FPS_MIN_INDEX = 0;
public static final int PREVIEW_FPS_MAX_INDEX = 1;
// Constructors
private Parameters(){
}
// Methods
public java.lang.String get(java.lang.String arg1){
return (java.lang.String) null;
}
public int getInt(java.lang.String arg1){
return 0;
}
public void remove(java.lang.String arg1){
}
public void set(java.lang.String arg1, java.lang.String arg2){
}
public void set(java.lang.String arg1, int arg2){
}
public void dump(){
}
public void setRotation(int arg1){
}
public void setZoom(int arg1){
}
public java.lang.String flatten(){
return (java.lang.String) null;
}
public void unflatten(java.lang.String arg1){
}
public void setPreviewSize(int arg1, int arg2){
}
public Camera.Size getPreviewSize(){
return (Camera.Size) null;
}
public java.util.List<Camera.Size> getSupportedPreviewSizes(){
return (java.util.List) null;
}
public java.util.List<Camera.Size> getSupportedVideoSizes(){
return (java.util.List) null;
}
public Camera.Size getPreferredPreviewSizeForVideo(){
return (Camera.Size) null;
}
public void setJpegThumbnailSize(int arg1, int arg2){
}
public Camera.Size getJpegThumbnailSize(){
return (Camera.Size) null;
}
public java.util.List<Camera.Size> getSupportedJpegThumbnailSizes(){
return (java.util.List) null;
}
public void setJpegThumbnailQuality(int arg1){
}
public int getJpegThumbnailQuality(){
return 0;
}
public void setJpegQuality(int arg1){
}
public int getJpegQuality(){
return 0;
}
public void setPreviewFrameRate(int arg1){
}
public int getPreviewFrameRate(){
return 0;
}
public java.util.List<java.lang.Integer> getSupportedPreviewFrameRates(){
return (java.util.List) null;
}
public void setPreviewFpsRange(int arg1, int arg2){
}
public void getPreviewFpsRange(int [] arg1){
}
public java.util.List<int []> getSupportedPreviewFpsRange(){
return (java.util.List) null;
}
public void setPreviewFormat(int arg1){
}
public int getPreviewFormat(){
return 0;
}
public java.util.List<java.lang.Integer> getSupportedPreviewFormats(){
return (java.util.List) null;
}
public void setPictureSize(int arg1, int arg2){
}
public Camera.Size getPictureSize(){
return (Camera.Size) null;
}
public java.util.List<Camera.Size> getSupportedPictureSizes(){
return (java.util.List) null;
}
public void setPictureFormat(int arg1){
}
public int getPictureFormat(){
return 0;
}
public java.util.List<java.lang.Integer> getSupportedPictureFormats(){
return (java.util.List) null;
}
public void setGpsLatitude(double arg1){
}
public void setGpsLongitude(double arg1){
}
public void setGpsAltitude(double arg1){
}
public void setGpsTimestamp(long arg1){
}
public void setGpsProcessingMethod(java.lang.String arg1){
}
public void removeGpsData(){
}
public java.lang.String getWhiteBalance(){
return (java.lang.String) null;
}
public void setWhiteBalance(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedWhiteBalance(){
return (java.util.List) null;
}
public java.lang.String getColorEffect(){
return (java.lang.String) null;
}
public void setColorEffect(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedColorEffects(){
return (java.util.List) null;
}
public java.lang.String getAntibanding(){
return (java.lang.String) null;
}
public void setAntibanding(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedAntibanding(){
return (java.util.List) null;
}
public java.lang.String getSceneMode(){
return (java.lang.String) null;
}
public void setSceneMode(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedSceneModes(){
return (java.util.List) null;
}
public java.lang.String getFlashMode(){
return (java.lang.String) null;
}
public void setFlashMode(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedFlashModes(){
return (java.util.List) null;
}
public java.lang.String getFocusMode(){
return (java.lang.String) null;
}
public void setFocusMode(java.lang.String arg1){
}
public java.util.List<java.lang.String> getSupportedFocusModes(){
return (java.util.List) null;
}
public float getFocalLength(){
return 0.0f;
}
public float getHorizontalViewAngle(){
return 0.0f;
}
public float getVerticalViewAngle(){
return 0.0f;
}
public int getExposureCompensation(){
return 0;
}
public void setExposureCompensation(int arg1){
}
public int getMaxExposureCompensation(){
return 0;
}
public int getMinExposureCompensation(){
return 0;
}
public float getExposureCompensationStep(){
return 0.0f;
}
public void setAutoExposureLock(boolean arg1){
}
public boolean getAutoExposureLock(){
return false;
}
public boolean isAutoExposureLockSupported(){
return false;
}
public void setAutoWhiteBalanceLock(boolean arg1){
}
public boolean getAutoWhiteBalanceLock(){
return false;
}
public boolean isAutoWhiteBalanceLockSupported(){
return false;
}
public int getZoom(){
return 0;
}
public boolean isZoomSupported(){
return false;
}
public int getMaxZoom(){
return 0;
}
public java.util.List<java.lang.Integer> getZoomRatios(){
return (java.util.List) null;
}
public boolean isSmoothZoomSupported(){
return false;
}
public void getFocusDistances(float [] arg1){
}
public int getMaxNumFocusAreas(){
return 0;
}
public java.util.List<Camera.Area> getFocusAreas(){
return (java.util.List) null;
}
public void setFocusAreas(java.util.List<Camera.Area> arg1){
}
public int getMaxNumMeteringAreas(){
return 0;
}
public java.util.List<Camera.Area> getMeteringAreas(){
return (java.util.List) null;
}
public void setMeteringAreas(java.util.List<Camera.Area> arg1){
}
public int getMaxNumDetectedFaces(){
return 0;
}
public void setRecordingHint(boolean arg1){
}
public boolean isVideoSnapshotSupported(){
return false;
}
public void setVideoStabilization(boolean arg1){
}
public boolean getVideoStabilization(){
return false;
}
public boolean isVideoStabilizationSupported(){
return false;
}
}
// Fields
// Broadcast action strings and error codes surfaced through Camera.ErrorCallback.
public static final java.lang.String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";
public static final java.lang.String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";
public static final int CAMERA_ERROR_UNKNOWN = 1;
public static final int CAMERA_ERROR_SERVER_DIED = 100;
// Constructors
// Package-private stub constructor; clients obtain instances via the static open() factories.
Camera(int arg1){
}
// Methods
// NOTE: auto-generated stub implementations; all bodies are no-ops.
// The @CallBackRegister / @UseRule annotations presumably feed a static-analysis
// tool tracking callback registration and API usage rules — TODO confirm with the
// com.francetelecom.rd.stubs framework documentation.
protected void finalize(){
}
public final void lock(){
}
public final void release(){
}
// --- Static factories; stubs always return null
public static Camera open(int arg1){
return (Camera) null;
}
public static Camera open(){
return (Camera) null;
}
public final void unlock(){
}
public Camera.Parameters getParameters(){
return (Camera.Parameters) null;
}
public final void reconnect() throws java.io.IOException{
}
public void setParameters(Camera.Parameters arg1){
}
public final void setDisplayOrientation(int arg1){
}
public final void setErrorCallback(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onError") Camera.ErrorCallback arg1){
}
public final void setPreviewDisplay(android.view.SurfaceHolder arg1) throws java.io.IOException{
}
public static int getNumberOfCameras(){
return 0;
}
public static void getCameraInfo(int arg1, Camera.CameraInfo arg2){
}
public final void setPreviewTexture(android.graphics.SurfaceTexture arg1) throws java.io.IOException{
}
@com.francetelecom.rd.stubs.annotation.UseRule(value = "Camera.startPreview", report = "-")
public final void startPreview(){
}
public final void stopPreview(){
}
public final boolean previewEnabled(){
return false;
}
// --- Preview frame callbacks (one-shot, continuous, and buffered variants)
public final void setPreviewCallback(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onPreview") Camera.PreviewCallback arg1){
}
public final void setOneShotPreviewCallback(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onPreview") Camera.PreviewCallback arg1){
}
public final void setPreviewCallbackWithBuffer(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onPreview") Camera.PreviewCallback arg1){
}
public final void addCallbackBuffer(byte [] arg1){
}
public final void addRawImageCallbackBuffer(byte [] arg1){
}
public final void autoFocus(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onAutoFocus") Camera.AutoFocusCallback arg1){
}
public final void cancelAutoFocus(){
}
// --- Capture: 3-callback (shutter, raw, jpeg) and 4-callback (adds postview) variants
@com.francetelecom.rd.stubs.annotation.UseRule(value = "Camera.takePicture-1", report = "-")
public final void takePicture(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onShutter") Camera.ShutterCallback arg1, @com.francetelecom.rd.stubs.annotation.CallBackRegister("onPictureTaken") Camera.PictureCallback arg2, @com.francetelecom.rd.stubs.annotation.CallBackRegister("onPictureTaken") Camera.PictureCallback arg3){
}
@com.francetelecom.rd.stubs.annotation.UseRule(value = "Camera.takePicture-2", report = "-")
public final void takePicture(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onShutter") Camera.ShutterCallback arg1, @com.francetelecom.rd.stubs.annotation.CallBackRegister("onPictureTaken") Camera.PictureCallback arg2, @com.francetelecom.rd.stubs.annotation.CallBackRegister("onPictureTaken") Camera.PictureCallback arg3, @com.francetelecom.rd.stubs.annotation.CallBackRegister("onPictureTaken") Camera.PictureCallback arg4){
}
public final void startSmoothZoom(int arg1){
}
public final void stopSmoothZoom(){
}
public final void setZoomChangeListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onZoomChange") Camera.OnZoomChangeListener arg1){
}
public final void setFaceDetectionListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onFaceDetection") Camera.FaceDetectionListener arg1){
}
public final void startFaceDetection(){
}
public final void stopFaceDetection(){
}
}
| |
/* ***** BEGIN LICENSE BLOCK *****
Version: Apache 2.0/GPL 3.0/LGPL 3.0
CCT - Computational Chemistry Tools
Jamberoo - Java Molecules Editor
Copyright 2008-2015 Dr. Vladislav Vasilyev
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributor(s):
Dr. Vladislav Vasilyev <vvv900@gmail.com> (original author)
Alternatively, the contents of this file may be used under the terms of
either the GNU General Public License Version 2 or later (the "GPL"), or
the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
in which case the provisions of the GPL or the LGPL are applicable instead
of those above. If you wish to allow use of your version of this file only
under the terms of either the GPL or the LGPL, and not to allow others to
use your version of this file under the terms of the Apache 2.0, indicate your
decision by deleting the provisions above and replace them with the notice
and other provisions required by the GPL or the LGPL. If you do not delete
the provisions above, a recipient may use your version of this file under
the terms of any one of the Apache 2.0, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package cct.modelling;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import cct.interfaces.AtomInterface;
import cct.interfaces.BondInterface;
import cct.interfaces.ForceFieldInterface;
import cct.interfaces.MoleculeInterface;
import cct.sff.BondParam;
import cct.sff.SimpleForceField;
import cct.vecmath.Point3f;
import java.io.StringWriter;
import java.util.Map;
import java.util.logging.Level;
/**
* <p>Title: </p>
*
* <p>Description: </p>
*
* <p>Copyright: Copyright (c) 2006</p>
*
* <p>Company: ANU</p>
*
* @author Dr. V. Vasilyev
* @version 1.0
*/
public class FFMolecule {

  static final Logger logger = Logger.getLogger(FFMolecule.class.getCanonicalName());

  /** Identifier of the harmonic bond-stretch term K*(r - r0). */
  static public final int HARMONIC_BOND_TERM = 0;
  /** Fallback bond force constant used when atom types cannot be resolved. */
  static public float defaultFk = 300.0f;

  /** Molecule this force-field description is built for. */
  protected MoleculeInterface Molecule = null;
  // --- Internal energy related variables
  // [0] = Integer term type (HARMONIC_BOND_TERM), [1] = BondStretchPairs
  protected Object BondStretchInteractions[] = null;
  // [0] = Integer term type, [1] = AngleBendsArray
  protected Object AngleBendInteractions[] = null;
  /** Whether 1-4 nonbonded interactions are generated. */
  protected boolean Calculate_1_4 = true;
  protected ForceFieldInterface forceField;
  List Torsions = null;             // elements: Torsion
  List torsionInteractions = null;  // elements: TorsionParameter
  List oneFourInteractions = null;  // elements: NonbondedPair (1-4 pairs)
  List nonbodedPairs = null;        // elements: NonbondedPair (1>4 pairs); (sic) name kept for compatibility
  List<MolecularPlane> surfaces = null;
  Nonbonded15Table nonbondedTable = null;
  // Inverse 1-4 scale factors taken from the force field in applyForceField()
  float SCNB = 1, SCEE = 1;

  // No-arg construction is internal only (see main()); a molecule is required otherwise.
  private FFMolecule() {
  }

  /**
   * @param molec MoleculeInterface - molecule to build force-field interactions for
   */
  public FFMolecule(MoleculeInterface molec) {
    Molecule = molec;
  }

  /**
   * Selects the force field and caches its inverse 1-4 scale factors.
   *
   * @param ff ForceFieldInterface - force field to use
   */
  public void applyForceField(ForceFieldInterface ff) {
    forceField = ff;
    SCNB = 1.0f / ff.get14NBScale();
    SCEE = 1.0f / ff.get14ElsScale();
  }

  public boolean isCalculate14() {
    return Calculate_1_4;
  }

  public List get14Interactions() {
    return oneFourInteractions;
  }

  public List getTorsionInteractions() {
    return torsionInteractions;
  }

  public List getNonbondInteractions() {
    return nonbodedPairs;
  }

  public Object[] getBondStretchInteractions() {
    return BondStretchInteractions;
  }

  public Object[] getAngleBendInteractions() {
    return AngleBendInteractions;
  }

  public MoleculeInterface getMolecule() {
    return Molecule;
  }

  /**
   * Builds all force-field interaction lists for the molecule: bond stretches,
   * angle bends, torsions, 1-4 pairs, 1>4 nonbonded pairs and (optionally)
   * molecular surface parameters.
   *
   * @throws Exception if the molecule is missing or parameter assignment fails
   */
  public void formFFParameters() throws Exception {
    long start;
    double secs;
    // --- Form Bond stretch parameters
    // BUG FIX: arguments were previously passed as (HARMONIC_BOND_TERM, SFF_PARAMETERS),
    // i.e. swapped relative to the (paramType, intType) signature; this only worked
    // by accident when both constants have the same value.
    formBondStretchInteractions(MolecularEnergy.SFF_PARAMETERS, HARMONIC_BOND_TERM);
    // --- Form angle bend parameters
    start = System.currentTimeMillis();
    AngleBendsArray angles = findDynamicAngles(Molecule);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info("Number of angle bend interactions: " + angles.getSize() + " Elapsed time: "
        + String.format("%10.4f secs", secs));
    start = System.currentTimeMillis();
    formAngleBendInteractions(MolecularEnergy.SFF_PARAMETERS, angles);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info(" Elapsed time for forming angle bend parameters: " + String.format("%10.4f secs", secs));
    // --- Form regular torsions
    start = System.currentTimeMillis();
    Torsions = findDynamicTorsions(Molecule);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info("Number of torsions: " + Torsions.size() + " Elapsed time: "
        + String.format("%10.4f secs", secs));
    if (logger.isLoggable(Level.INFO)) {
      StringWriter sr = new StringWriter();
      for (int i = 0; i < Torsions.size(); i++) {
        Torsion torsion = (Torsion) Torsions.get(i);
        sr.write(String.format("%3d - %3d - %3d - %3d\n", torsion.ijkl[0], torsion.ijkl[1], torsion.ijkl[2], torsion.ijkl[3]));
      }
      logger.info(sr.toString());
    }
    start = System.currentTimeMillis();
    torsionInteractions = formTorsionInteractions(MolecularEnergy.SFF_PARAMETERS, Torsions);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info(" Elapsed time for assigning torsion parameters: " + String.format("%10.4f secs", secs));
    // --- Form 1-4 pairs...
    if (Calculate_1_4) {
      start = System.currentTimeMillis();
      oneFourInteractions = find14Interactions(Molecule, Torsions);
      secs = (System.currentTimeMillis() - start) / 1000.0;
      logger.info("Number of 1-4 interactions: " + oneFourInteractions.size() + " Elapsed time: "
          + String.format("%10.4f secs", secs));
      start = System.currentTimeMillis();
      setupNonbondedInteractions(MolecularEnergy.SFF_PARAMETERS, oneFourInteractions);
      secs = (System.currentTimeMillis() - start) / 1000.0;
      logger.info(" Elapsed time for assigning 1-4 NB parameters: " + String.format("%10.4f secs", secs));
    }
    // --- Form 1>4 interactions: mark 1-2, 1-3 and 1-4 pairs as excluded,
    // then collect everything that remains.
    start = System.currentTimeMillis();
    nonbondedTable = new Nonbonded15Table(Molecule.getNumberOfAtoms());
    // Exclude bonded (1-2) pairs
    BondStretchPairs bondPairs = (BondStretchPairs) BondStretchInteractions[1];
    List bsp = bondPairs.getBondStretchPairs();
    for (int i = 0; i < bsp.size(); i++) {
      HarmonicBondStretch hbs = (HarmonicBondStretch) bsp.get(i);
      nonbondedTable.set15(hbs.i, hbs.j, false);
    }
    // Exclude 1-3 pairs (terminal atoms of each valence angle)
    for (int i = 0; i < angles.getSize(); i++) {
      AngleBend ab = angles.getAngleBend(i);
      nonbondedTable.set15(ab.getI(), ab.getK(), false);
    }
    // Exclude 1-4 pairs (already handled above, when enabled)
    if (Calculate_1_4) {
      for (int i = 0; i < oneFourInteractions.size(); i++) {
        NonbondedPair nb = (NonbondedPair) oneFourInteractions.get(i);
        nonbondedTable.set15(nb.ij[0], nb.ij[1], false);
      }
    }
    nonbodedPairs = findNonbondedPairs(Molecule, nonbondedTable);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info("Number of nonbonded interactions: " + nonbodedPairs.size() + " Elapsed time: "
        + String.format("%10.4f secs", secs));
    start = System.currentTimeMillis();
    setupNonbondedInteractions(MolecularEnergy.SFF_PARAMETERS, nonbodedPairs);
    secs = (System.currentTimeMillis() - start) / 1000.0;
    logger.info(" Elapsed time for assigning 1>4 NB parameters: " + String.format("%10.4f secs", secs));
    // --- Surface parameters, if any
    Object s = Molecule.getProperty(MoleculeInterface.SurfacesProperty);
    if (s != null) {
      System.out.println("Molecule has surface property of a class " + s.getClass().getCanonicalName());
      surfaces = new ArrayList<MolecularPlane>();
      try {
        Map ss = (Map) s;
        System.out.println("Number of surface(s): " + ss.size());
        for (Object key : ss.keySet()) {
          String name = (String) key;
          MolecularPlane plane = (MolecularPlane) ss.get(key);
          System.out.println(" Surface " + name);
          forceField.getSurfaceParam(plane);
          System.out.println(" Surface Params " + plane.getSurfaceParameter().getR() + " & " + plane.getSurfaceParameter().getW());
          setupNonbondedInteractions(Molecule, plane);
          surfaces.add(plane);
          System.out.println("Plane " + name + ": " + plane);
        }
      } catch (Exception ex) {
        // Best-effort: surfaces are optional, so a malformed property is logged, not fatal
        logger.severe("Error Getting surface(s) from molecule: " + ex.getMessage());
      }
    }
  }

  /**
   * Builds the bond-stretch interaction list for every bond with at least one
   * dynamic atom. Falls back to the current bond length and {@link #defaultFk}
   * when an atom type cannot be determined.
   *
   * @param paramType int - must be MolecularEnergy.SFF_PARAMETERS
   * @param intType int - type of bond stretch interactions: 0 - harmonic K*(r-r0)
   * @return int - number of bond stretch interactions
   * @throws Exception on null molecule or unsupported parameter/interaction types
   */
  public int formBondStretchInteractions(int paramType, int intType) throws
      Exception {
    if (Molecule == null) {
      throw new Exception("Forming Bond Stretch Interactions: null pointer for molecule");
    }
    if (paramType != MolecularEnergy.SFF_PARAMETERS) {
      throw new Exception("Forming Bond Stretch Interactions: unknown type of parameters");
    }
    if (intType != HARMONIC_BOND_TERM) {
      throw new Exception("Forming Bond Stretch Interactions: unknown type of bond interactions");
    }
    // --- Go through all bonds
    long start = System.currentTimeMillis();
    BondStretchInteractions = new Object[2];
    BondStretchPairs bondPairs = new BondStretchPairs();
    BondStretchInteractions[0] = Integer.valueOf(HARMONIC_BOND_TERM);
    for (int i = 0; i < Molecule.getNumberOfBonds(); i++) {
      BondInterface bond = Molecule.getBondInterface(i);
      AtomInterface a1 = bond.getIAtomInterface();
      AtomInterface a2 = bond.getJAtomInterface();
      if (!a1.isDynamic() && !a2.isDynamic()) {
        continue; // both atoms frozen: no dynamic term required
      }
      int index_1 = Molecule.getAtomIndex(a1);
      int index_2 = Molecule.getAtomIndex(a2);
      String type_1 = (String) a1.getProperty(AtomInterface.CCT_ATOM_TYPE);
      if (type_1 == null) {
        type_1 = cct.modelling.Molecule.guessAtomType(a1, AtomInterface.CCT_ATOM_TYPE, CCTAtomTypes.getElementMapping());
      }
      String type_2 = (String) a2.getProperty(AtomInterface.CCT_ATOM_TYPE);
      if (type_2 == null) {
        type_2 = cct.modelling.Molecule.guessAtomType(a2, AtomInterface.CCT_ATOM_TYPE, CCTAtomTypes.getElementMapping());
      }
      if (type_1 == null || type_2 == null) {
        // Unknown atom type(s): keep the bond at its current length with a default force constant
        float r0 = (float) Point3f.distance(a1, a2);
        bondPairs.addBondPair(new HarmonicBondStretch(index_1, index_2, defaultFk, r0));
      } else {
        BondParam bp = forceField.getBondStretchParam(type_1, type_2);
        bondPairs.addBondPair(new HarmonicBondStretch(index_1, index_2, bp.Fk, bp.R0));
      }
    }
    BondStretchInteractions[1] = bondPairs;
    float secs = (System.currentTimeMillis() - start) / 1000.0f;
    logger.info("Number of bond stretch interactions: " + bondPairs.getSize() + " Elapsed time: " + secs + " secs");
    if (logger.isLoggable(Level.INFO)) {
      List pairs = bondPairs.getBondStretchPairs();
      StringWriter sr = new StringWriter();
      for (int i = 0; i < pairs.size(); i++) {
        HarmonicBondStretch hbs = (HarmonicBondStretch) pairs.get(i);
        sr.write((i + 1) + " : " + hbs.i + " " + hbs.j + " " + hbs.Fk + " " + hbs.R0 + "\n");
      }
      logger.info(sr.toString());
    }
    // BUG FIX: previously always returned 0; now returns the actual count as documented.
    return bondPairs.getSize();
  }

  /**
   * Assigns nonbonded parameters to a list of NonbondedPair objects.
   *
   * @param paramType int - must be MolecularEnergy.SFF_PARAMETERS
   * @param nonbonded List - pairs to parameterize; null is silently ignored
   * @throws Exception on unsupported parameter type
   */
  public void setupNonbondedInteractions(int paramType, List nonbonded) throws Exception {
    if (nonbonded == null) {
      return;
    }
    if (paramType != MolecularEnergy.SFF_PARAMETERS) {
      throw new Exception("setupNonbondedInteractions: unknown type of parameters");
    }
    SimpleForceField.setupNonbondedParams(Molecule, nonbonded);
  }

  /**
   * Assigns nonbonded parameters for a molecular surface plane.
   * NOTE: delegates with the instance field Molecule, not the mol argument
   * (original behavior preserved).
   */
  public void setupNonbondedInteractions(MoleculeInterface mol, MolecularPlane plane) throws Exception {
    SimpleForceField.setupNonbondedParams(Molecule, plane);
  }

  /**
   * Assigns force-field parameters to the given angle bends and stores them in
   * AngleBendInteractions.
   *
   * @param paramType int - must be MolecularEnergy.SFF_PARAMETERS
   * @param angles AngleBendsArray - angles to parameterize
   * @return int - number of angle bend interactions
   * @throws Exception on null molecule or unsupported parameter type
   */
  public int formAngleBendInteractions(int paramType, AngleBendsArray angles) throws
      Exception {
    if (Molecule == null) {
      throw new Exception("formAngleBendInteractions: null pointer for molecule");
    }
    if (paramType != MolecularEnergy.SFF_PARAMETERS) {
      throw new Exception("formAngleBendInteractions: unknown type of parameters");
    }
    AngleBendInteractions = new Object[2];
    AngleBendInteractions[0] = Integer.valueOf(HARMONIC_BOND_TERM);
    forceField.getAngleBendParams(Molecule, angles);
    AngleBendInteractions[1] = angles;
    if (logger.isLoggable(Level.INFO)) {
      StringWriter sr = new StringWriter();
      logger.info("Number of angle bend interactions: "
          + angles.getSize() + "\nAngle Bend Parameters: ");
      for (int i = 0; i < angles.getSize(); i++) {
        AngleBend ab = angles.getAngleBend(i);
        sr.write((i + 1) + " : " + ab.getI() + " " + ab.getJ()
            + " " + ab.getK() + " "
            + (ab.Fk / SimpleForceField.FK_GRAD_2_RAD_FACTOR)
            + " "
            + (ab.Theta * (float) Point3f.RADIANS_TO_DEGREES) + "\n");
      }
      logger.info(sr.toString());
    }
    return angles.getSize();
  }

  /**
   * Assigns force-field parameters to the given torsions and stores the result
   * in torsionInteractions.
   *
   * @param paramType int - must be MolecularEnergy.SFF_PARAMETERS
   * @param torsions List - torsions (class Torsion) to parameterize
   * @return List - torsion interactions (class TorsionParameter)
   * @throws Exception on null molecule or unsupported parameter type
   */
  public List formTorsionInteractions(int paramType, List torsions) throws
      Exception {
    if (Molecule == null) {
      throw new Exception("formTorsionInteractions: null pointer for molecule");
    }
    if (paramType != MolecularEnergy.SFF_PARAMETERS) {
      throw new Exception("formTorsionInteractions: unknown type of parameters");
    }
    // BUG FIX: previously used the field Torsions and ignored the torsions argument.
    torsionInteractions = forceField.getTorsionParams(Molecule, torsions);
    if (logger.isLoggable(Level.INFO)) {
      StringWriter sr = new StringWriter();
      sr.write("Number of torsion interactions: " + torsionInteractions.size()
          + "\nTorsion Parameters: \n");
      for (int i = 0; i < torsionInteractions.size(); i++) {
        TorsionParameter tp = (TorsionParameter) torsionInteractions.get(i);
        sr.write((i + 1) + " : " + tp.getI() + " " + tp.getJ() + " " + tp.getK() + " " + tp.getL() + " ");
        List serie = tp.getSerie();
        for (int j = 0; j < serie.size(); j++) {
          TorsionTerm tt = (TorsionTerm) serie.get(j);
          sr.write(tt.V + " " + tt.Periodicity + " " + (tt.Phase * (float) Point3f.RADIANS_TO_DEGREES));
        }
        sr.write("\n");
      }
      logger.info(sr.toString());
    }
    return torsionInteractions;
  }

  /**
   * Finds all valent angles where at least one atom forming angle is dynamic.
   * Angles are emitted once (i &lt; k ordering) and 3-membered rings are skipped.
   *
   * @param molec MoleculeInterface - Molecule
   * @return AngleBendsArray
   */
  public static AngleBendsArray findDynamicAngles(MoleculeInterface molec) {
    AngleBendsArray aba = new AngleBendsArray();
    if (molec.getNumberOfAtoms() < 3) {
      return aba;
    }
    for (int i = 0; i < molec.getNumberOfAtoms(); i++) {
      AtomInterface a1 = molec.getAtomInterface(i);
      int index_1 = molec.getAtomIndex(a1);
      List a1_list = a1.getBondedToAtoms();
      for (int j = 0; j < a1_list.size(); j++) {
        AtomInterface a2 = (AtomInterface) a1_list.get(j);
        List a2_list = a2.getBondedToAtoms();
        for (int k = 0; k < a2_list.size(); k++) {
          AtomInterface a3 = (AtomInterface) a2_list.get(k);
          if (!a1.isDynamic() && !a2.isDynamic() && !a3.isDynamic()) {
            continue;
          }
          if (a1 == a3) {
            continue; // degenerate angle i-j-i
          }
          int index_3 = molec.getAtomIndex(a3);
          if (index_3 < index_1) {
            continue; // emit each angle only once (ascending end indices)
          }
          // Check for 3-member ring
          if (a1.getBondToAtom(a3) != null) {
            continue;
          }
          aba.addAngleBend(new AngleBend(index_1, molec.getAtomIndex(a2), index_3, -1, -1));
        }
      }
    }
    if (logger.isLoggable(Level.INFO)) {
      StringWriter sr = new StringWriter();
      for (int i = 0; i < aba.getSize(); i++) {
        if (i == 0) {
          sr.write("\nAngles: \n");
        }
        AngleBend ab = aba.getAngleBend(i);
        sr.write((i + 1) + " : " + ab.ijk[0] + " " + ab.ijk[1]
            + " "
            + ab.ijk[2] + " " + ab.Fk + " " + ab.Theta + "\n");
      }
      logger.info(sr.toString());
    }
    return aba;
  }

  /**
   * Finds all regular torsions in molecule with at least one dynamic atom.
   * Torsions are emitted once (l-end index &gt;= i-end index) and 4-membered
   * rings are skipped.
   *
   * @param molec MoleculeInterface - Molecule
   * @return List - array of class Torsion
   */
  public static List findDynamicTorsions(MoleculeInterface molec) {
    List torsions = new ArrayList();
    for (int i = 0; i < molec.getNumberOfAtoms(); i++) {
      AtomInterface a1 = molec.getAtomInterface(i);
      int index_1 = molec.getAtomIndex(a1);
      List a1_list = a1.getBondedToAtoms();
      for (int j = 0; j < a1_list.size(); j++) {
        AtomInterface a2 = (AtomInterface) a1_list.get(j);
        int index_2 = molec.getAtomIndex(a2);
        List a2_list = a2.getBondedToAtoms();
        for (int k = 0; k < a2_list.size(); k++) {
          AtomInterface a3 = (AtomInterface) a2_list.get(k);
          if (a3 == a1) {
            continue; // would fold back onto the first atom
          }
          int index_3 = molec.getAtomIndex(a3);
          List a3_list = a3.getBondedToAtoms();
          for (int l = 0; l < a3_list.size(); l++) {
            AtomInterface a4 = (AtomInterface) a3_list.get(l);
            if (a4 == a2 || a4 == a1) {
              continue;
            }
            if (!a1.isDynamic() && !a2.isDynamic() && !a3.isDynamic()
                && !a4.isDynamic()) {
              continue;
            }
            // Check for 4-member ring
            if (a1.getBondToAtom(a4) != null) {
              continue;
            }
            int index_4 = molec.getAtomIndex(a4);
            if (index_4 < index_1) {
              continue; // emit each torsion only once
            }
            torsions.add(new Torsion(index_1, index_2, index_3, index_4));
          }
        }
      }
    }
    return torsions;
  }

  /**
   * Derives 1-4 nonbonded pairs (the two terminal atoms of every torsion)
   * where at least one terminal atom is dynamic.
   *
   * @param molec MoleculeInterface - Molecule
   * @param torsions List - torsions (class Torsion) to take terminal pairs from
   * @return List - 1-4 pairs (class NonbondedPair)
   */
  public static List find14Interactions(MoleculeInterface molec, List torsions) {
    List int14 = new ArrayList();
    for (int i = 0; i < torsions.size(); i++) {
      Torsion tor = (Torsion) torsions.get(i);
      AtomInterface a1 = molec.getAtomInterface(tor.ijkl[0]);
      AtomInterface a4 = molec.getAtomInterface(tor.ijkl[3]);
      if ((!a1.isDynamic()) && (!a4.isDynamic())) {
        continue;
      }
      int14.add(new NonbondedPair(tor.ijkl[0], tor.ijkl[3]));
    }
    return int14;
  }

  /**
   * Collects all atom pairs not excluded in the 1-5 table (i.e. not 1-2, 1-3
   * or 1-4) where at least one atom is dynamic.
   *
   * @param molec MoleculeInterface - Molecule
   * @param nbTable Nonbonded15Table - exclusion table; get15(i,j) == true means the pair is included
   * @return List - nonbonded pairs (class NonbondedPair)
   */
  public static List findNonbondedPairs(MoleculeInterface molec, Nonbonded15Table nbTable) {
    List nb = new ArrayList();
    for (int i = 1; i < molec.getNumberOfAtoms(); i++) {
      AtomInterface a1 = molec.getAtomInterface(i);
      for (int j = 0; j < i; j++) {
        AtomInterface a2 = molec.getAtomInterface(j);
        if (!a1.isDynamic() && !a2.isDynamic()) {
          continue;
        }
        if (nbTable.get15(i, j)) {
          nb.add(new NonbondedPair(molec.getAtomIndex(a1), molec.getAtomIndex(a2)));
        }
      }
    }
    return nb;
  }

  // Smoke-test entry point; exercises only the private constructor.
  public static void main(String[] args) {
    FFMolecule ffmolecule = new FFMolecule();
  }

  public List<MolecularPlane> getSurfaces() {
    return surfaces;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sqlexec;
import org.apache.flink.sql.parser.ddl.SqlAlterDatabase;
import org.apache.flink.sql.parser.ddl.SqlAlterFunction;
import org.apache.flink.sql.parser.ddl.SqlAlterTable;
import org.apache.flink.sql.parser.ddl.SqlAlterTableProperties;
import org.apache.flink.sql.parser.ddl.SqlAlterTableRename;
import org.apache.flink.sql.parser.ddl.SqlCreateDatabase;
import org.apache.flink.sql.parser.ddl.SqlCreateFunction;
import org.apache.flink.sql.parser.ddl.SqlCreateTable;
import org.apache.flink.sql.parser.ddl.SqlCreateView;
import org.apache.flink.sql.parser.ddl.SqlDropDatabase;
import org.apache.flink.sql.parser.ddl.SqlDropFunction;
import org.apache.flink.sql.parser.ddl.SqlDropTable;
import org.apache.flink.sql.parser.ddl.SqlDropView;
import org.apache.flink.sql.parser.ddl.SqlTableColumn;
import org.apache.flink.sql.parser.ddl.SqlTableOption;
import org.apache.flink.sql.parser.ddl.SqlUseCatalog;
import org.apache.flink.sql.parser.ddl.SqlUseDatabase;
import org.apache.flink.sql.parser.dml.RichSqlInsert;
import org.apache.flink.sql.parser.dql.SqlShowCatalogs;
import org.apache.flink.sql.parser.dql.SqlShowDatabases;
import org.apache.flink.sql.parser.dql.SqlShowFunctions;
import org.apache.flink.sql.parser.dql.SqlShowTables;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.calcite.FlinkPlannerImpl;
import org.apache.flink.table.calcite.FlinkTypeFactory;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.CatalogView;
import org.apache.flink.table.catalog.CatalogViewImpl;
import org.apache.flink.table.catalog.FunctionLanguage;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
import org.apache.flink.table.operations.CatalogSinkModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.PlannerQueryOperation;
import org.apache.flink.table.operations.ShowCatalogsOperation;
import org.apache.flink.table.operations.ShowDatabasesOperation;
import org.apache.flink.table.operations.ShowFunctionsOperation;
import org.apache.flink.table.operations.ShowTablesOperation;
import org.apache.flink.table.operations.UseCatalogOperation;
import org.apache.flink.table.operations.UseDatabaseOperation;
import org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.AlterDatabaseOperation;
import org.apache.flink.table.operations.ddl.AlterTablePropertiesOperation;
import org.apache.flink.table.operations.ddl.AlterTableRenameOperation;
import org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.CreateDatabaseOperation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;
import org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation;
import org.apache.flink.table.operations.ddl.CreateViewOperation;
import org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.DropDatabaseOperation;
import org.apache.flink.table.operations.ddl.DropTableOperation;
import org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation;
import org.apache.flink.table.operations.ddl.DropViewOperation;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.TypeConversions;
import org.apache.flink.util.StringUtils;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.dialect.CalciteSqlDialect;
import org.apache.calcite.sql.parser.SqlParser;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* Mix-in tool class for {@code SqlNode} that allows DDL commands to be
* converted to {@link Operation}.
*
* <p>For every kind of {@link SqlNode}, there needs to have a corresponding
* #convert(type) method, the 'type' argument should be the subclass
* of the supported {@link SqlNode}.
*
* <p>Every #convert() should return a {@link Operation} which can be used in
* {@link org.apache.flink.table.delegation.Planner}.
*/
public class SqlToOperationConverter {

    /** Planner used for validation and for converting query nodes to relational trees. */
    private final FlinkPlannerImpl flinkPlanner;
    /** Catalog manager used to qualify object identifiers against the current catalog/database. */
    private final CatalogManager catalogManager;

    //~ Constructors -----------------------------------------------------------

    private SqlToOperationConverter(
            FlinkPlannerImpl flinkPlanner,
            CatalogManager catalogManager) {
        this.flinkPlanner = flinkPlanner;
        this.catalogManager = catalogManager;
    }

    /**
     * This is the main entrance for executing all kinds of DDL/DML {@code SqlNode}s, different
     * SqlNode will have its implementation in the #convert(type) method whose 'type' argument
     * is subclass of {@code SqlNode}.
     *
     * @param flinkPlanner FlinkPlannerImpl to convert sql node to rel node
     * @param catalogManager CatalogManager used to qualify identifiers
     * @param sqlNode SqlNode to execute on
     * @return the converted {@link Operation}, or {@link Optional#empty()} when the node kind
     *     is not supported by this converter
     */
    public static Optional<Operation> convert(
            FlinkPlannerImpl flinkPlanner,
            CatalogManager catalogManager,
            SqlNode sqlNode) {
        // validate the query
        final SqlNode validated = flinkPlanner.validate(sqlNode);
        SqlToOperationConverter converter = new SqlToOperationConverter(flinkPlanner, catalogManager);
        if (validated instanceof SqlCreateTable) {
            return Optional.of(converter.convertCreateTable((SqlCreateTable) validated));
        } else if (validated instanceof SqlDropTable) {
            return Optional.of(converter.convertDropTable((SqlDropTable) validated));
        } else if (validated instanceof SqlAlterTable) {
            return Optional.of(converter.convertAlterTable((SqlAlterTable) validated));
        } else if (validated instanceof SqlCreateFunction) {
            return Optional.of(converter.convertCreateFunction((SqlCreateFunction) validated));
        } else if (validated instanceof SqlAlterFunction) {
            return Optional.of(converter.convertAlterFunction((SqlAlterFunction) validated));
        } else if (validated instanceof SqlDropFunction) {
            return Optional.of(converter.convertDropFunction((SqlDropFunction) validated));
        } else if (validated instanceof RichSqlInsert) {
            SqlNodeList targetColumnList = ((RichSqlInsert) validated).getTargetColumnList();
            if (targetColumnList != null && targetColumnList.size() != 0) {
                throw new ValidationException("Partial inserts are not supported");
            }
            return Optional.of(converter.convertSqlInsert((RichSqlInsert) validated));
        } else if (validated instanceof SqlUseCatalog) {
            return Optional.of(converter.convertUseCatalog((SqlUseCatalog) validated));
        } else if (validated instanceof SqlUseDatabase) {
            return Optional.of(converter.convertUseDatabase((SqlUseDatabase) validated));
        } else if (validated instanceof SqlCreateDatabase) {
            return Optional.of(converter.convertCreateDatabase((SqlCreateDatabase) validated));
        } else if (validated instanceof SqlDropDatabase) {
            return Optional.of(converter.convertDropDatabase((SqlDropDatabase) validated));
        } else if (validated instanceof SqlAlterDatabase) {
            return Optional.of(converter.convertAlterDatabase((SqlAlterDatabase) validated));
        } else if (validated instanceof SqlShowCatalogs) {
            return Optional.of(converter.convertShowCatalogs((SqlShowCatalogs) validated));
        } else if (validated instanceof SqlShowDatabases) {
            return Optional.of(converter.convertShowDatabases((SqlShowDatabases) validated));
        } else if (validated instanceof SqlShowTables) {
            return Optional.of(converter.convertShowTables((SqlShowTables) validated));
        } else if (validated instanceof SqlShowFunctions) {
            return Optional.of(converter.convertShowFunctions((SqlShowFunctions) validated));
        } else if (validated instanceof SqlCreateView) {
            return Optional.of(converter.convertCreateView((SqlCreateView) validated));
        } else if (validated instanceof SqlDropView) {
            return Optional.of(converter.convertDropView((SqlDropView) validated));
        } else if (validated.getKind().belongsTo(SqlKind.QUERY)) {
            return Optional.of(converter.convertSqlQuery(validated));
        } else {
            return Optional.empty();
        }
    }

    //~ Tools ------------------------------------------------------------------

    /**
     * Convert the {@link SqlCreateTable} node.
     */
    private Operation convertCreateTable(SqlCreateTable sqlCreateTable) {
        // primary key and unique keys are not supported
        if ((sqlCreateTable.getPrimaryKeyList().size() > 0)
                || (sqlCreateTable.getUniqueKeysList().size() > 0)) {
            throw new SqlConversionException("Primary key and unique key are not supported yet.");
        }
        if (sqlCreateTable.getWatermark().isPresent()) {
            throw new SqlConversionException(
                "Watermark statement is not supported in Old Planner, please use Blink Planner instead.");
        }
        // set with properties
        Map<String, String> properties = new HashMap<>();
        sqlCreateTable.getPropertyList().getList().forEach(p ->
            properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
        TableSchema tableSchema = createTableSchema(sqlCreateTable);
        String tableComment = sqlCreateTable.getComment().map(comment ->
            comment.getNlsString().getValue()).orElse(null);
        // set partition key
        List<String> partitionKeys = sqlCreateTable.getPartitionKeyList()
            .getList()
            .stream()
            .map(p -> ((SqlIdentifier) p).getSimple())
            .collect(Collectors.toList());
        CatalogTable catalogTable = new CatalogTableImpl(tableSchema,
            partitionKeys,
            properties,
            tableComment);
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateTable.fullTableName());
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        return new CreateTableOperation(
            identifier,
            catalogTable,
            sqlCreateTable.isIfNotExists());
    }

    /** Convert DROP TABLE statement. */
    private Operation convertDropTable(SqlDropTable sqlDropTable) {
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlDropTable.fullTableName());
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        return new DropTableOperation(identifier, sqlDropTable.getIfExists());
    }

    /**
     * Convert ALTER TABLE statement.
     *
     * @throws ValidationException if the target table does not exist / is temporary, or if the
     *     concrete ALTER TABLE variant is not supported
     */
    private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
        ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        if (sqlAlterTable instanceof SqlAlterTableRename) {
            UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
            ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
            return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
        } else if (sqlAlterTable instanceof SqlAlterTableProperties) {
            Optional<CatalogManager.TableLookupResult> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
            if (optionalCatalogTable.isPresent() && !optionalCatalogTable.get().isTemporary()) {
                CatalogTable originalCatalogTable = (CatalogTable) optionalCatalogTable.get().getTable();
                // Merge semantics: new properties override existing ones with the same key.
                Map<String, String> properties = new HashMap<>(originalCatalogTable.getProperties());
                ((SqlAlterTableProperties) sqlAlterTable).getPropertyList().getList().forEach(p ->
                    properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
                CatalogTable catalogTable = new CatalogTableImpl(
                    originalCatalogTable.getSchema(),
                    originalCatalogTable.getPartitionKeys(),
                    properties,
                    originalCatalogTable.getComment());
                return new AlterTablePropertiesOperation(tableIdentifier, catalogTable);
            } else {
                throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.",
                    tableIdentifier.toString()));
            }
        }
        // Fail fast instead of returning null: a null return would surface later as an
        // obscure NullPointerException inside Optional.of(...) in #convert.
        throw new ValidationException(
            String.format("Unsupported ALTER TABLE statement: %s", sqlAlterTable));
    }

    /** Convert CREATE FUNCTION statement. */
    private Operation convertCreateFunction(SqlCreateFunction sqlCreateFunction) {
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateFunction.getFunctionIdentifier());
        if (sqlCreateFunction.isSystemFunction()) {
            // Temporary system functions are registered by simple name, not by full identifier.
            return new CreateTempSystemFunctionOperation(
                unresolvedIdentifier.getObjectName(),
                sqlCreateFunction.getFunctionClassName().getValueAs(String.class),
                sqlCreateFunction.isIfNotExists()
            );
        } else {
            FunctionLanguage language = parseLanguage(sqlCreateFunction.getFunctionLanguage());
            CatalogFunction catalogFunction = new CatalogFunctionImpl(
                sqlCreateFunction.getFunctionClassName().getValueAs(String.class),
                language);
            ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
            return new CreateCatalogFunctionOperation(
                identifier,
                catalogFunction,
                sqlCreateFunction.isIfNotExists(),
                sqlCreateFunction.isTemporary()
            );
        }
    }

    /** Convert ALTER FUNCTION statement. */
    private Operation convertAlterFunction(SqlAlterFunction sqlAlterFunction) {
        if (sqlAlterFunction.isSystemFunction()) {
            throw new ValidationException("Alter temporary system function is not supported");
        }
        FunctionLanguage language = parseLanguage(sqlAlterFunction.getFunctionLanguage());
        CatalogFunction catalogFunction = new CatalogFunctionImpl(
            sqlAlterFunction.getFunctionClassName().getValueAs(String.class),
            language);
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterFunction.getFunctionIdentifier());
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        return new AlterCatalogFunctionOperation(
            identifier,
            catalogFunction,
            sqlAlterFunction.isIfExists(),
            sqlAlterFunction.isTemporary()
        );
    }

    /** Convert DROP FUNCTION statement. */
    private Operation convertDropFunction(SqlDropFunction sqlDropFunction) {
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlDropFunction.getFunctionIdentifier());
        if (sqlDropFunction.isSystemFunction()) {
            return new DropTempSystemFunctionOperation(
                unresolvedIdentifier.getObjectName(),
                sqlDropFunction.getIfExists()
            );
        } else {
            ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
            return new DropCatalogFunctionOperation(
                identifier,
                sqlDropFunction.getIfExists(),
                sqlDropFunction.isTemporary()
            );
        }
    }

    /** Fallback method for sql query. */
    private Operation convertSqlQuery(SqlNode node) {
        return toQueryOperation(flinkPlanner, node);
    }

    /** Convert insert into statement. */
    private Operation convertSqlInsert(RichSqlInsert insert) {
        // get name of sink table
        List<String> targetTablePath = ((SqlIdentifier) insert.getTargetTable()).names;
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        // Recursively convert the SELECT part; only query-shaped sources are supported.
        PlannerQueryOperation query = (PlannerQueryOperation) SqlToOperationConverter.convert(
            flinkPlanner,
            catalogManager,
            insert.getSource())
            .orElseThrow(() -> new TableException(
                "Unsupported node type " + insert.getSource().getClass().getSimpleName()));
        return new CatalogSinkModifyOperation(
            identifier,
            query,
            insert.getStaticPartitionKVs(),
            insert.isOverwrite(),
            Collections.emptyMap());
    }

    /** Convert use catalog statement. */
    private Operation convertUseCatalog(SqlUseCatalog useCatalog) {
        return new UseCatalogOperation(useCatalog.getCatalogName());
    }

    /** Convert use database statement. */
    private Operation convertUseDatabase(SqlUseDatabase useDatabase) {
        String[] fullDatabaseName = useDatabase.fullDatabaseName();
        if (fullDatabaseName.length > 2) {
            throw new SqlConversionException("use database identifier format error");
        }
        // One part: database in the current catalog. Two parts: catalog.database.
        String catalogName = fullDatabaseName.length == 2 ? fullDatabaseName[0] : catalogManager.getCurrentCatalog();
        String databaseName = fullDatabaseName.length == 2 ? fullDatabaseName[1] : fullDatabaseName[0];
        return new UseDatabaseOperation(catalogName, databaseName);
    }

    /** Convert CREATE DATABASE statement. */
    private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
        String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
        if (fullDatabaseName.length > 2) {
            throw new SqlConversionException("create database identifier format error");
        }
        String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
        String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
        boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
        String databaseComment = sqlCreateDatabase.getComment()
            .map(comment -> comment.getNlsString().getValue()).orElse(null);
        // set with properties
        Map<String, String> properties = new HashMap<>();
        sqlCreateDatabase.getPropertyList().getList().forEach(p ->
            properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
        CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
        return new CreateDatabaseOperation(catalogName, databaseName, catalogDatabase, ignoreIfExists);
    }

    /** Convert DROP DATABASE statement. */
    private Operation convertDropDatabase(SqlDropDatabase sqlDropDatabase) {
        String[] fullDatabaseName = sqlDropDatabase.fullDatabaseName();
        if (fullDatabaseName.length > 2) {
            throw new SqlConversionException("drop database identifier format error");
        }
        String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
        String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
        return new DropDatabaseOperation(
            catalogName,
            databaseName,
            sqlDropDatabase.getIfExists(),
            sqlDropDatabase.isCascade());
    }

    /**
     * Convert ALTER DATABASE statement.
     *
     * @throws SqlConversionException if the catalog or database does not exist
     */
    private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
        String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
        if (fullDatabaseName.length > 2) {
            throw new SqlConversionException("alter database identifier format error");
        }
        String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
        String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
        Map<String, String> properties = new HashMap<>();
        CatalogDatabase originCatalogDatabase;
        Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
        if (catalog.isPresent()) {
            try {
                originCatalogDatabase = catalog.get().getDatabase(databaseName);
                properties.putAll(originCatalogDatabase.getProperties());
            } catch (DatabaseNotExistException e) {
                throw new SqlConversionException(String.format("Database %s not exists", databaseName), e);
            }
        } else {
            throw new SqlConversionException(String.format("Catalog %s not exists", catalogName));
        }
        // set with properties (new properties override existing ones with the same key)
        sqlAlterDatabase.getPropertyList().getList().forEach(p ->
            properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
        CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
        return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
    }

    /** Convert SHOW CATALOGS statement. */
    private Operation convertShowCatalogs(SqlShowCatalogs sqlShowCatalogs) {
        return new ShowCatalogsOperation();
    }

    /** Convert SHOW DATABASES statement. */
    private Operation convertShowDatabases(SqlShowDatabases sqlShowDatabases) {
        return new ShowDatabasesOperation();
    }

    /** Convert SHOW TABLES statement. */
    private Operation convertShowTables(SqlShowTables sqlShowTables) {
        return new ShowTablesOperation();
    }

    /** Convert SHOW FUNCTIONS statement. */
    private Operation convertShowFunctions(SqlShowFunctions sqlShowFunctions) {
        return new ShowFunctionsOperation();
    }

    /** Convert CREATE VIEW statement. */
    private Operation convertCreateView(SqlCreateView sqlCreateView) {
        final SqlNode query = sqlCreateView.getQuery();
        final SqlNodeList fieldList = sqlCreateView.getFieldList();
        SqlNode validateQuery = flinkPlanner.validate(query);
        PlannerQueryOperation operation = toQueryOperation(flinkPlanner, validateQuery);
        TableSchema schema = operation.getTableSchema();
        // the view column list in CREATE VIEW is optional, if it's not empty, we should update
        // the column name with the names in view column list.
        if (!fieldList.getList().isEmpty()) {
            // alias column names
            String[] inputFieldNames = schema.getFieldNames();
            String[] aliasFieldNames = fieldList.getList().stream()
                .map(SqlNode::toString)
                .toArray(String[]::new);
            if (inputFieldNames.length != aliasFieldNames.length) {
                throw new SqlConversionException(String.format(
                    "VIEW definition and input fields not match:\n\tDef fields: %s.\n\tInput fields: %s.",
                    Arrays.toString(aliasFieldNames), Arrays.toString(inputFieldNames)));
            }
            DataType[] inputFieldTypes = schema.getFieldDataTypes();
            schema = TableSchema.builder().fields(aliasFieldNames, inputFieldTypes).build();
        }
        String originalQuery = getQuotedSqlString(query);
        String expandedQuery = getQuotedSqlString(validateQuery);
        String comment = sqlCreateView.getComment().map(c -> c.getNlsString().getValue()).orElse(null);
        CatalogView catalogView = new CatalogViewImpl(originalQuery,
            expandedQuery,
            schema,
            Collections.emptyMap(),
            comment);
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateView.fullViewName());
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        return new CreateViewOperation(
            identifier,
            catalogView,
            sqlCreateView.isIfNotExists(),
            sqlCreateView.isTemporary());
    }

    /** Convert DROP VIEW statement. */
    private Operation convertDropView(SqlDropView sqlDropView) {
        UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlDropView.fullViewName());
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        return new DropViewOperation(identifier, sqlDropView.getIfExists(), sqlDropView.isTemporary());
    }

    /**
     * Create a table schema from {@link SqlCreateTable}. This schema contains computed column
     * fields, say, we have a create table DDL statement:
     * <blockquote><pre>
     * create table t(
     * a int,
     * b varchar,
     * c as to_timestamp(b))
     * with (
     * 'connector' = 'csv',
     * 'k1' = 'v1')
     * </pre></blockquote>
     *
     * <p>The returned table schema contains columns (a:int, b:varchar, c:timestamp).
     *
     * @param sqlCreateTable sql create table node.
     * @return TableSchema
     * @throws SqlConversionException if the table declares computed columns (unsupported here)
     *     or declares no physical columns at all
     */
    private TableSchema createTableSchema(SqlCreateTable sqlCreateTable) {
        // Reject computed columns up front; this converter only supports physical columns.
        if (sqlCreateTable.containsComputedColumn()) {
            throw new SqlConversionException("Computed columns for DDL is not supported yet!");
        }
        // setup table columns
        SqlNodeList columnList = sqlCreateTable.getColumnList();
        TableSchema.Builder builder = new TableSchema.Builder();
        // collect the physical table schema first.
        final List<SqlNode> physicalColumns = columnList.getList().stream()
            .filter(n -> n instanceof SqlTableColumn).collect(Collectors.toList());
        if (physicalColumns.isEmpty()) {
            // The original code only guarded this with a JVM assertion, which is disabled by
            // default and would let a null schema escape to callers.
            throw new SqlConversionException("Table must declare at least one physical column.");
        }
        for (SqlNode node : physicalColumns) {
            SqlTableColumn column = (SqlTableColumn) node;
            final RelDataType relType = column.getType()
                .deriveType(
                    flinkPlanner.getOrCreateSqlValidator(),
                    column.getType().getNullable());
            builder.field(column.getName().getSimple(),
                TypeConversions.fromLegacyInfoToDataType(FlinkTypeFactory.toTypeInfo(relType)));
        }
        // Build once after all fields are registered (the original rebuilt the schema on
        // every loop iteration).
        return builder.build();
    }

    /**
     * Converts language string to the FunctionLanguage.
     *
     * @param languageString the language string from SQL parser
     * @return supported FunctionLanguage otherwise raise UnsupportedOperationException.
     * @throws UnsupportedOperationException if the languageString is not parsable or language is not supported
     */
    private FunctionLanguage parseLanguage(String languageString) {
        // Missing/blank language defaults to JAVA.
        if (StringUtils.isNullOrWhitespaceOnly(languageString)) {
            return FunctionLanguage.JAVA;
        }
        FunctionLanguage language;
        try {
            language = FunctionLanguage.valueOf(languageString);
        } catch (IllegalArgumentException e) {
            throw new UnsupportedOperationException(
                String.format("Unrecognized function language string %s", languageString), e);
        }
        if (language == FunctionLanguage.PYTHON) {
            throw new UnsupportedOperationException("Only function language JAVA and SCALA are supported for now.");
        }
        return language;
    }

    /** Transform a validated query node to a relational tree wrapped as a query operation. */
    private PlannerQueryOperation toQueryOperation(FlinkPlannerImpl planner, SqlNode validated) {
        // transform to a relational tree
        RelRoot relational = planner.rel(validated);
        return new PlannerQueryOperation(relational.rel);
    }

    /**
     * Render {@code sqlNode} back to SQL text using a dialect that mirrors the parser's
     * casing and quoting configuration, so round-tripped view definitions stay stable.
     */
    private String getQuotedSqlString(SqlNode sqlNode) {
        SqlParser.Config parserConfig = flinkPlanner.config().getParserConfig();
        SqlDialect dialect = new CalciteSqlDialect(SqlDialect.EMPTY_CONTEXT
            // Fixed: quoted-identifier casing must come from quotedCasing(); the original
            // passed unquotedCasing() here, altering the case of quoted identifiers.
            .withQuotedCasing(parserConfig.quotedCasing())
            .withConformance(parserConfig.conformance())
            .withUnquotedCasing(parserConfig.unquotedCasing())
            .withIdentifierQuoteString(parserConfig.quoting().string));
        return sqlNode.toSqlString(dialect).getSql();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.rest.client;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import org.apache.polygene.api.common.Optional;
import org.apache.polygene.api.common.UseDefaults;
import org.apache.polygene.api.composite.TransientComposite;
import org.apache.polygene.api.constraint.Name;
import org.apache.polygene.api.injection.scope.Structure;
import org.apache.polygene.api.injection.scope.Uses;
import org.apache.polygene.api.property.Property;
import org.apache.polygene.api.structure.Application;
import org.apache.polygene.api.structure.ApplicationDescriptor;
import org.apache.polygene.api.structure.Module;
import org.apache.polygene.api.unitofwork.ConcurrentEntityModificationException;
import org.apache.polygene.api.unitofwork.UnitOfWorkCallback;
import org.apache.polygene.api.unitofwork.UnitOfWorkCompletionException;
import org.apache.polygene.api.unitofwork.UnitOfWorkFactory;
import org.apache.polygene.api.usecase.UsecaseBuilder;
import org.apache.polygene.api.value.ValueBuilder;
import org.apache.polygene.api.value.ValueBuilderFactory;
import org.apache.polygene.api.value.ValueComposite;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.ModuleAssembly;
import org.apache.polygene.library.rest.client.api.ContextResourceClient;
import org.apache.polygene.library.rest.client.api.ContextResourceClientFactory;
import org.apache.polygene.library.rest.client.api.ErrorHandler;
import org.apache.polygene.library.rest.client.api.HandlerCommand;
import org.apache.polygene.library.rest.client.spi.ResponseHandler;
import org.apache.polygene.library.rest.client.spi.ResultHandler;
import org.apache.polygene.library.rest.common.Resource;
import org.apache.polygene.library.rest.common.ValueAssembler;
import org.apache.polygene.library.rest.common.link.Link;
import org.apache.polygene.library.rest.common.link.Links;
import org.apache.polygene.library.rest.common.link.LinksBuilder;
import org.apache.polygene.library.rest.common.link.LinksUtil;
import org.apache.polygene.library.rest.server.api.ContextResource;
import org.apache.polygene.library.rest.server.api.ContextRestlet;
import org.apache.polygene.library.rest.server.api.ObjectSelection;
import org.apache.polygene.library.rest.server.api.ResourceDelete;
import org.apache.polygene.library.rest.server.api.ResourceIndex;
import org.apache.polygene.library.rest.server.api.SubResource;
import org.apache.polygene.library.rest.server.api.SubResources;
import org.apache.polygene.library.rest.server.api.constraint.InteractionValidation;
import org.apache.polygene.library.rest.server.api.constraint.Requires;
import org.apache.polygene.library.rest.server.api.constraint.RequiresValid;
import org.apache.polygene.library.rest.server.api.dci.Role;
import org.apache.polygene.library.rest.server.assembler.RestServerAssembler;
import org.apache.polygene.library.rest.server.restlet.NullCommandResult;
import org.apache.polygene.library.rest.server.spi.CommandResult;
import org.apache.polygene.test.AbstractPolygeneTest;
import org.apache.polygene.test.util.FreePortFinder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.restlet.Client;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.Server;
import org.restlet.Uniform;
import org.restlet.data.ChallengeScheme;
import org.restlet.data.Form;
import org.restlet.data.MediaType;
import org.restlet.data.Protocol;
import org.restlet.data.Reference;
import org.restlet.resource.ResourceException;
import org.restlet.security.ChallengeAuthenticator;
import org.restlet.security.MapVerifier;
import org.restlet.security.User;
import org.restlet.service.MetadataService;
import static org.apache.polygene.bootstrap.ImportedServiceDeclaration.NEW_OBJECT;
import static org.apache.polygene.library.rest.client.api.HandlerCommand.command;
import static org.apache.polygene.library.rest.client.api.HandlerCommand.query;
import static org.apache.polygene.library.rest.client.api.HandlerCommand.refresh;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
public class ContextResourceClientFactoryTest
extends AbstractPolygeneTest
{
// Embedded Restlet HTTP server under test; started in @BeforeEach, stopped in @AfterEach.
private Server server;
// Client pointed at the server's root resource; configured in startWebServer().
private ContextResourceClient crc;
// NOTE(review): static side channel written by command handlers ("Commands will set this")
// and presumably read by assertions; static because server-side resources are created by
// the object factory, not by this test instance.
public static String command = null; // Commands will set this
// Assembles one module containing both the REST client and the REST server side,
// plus the value types and resource/context objects exercised by the tests.
@Override
public void assemble( ModuleAssembly module )
throws AssemblyException
{
// General setup of client and server
new ClientAssembler().assemble( module );
new ValueAssembler().assemble( module );
new RestServerAssembler().assemble( module );
// Commands produce no result value; import a CommandResult backed by a fresh NullCommandResult object.
module.objects( NullCommandResult.class );
module.importedServices( CommandResult.class ).importedBy( NEW_OBJECT );
// MetadataService is both an imported service and an instantiable object (see newApplicationInstance).
module.importedServices( MetadataService.class ).importedBy( NEW_OBJECT );
module.objects( MetadataService.class );
// Test specific setup
module.values( TestQuery.class, TestResult.class, TestCommand.class );
// Default value for TestQuery.abc() — tests rely on "def" being the initial value.
module.forMixin( TestQuery.class ).declareDefaults().abc().set( "def" );
module.objects( RootRestlet.class, RootResource.class, RootContext.class, SubResource1.class, PagesResource.class );
module.objects( DescribableContext.class );
module.transients( TestComposite.class );
module.defaultServices();
}
// Intentionally empty: this test needs no extra application initialization beyond assembly.
@Override
protected void initApplication( Application app )
throws Exception
{
}
// Starts an embedded Restlet server on a free loopback port, guarded by HTTP basic auth
// (user "rickard"/"secret"), and builds the ContextResourceClient pointed at its root.
// The START/END SNIPPET markers are documentation anchors — do not remove or reorder them.
@BeforeEach
public void startWebServer()
throws Exception
{
int port = FreePortFinder.findFreePortOnLoopback();
server = new Server( Protocol.HTTP, port );
ContextRestlet restlet = objectFactory.newObject( ContextRestlet.class, new org.restlet.Context() );
// Basic-auth guard sits in front of the restlet; credentials are verified against an in-memory map.
ChallengeAuthenticator guard = new ChallengeAuthenticator( null, ChallengeScheme.HTTP_BASIC, "testRealm" );
MapVerifier mapVerifier = new MapVerifier();
mapVerifier.getLocalSecrets().put( "rickard", "secret".toCharArray() );
guard.setVerifier( mapVerifier );
guard.setNext( restlet );
server.setNext( guard );
server.start();
//START SNIPPET: client-create1
Client client = new Client( Protocol.HTTP );
ContextResourceClientFactory contextResourceClientFactory = objectFactory.newObject( ContextResourceClientFactory.class, client );
contextResourceClientFactory.setAcceptedMediaTypes( MediaType.APPLICATION_JSON );
//END SNIPPET: client-create1
//START SNIPPET: client-create2
// On the first 401, supply credentials and retry once; a second 401 is a hard failure.
contextResourceClientFactory.setErrorHandler( new ErrorHandler().onError( ErrorHandler.AUTHENTICATION_REQUIRED, new ResponseHandler()
{
boolean tried = false;
@Override
public HandlerCommand handleResponse( Response response, ContextResourceClient client )
{
if( tried )
{
throw new ResourceException( response.getStatus() );
}
tried = true;
client.getContextResourceClientFactory().getInfo().setUser( new User( "rickard", "secret" ) );
// Try again
return refresh();
}
} ).onError( ErrorHandler.RECOVERABLE_ERROR, new ResponseHandler()
{
@Override
public HandlerCommand handleResponse( Response response, ContextResourceClient client )
{
// Try to restart
return refresh();
}
} ) );
//END SNIPPET: client-create2
//START SNIPPET: client-create3
Reference ref = new Reference( "http://localhost:" + port + '/' );
crc = contextResourceClientFactory.newClient( ref );
//END SNIPPET: client-create3
}
// Shuts down the embedded server started in startWebServer().
@AfterEach
public void stopWebServer()
throws Exception
{
server.stop();
}
/**
 * Instantiates the application model, supplying a {@link MetadataService} instance
 * to it at creation time.
 */
@Override
protected Application newApplicationInstance( ApplicationDescriptor applicationModel )
{
    MetadataService metadataService = new MetadataService();
    return applicationModel.newInstance( polygene.api(), metadataService );
}
/** Issues a no-argument query and verifies the returned TestResult payload. */
@Test
public void testQueryWithoutValue()
{
//START SNIPPET: query-without-value
crc.onResource( new ResultHandler<Resource>()
{
@Override
public HandlerCommand handleResult( Resource result, ContextResourceClient client )
{
return query( "querywithoutvalue" );
}
} ).
onQuery( "querywithoutvalue", new ResultHandler<TestResult>()
{
@Override
public HandlerCommand handleResult( TestResult result, ContextResourceClient client )
{
// Server-side RootContext is expected to answer with xyz == "bar".
assertThat( result.xyz().get(), equalTo( "bar" ) );
// Returning null terminates the interaction chain.
return null;
}
} );
crc.start();
//END SNIPPET: query-without-value
}
/**
 * Exercises the query-then-command flow, including the processing-error callbacks:
 * failed requests are retried with corrected input until they succeed.
 */
@Test
public void testQueryAndCommand()
{
//START SNIPPET: query-and-command
crc.onResource( new ResultHandler<Resource>()
{
@Override
public HandlerCommand handleResult( Resource result, ContextResourceClient client )
{
// Passing null provokes a processing error, handled below.
return query( "querywithvalue", null );
}
} ).onProcessingError( "querywithvalue", new ResultHandler<TestQuery>()
{
@Override
public HandlerCommand handleResult( TestQuery result, ContextResourceClient client )
{
// Fix the rejected value and retry the query.
ValueBuilder<TestQuery> builder = valueBuilderFactory.newValueBuilderWithPrototype( result );
builder.prototype().abc().set( "abc" + builder.prototype().abc().get() );
return query( "querywithvalue", builder.newInstance() );
}
} ).onQuery( "querywithvalue", new ResultHandler<TestResult>()
{
@Override
public HandlerCommand handleResult( TestResult result, ContextResourceClient client )
{
// Query succeeded; now provoke a command processing error the same way.
return command( "commandwithvalue", null );
}
} ).onProcessingError( "commandwithvalue", new ResultHandler<Form>()
{
@Override
public HandlerCommand handleResult( Form result, ContextResourceClient client )
{
// Correct the form value and retry the command.
result.set( "abc", "right" );
return command( "commandwithvalue", result );
}
} );
crc.start();
//END SNIPPET: query-and-command
}
/**
 * Queries a list of links, picks one by id, and invokes it as a command,
 * wiring each step with separately registered handlers.
 */
@Test
public void testQueryListAndCommand()
{
//START SNIPPET: query-list-and-command
crc.onResource( new ResultHandler<Resource>()
{
@Override
public HandlerCommand handleResult( Resource result, ContextResourceClient client )
{
return query( "commandwithvalue" );
}
} ).onQuery( "commandwithvalue", new ResultHandler<Links>()
{
@Override
public HandlerCommand handleResult( Links result, ContextResourceClient client )
{
// Select the link whose id is "right" and invoke it as a command.
Link link = LinksUtil.withId( "right", result );
return command( link );
}
} ).onCommand( "commandwithvalue", new ResponseHandler()
{
@Override
public HandlerCommand handleResponse( Response response, ContextResourceClient client )
{
System.out.println( "Done" );
return null;
}
} );
crc.start();
//END SNIPPET: query-list-and-command
}
/**
 * Same flow as {@link #testQueryListAndCommand()}, but with handlers attached
 * progressively via onSuccess(...) instead of registered up front by name.
 */
@Test
public void testQueryListAndCommandProgressive()
{
//START SNIPPET: query-list-and-command-progressive
crc.onResource( new ResultHandler<Resource>()
{
@Override
public HandlerCommand handleResult( Resource result, ContextResourceClient client )
{
return query( "commandwithvalue" ).onSuccess( new ResultHandler<Links>()
{
@Override
public HandlerCommand handleResult( Links result, ContextResourceClient client )
{
// Select the link whose id is "right" and invoke it as a command.
Link link = LinksUtil.withId( "right", result );
return command( link ).onSuccess( new ResponseHandler()
{
@Override
public HandlerCommand handleResponse( Response response, ContextResourceClient client )
{
System.out.println( "Done" );
return null;
}
} );
}
} );
}
} );
crc.start();
//END SNIPPET: query-list-and-command-progressive
}
@Test
public void testIndexedResource()
{
// PagesResource (declared below) implements ResourceIndex and exposes an
// "index" query that returns three links; verify the count here.
crc.newClient( "subcontext/pages/" ).onResource( new ResultHandler<Resource>()
{
@Override
public HandlerCommand handleResult( Resource result, ContextResourceClient client )
{
return query( "index" );
}
} ).onQuery( "index", new ResultHandler<Links>()
{
@Override
public HandlerCommand handleResult( Links result, ContextResourceClient client )
{
assertThat( result.links().get().size(), equalTo( 3 ) );
return null;
}
} )
.start();
}
// Query parameter value: a single string property, defaulted when absent.
public interface TestQuery
extends ValueComposite
{
@UseDefaults
Property<String> abc();
}
// Command parameter value: carries the entity identifier to operate on.
public interface TestCommand
extends ValueComposite
{
Property<String> entity();
}
// Query result value: a single string property.
public interface TestResult
extends ValueComposite
{
Property<String> xyz();
}
// Restlet entry point: every request is delegated to a fresh RootResource.
public static class RootRestlet
extends ContextRestlet
{
@Override
protected Uniform createRoot( Request request, Response response )
{
return objectFactory.newObject( RootResource.class, this );
}
}
// Root REST resource. Method names double as interaction/resource names,
// so renaming them would change the exposed URLs. Delegates to RootContext.
public static class RootResource
extends ContextResource
implements SubResources, ResourceDelete
{
// Lazily created transient, shared across requests (static on purpose).
private static TestComposite instance;
private RootContext rootContext()
{
return context( RootContext.class );
}
public RootResource()
{
}
// Query taking a value parameter; delegates to the context.
public TestResult querywithvalue( TestQuery testQuery )
throws Throwable
{
return rootContext().queryWithValue( testQuery );
}
// Parameterless query.
public TestResult querywithoutvalue()
throws Throwable
{
return rootContext().queryWithoutValue();
}
// Query returning a plain string rather than a value composite.
public String querywithstringresult( TestQuery query )
throws Throwable
{
return rootContext().queryWithStringResult( query );
}
// Command taking a value parameter.
public void commandwithvalue( TestCommand command )
throws Throwable
{
rootContext().commandWithValue( command );
}
// Overload without parameter: returns the selectable links for the command
// ("right" succeeds in RootContext.commandWithValue, "wrong" fails).
public Links commandwithvalue()
{
return new LinksBuilder( module ).
command( "commandwithvalue" ).
addLink( "Command ABC", "right" ).
addLink( "Command XYZ", "wrong" ).newLinks();
}
@Override
public void delete()
throws IOException
{
rootContext().delete();
}
// Routes every sub-path segment to SubResource1, after selecting the
// objects (a File role and the shared TestComposite) that sub-resource
// interactions require.
public void resource( String currentSegment )
{
ObjectSelection objectSelection = ObjectSelection.current();
objectSelection.select( new File( "" ) );
if( instance == null )
{
objectSelection.select( instance = module.newTransient( TestComposite.class ) );
}
else
{
objectSelection.select( instance );
}
subResource( SubResource1.class );
}
}
// Sub-resource exercising role requirements (@Requires), interaction
// validation (@RequiresValid) and nested sub-resources.
public static class SubResource1
extends ContextResource
implements InteractionValidation
{
public SubResource1()
{
}
// Only available when a File role has been selected (see RootResource.resource).
@Requires( File.class )
public void commandWithRoleRequirement()
{
context( SubContext.class ).commandWithRoleRequirement();
}
// Interaction validation
// Static so the flag survives across per-request resource instances.
private static boolean xyzValid = true;
@RequiresValid( "xyz" )
public void xyz( @Name( "valid" ) boolean valid )
{
xyzValid = valid;
}
@RequiresValid( "notxyz" )
public void notxyz( @Name( "valid" ) boolean valid )
{
xyzValid = valid;
}
// "xyz" is valid while the flag is set; "notxyz" is its complement.
public boolean isValid( String name )
{
if( name.equals( "xyz" ) )
{
return xyzValid;
}
else if( name.equals( "notxyz" ) )
{
return !xyzValid;
}
else
{
return false;
}
}
@Requires( File.class )
public TestResult queryWithRoleRequirement( TestQuery query )
{
return context( SubContext.class ).queryWithRoleRequirement( query );
}
// Delegates to a generic context method (SubContext2).
public TestResult genericquery( TestQuery query )
throws Throwable
{
return context( SubContext2.class ).genericQuery( query );
}
public TestResult querywithvalue( TestQuery query )
throws Throwable
{
return context( SubContext.class ).queryWithValue( query );
}
@SubResource
public void subresource1()
{
subResource( SubResource1.class );
}
// NOTE(review): also maps to SubResource1 — presumably intentional (two
// paths to the same resource type); confirm, since no SubResource2 exists.
@SubResource
public void subresource2()
{
subResource( SubResource1.class );
}
@SubResource
public void pages()
{
subResource( PagesResource.class );
}
}
// Context backing RootResource: implements the actual query/command logic.
public static class RootContext
{
// Invocation counter used by idempotentCommandWithValue to fail 2 of 3 calls.
private static int count = 0;
@Structure
UnitOfWorkFactory uowf;
@Structure
ValueBuilderFactory vbf;
// Echoes the query's abc value back as the result's xyz property.
public TestResult queryWithValue( TestQuery query )
{
return vbf.newValueFromSerializedState( TestResult.class, "{\"xyz\":\"" + query.abc().get() + "\"}" );
}
public TestResult queryWithoutValue()
{
return vbf.newValueFromSerializedState( TestResult.class, "{\"xyz\":\"bar\"}" );
}
public String queryWithStringResult( TestQuery query )
{
return "bar";
}
public int queryWithIntegerResult( TestQuery query )
{
return 7;
}
// Accepts only "right" as the entity value; anything else is rejected.
public void commandWithValue( TestCommand command )
{
if( !command.entity().get().equals( "right" ) )
{
throw new IllegalArgumentException( "Wrong argument" );
}
// Done
}
public void idempotentCommandWithValue( TestCommand command )
throws ConcurrentEntityModificationException
{
// On all but every third invocation, throw a concurrency exception
// This is to test retries on the server-side
count++;
if( count % 3 != 0 )
{
uowf.currentUnitOfWork().addUnitOfWorkCallback( new UnitOfWorkCallback()
{
public void beforeCompletion()
throws UnitOfWorkCompletionException
{
throw new ConcurrentEntityModificationException( Collections.emptyMap(),
UsecaseBuilder.newUsecase( "Testing" ) );
}
public void afterCompletion( UnitOfWorkStatus status )
{
}
} );
}
if( !command.entity().get().equals( "right" ) )
{
throw new IllegalArgumentException( "Wrong argument" );
}
// Done
}
public void delete()
{
// Ok!
// NOTE(review): 'command' is not declared in this class — presumably a
// static field of the enclosing test class used to record the invoked
// interaction; verify it exists and that this assignment is intentional.
command = "delete";
}
}
// Context backing SubResource1: role-constrained interactions plus the
// same xyz/notxyz validation scheme as the resource itself.
public static class SubContext
implements InteractionValidation
{
@Structure
Module module;
public TestResult queryWithValue( TestQuery query )
{
return module.newValueFromSerializedState( TestResult.class, "{\"xyz\":\"bar\"}" );
}
// Test interaction constraints
@Requires( File.class )
public TestResult queryWithRoleRequirement( TestQuery query )
{
return module.newValueFromSerializedState( TestResult.class, "{\"xyz\":\"bar\"}" );
}
@Requires( File.class )
public void commandWithRoleRequirement()
{
}
// Interaction validation
// Static so the flag is shared with all instances of this context.
private static boolean xyzValid = true;
@RequiresValid( "xyz" )
public void xyz( @Name( "valid" ) boolean valid )
{
xyzValid = valid;
}
@RequiresValid( "notxyz" )
public void notxyz( @Name( "valid" ) boolean valid )
{
xyzValid = valid;
}
// "xyz" is valid while the flag is set; "notxyz" is its complement.
public boolean isValid( String name )
{
if( name.equals( "xyz" ) )
{
return xyzValid;
}
else if( name.equals( "notxyz" ) )
{
return !xyzValid;
}
else
{
return false;
}
}
}
// Minimal context used to exercise a generic query interaction.
public static class SubContext2
{
@Structure
Module module;
public TestResult genericQuery( TestQuery query )
{
return module.newValueFromSerializedState( TestResult.class, "{\"xyz\":\"bar\"}" );
}
}
// Indexed resource: its "index" query returns a fixed list of three links,
// asserted by testIndexedResource above.
public static class PagesResource extends ContextResource
implements ResourceIndex<Links>
{
@Override
public Links index()
{
return new LinksBuilder( module )
.addLink( "Page1", "page1" )
.addLink( "Page2", "page2" )
.addLink( "Page3", "page3" )
.newLinks();
}
}
// Context demonstrating the role pattern: a Describable role is bound to
// DescribableData state and mediates all reads/writes of the description.
public static class DescribableContext
{
@Structure
Module module;
Describable describable = new Describable();
// Binds the role to the data object it should operate on.
public void bind( @Uses DescribableData describableData )
{
describable.bind( describableData );
}
public String description()
{
return describable.description();
}
public void changeDescription( @Name( "description" ) String newDesc )
{
describable.changeDescription( newDesc );
}
// Role wrapping DescribableData; 'self' is the bound data instance.
public static class Describable
extends Role<DescribableData>
{
public void changeDescription( String newDesc )
{
self.description().set( newDesc );
}
public String description()
{
return self.description().get();
}
}
}
// State interface for DescribableContext: a defaulted description property.
public interface DescribableData
{
@UseDefaults
Property<String> description();
}
// Transient composite selected by RootResource.resource(); combines the
// describable state with an optional extra property.
public interface TestComposite
extends TransientComposite, DescribableData
{
@Optional
Property<String> foo();
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.interpreter.DataFlowInterpreter;
import com.intellij.codeInspection.dataFlow.java.anchor.JavaExpressionAnchor;
import com.intellij.codeInspection.dataFlow.java.inst.AssignInstruction;
import com.intellij.codeInspection.dataFlow.jvm.JvmDfaMemoryStateImpl;
import com.intellij.codeInspection.dataFlow.jvm.problems.ContractFailureProblem;
import com.intellij.codeInspection.dataFlow.jvm.problems.JvmDfaProblem;
import com.intellij.codeInspection.dataFlow.lang.DfaAnchor;
import com.intellij.codeInspection.dataFlow.lang.UnsatisfiedConditionProblem;
import com.intellij.codeInspection.dataFlow.lang.ir.*;
import com.intellij.codeInspection.dataFlow.memory.DfaMemoryState;
import com.intellij.codeInspection.dataFlow.memory.DistinctPairSet;
import com.intellij.codeInspection.dataFlow.memory.EqClass;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeBinOp;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.types.*;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.EntryStream;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.Predicate;
/**
 * A {@link JvmDfaMemoryStateImpl} that additionally records, as a linked history of
 * {@link MemoryStateChange} nodes, how variable types and relations changed at each
 * instruction. The history is later queried (e.g. via {@link MemoryStateChange#findFact}
 * and {@link MemoryStateChange#findRelation}) to explain dataflow analysis results.
 */
public class TrackingDfaMemoryState extends JvmDfaMemoryStateImpl {
// Head of this state's change history; shared structurally with copies.
private MemoryStateChange myHistory;
protected TrackingDfaMemoryState(DfaValueFactory factory) {
super(factory);
myHistory = null;
}
// Copy constructor: the copy continues from the same history node.
protected TrackingDfaMemoryState(TrackingDfaMemoryState toCopy) {
super(toCopy);
myHistory = toCopy.myHistory;
}
@NotNull
@Override
public TrackingDfaMemoryState createCopy() {
return new TrackingDfaMemoryState(this);
}
@Override
public void afterMerge(@NotNull DfaMemoryState other) {
super.afterMerge(other);
assert other instanceof TrackingDfaMemoryState;
MemoryStateChange otherHistory = ((TrackingDfaMemoryState)other).myHistory;
// Merging two states merges their histories into a single MergeInstruction node.
myHistory = myHistory == null ? otherHistory : myHistory.merge(otherHistory);
}
/**
 * Snapshot of all relations currently known for each variable: equality with
 * constants, inequality with anti-constant values, equality inside eq-classes,
 * and LT/NE between distinct class pairs (both directions are recorded).
 */
private Map<DfaVariableValue, Set<Relation>> getRelations() {
Map<DfaVariableValue, Set<Relation>> result = new HashMap<>();
forRecordedVariableTypes((var, type) -> {
if (type instanceof DfConstantType) {
result.computeIfAbsent(var, k -> new HashSet<>()).add(new Relation(RelationType.EQ, getFactory().fromDfType(type)));
}
if (type instanceof DfAntiConstantType) {
Set<?> notValues = ((DfAntiConstantType<?>)type).getNotValues();
if (!notValues.isEmpty()) {
DfType dfType = var.getDfType();
for (Object notValue : notValues) {
result.computeIfAbsent(var, k -> new HashSet<>()).add(
new Relation(RelationType.NE, getFactory().fromDfType(DfTypes.constant(notValue, dfType))));
}
}
}
});
for (EqClass eqClass : getNonTrivialEqClasses()) {
for (DfaVariableValue var : eqClass) {
Set<Relation> set = result.computeIfAbsent(var, k -> new HashSet<>());
for (DfaVariableValue eqVar : eqClass) {
if (eqVar != var) {
set.add(new Relation(RelationType.EQ, eqVar));
}
}
}
}
for (DistinctPairSet.DistinctPair classPair : getDistinctClassPairs()) {
EqClass first = classPair.getFirst();
EqClass second = classPair.getSecond();
RelationType plain = classPair.isOrdered() ? RelationType.LT : RelationType.NE;
RelationType flipped = Objects.requireNonNull(plain.getFlipped());
for (DfaVariableValue var1 : first) {
for (DfaVariableValue var2 : second) {
result.computeIfAbsent(var1, k -> new HashSet<>()).add(new Relation(plain, var2));
result.computeIfAbsent(var2, k -> new HashSet<>()).add(new Relation(flipped, var1));
}
}
}
return result;
}
// Appends a history node describing the diff between 'previous' and this state
// after 'instruction' was interpreted, together with the current top of stack.
void recordChange(Instruction instruction, TrackingDfaMemoryState previous) {
Map<DfaVariableValue, Change> result = getChangeMap(previous);
DfaValue value = isEmptyStack() ? getFactory().getUnknown() : peek();
myHistory = MemoryStateChange.create(myHistory, instruction, result, value);
}
/**
 * Computes per-variable differences between {@code previous} and this state:
 * type changes plus added/removed relations.
 */
@NotNull
private Map<DfaVariableValue, Change> getChangeMap(TrackingDfaMemoryState previous) {
Map<DfaVariableValue, Change> changeMap = new HashMap<>();
Set<DfaVariableValue> varsToCheck = new HashSet<>();
previous.forRecordedVariableTypes((value, state) -> varsToCheck.add(value));
forRecordedVariableTypes((value, state) -> varsToCheck.add(value));
for (DfaVariableValue value : varsToCheck) {
DfType newType = getDfType(value);
DfType oldType = previous.getDfType(value);
if (!newType.equals(oldType)) {
changeMap.put(value, new Change(Collections.emptySet(), Collections.emptySet(), oldType, newType));
}
}
Map<DfaVariableValue, Set<Relation>> oldRelations = previous.getRelations();
Map<DfaVariableValue, Set<Relation>> newRelations = getRelations();
varsToCheck.clear();
varsToCheck.addAll(oldRelations.keySet());
varsToCheck.addAll(newRelations.keySet());
for (DfaVariableValue value : varsToCheck) {
Set<Relation> oldValueRelations = oldRelations.getOrDefault(value, Collections.emptySet());
Set<Relation> newValueRelations = newRelations.getOrDefault(value, Collections.emptySet());
if (!oldValueRelations.equals(newValueRelations)) {
Set<Relation> added = new HashSet<>(newValueRelations);
added.removeAll(oldValueRelations);
Set<Relation> removed = new HashSet<>(oldValueRelations);
removed.removeAll(newValueRelations);
// Merge with a type-only Change recorded above for the same variable, if any.
changeMap.compute(
value, (v, change) -> change == null
? Change.create(removed, added, DfType.BOTTOM, DfType.BOTTOM)
: Change.create(removed, added, change.myOldType, change.myNewType));
}
}
return changeMap;
}
MemoryStateChange getHistory() {
return myHistory;
}
/**
 * Records bridge changes. Bridge states are states which process the same input
 * instruction but as a result jump to another place in the program (other than this
 * state's target). A bridge change is the difference between this state and all
 * states which have a different target instruction. Bridges allow tracking what else
 * is processed in parallel with the current state, including states which may never
 * arrive at the target place. E.g. consider two states like this:
 *
 * <pre>
 * this_state other_state
 * | |
 * some_condition <-- bridge is recorded here
 * |(true) |(false)
 * | return
 * |
 * always_true_condition <-- explanation is requested here
 * </pre>
 *
 * Thanks to the bridge we know that {@code some_condition} could be important for
 * the {@code always_true_condition} explanation.
 *
 * @param instruction the instruction processed by this state and the bridge states
 * @param bridgeStates states processing the same instruction but with different targets
 */
void addBridge(Instruction instruction, List<TrackingDfaMemoryState> bridgeStates) {
Map<DfaVariableValue, Change> changeMap = null;
for (TrackingDfaMemoryState bridge : bridgeStates) {
Map<DfaVariableValue, Change> newChangeMap = getChangeMap(bridge);
if (changeMap == null) {
changeMap = newChangeMap;
} else {
// Keep only changes common to every bridge state (intersection).
changeMap.keySet().retainAll(newChangeMap.keySet());
changeMap.replaceAll((var, old) -> old.unite(newChangeMap.get(var)));
changeMap.values().removeIf(Objects::isNull);
}
if (changeMap.isEmpty()) {
break;
}
}
if (changeMap != null && !changeMap.isEmpty() && myHistory != null) {
myHistory = myHistory.withBridge(instruction, changeMap);
}
}
/** A relation of a variable to a counterpart value, e.g. {@code EQ const} or {@code LT var}. */
static class Relation {
final @NotNull RelationType myRelationType;
final @NotNull DfaValue myCounterpart;
Relation(@NotNull RelationType type, @NotNull DfaValue counterpart) {
myRelationType = type;
myCounterpart = counterpart;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Relation relation = (Relation)o;
return myRelationType == relation.myRelationType &&
myCounterpart.equals(relation.myCounterpart);
}
@Override
public int hashCode() {
return Objects.hash(myRelationType, myCounterpart);
}
@Override
public String toString() {
return myRelationType + " " + myCounterpart;
}
}
/**
 * The difference recorded for a single variable at one instruction: relations
 * removed/added plus old/new type ({@link DfType#BOTTOM} means "type unchanged").
 */
static final class Change {
final @NotNull Set<Relation> myRemovedRelations;
final @NotNull Set<Relation> myAddedRelations;
final @NotNull DfType myOldType;
final @NotNull DfType myNewType;
private Change(@NotNull Set<Relation> removedRelations, @NotNull Set<Relation> addedRelations, @NotNull DfType oldType, @NotNull DfType newType) {
myRemovedRelations = removedRelations.isEmpty() ? Collections.emptySet() : removedRelations;
myAddedRelations = addedRelations.isEmpty() ? Collections.emptySet() : addedRelations;
myOldType = oldType;
myNewType = newType;
}
// Returns null for a no-op change so callers can drop it.
@Nullable
static Change create(Set<Relation> removedRelations, Set<Relation> addedRelations, DfType oldType, DfType newType) {
if (removedRelations.isEmpty() && addedRelations.isEmpty() && oldType == DfType.BOTTOM && newType == DfType.BOTTOM) {
return null;
}
return new Change(removedRelations, addedRelations, oldType, newType);
}
/**
 * Creates a Change which reflects changes actual for both this and other change
 * @param other other change to unite with
 * @return new change or null if this and other change has nothing in common
 */
@Nullable
Change unite(Change other) {
Set<Relation> added = new HashSet<>(ContainerUtil.intersection(myAddedRelations, other.myAddedRelations));
Set<Relation> removed = new HashSet<>(ContainerUtil.intersection(myRemovedRelations, other.myRemovedRelations));
DfType oldType = myOldType.join(other.myOldType);
DfType newType = myNewType.join(other.myNewType);
if (oldType.equals(newType)) {
// Joined types carry no information — mark the type part as unchanged.
oldType = newType = DfType.BOTTOM;
}
return create(removed, added, oldType, newType);
}
@Override
public String toString() {
String removed = StreamEx.of(myRemovedRelations).map(Object::toString).append(myOldType.toString())
.without("").joining(", ");
String added = StreamEx.of(myAddedRelations).map(Object::toString).append(myNewType.toString())
.without("").joining(", ");
return (removed.isEmpty() ? "" : "-{" + removed + "} ") + (added.isEmpty() ? "" : "+{" + added + "}");
}
}
/**
 * One node of the change history: the instruction executed, the per-variable
 * changes it caused, the resulting top of stack, and any bridge changes.
 * A node may have several predecessors (after a state merge); {@code myCursor}
 * selects which predecessor branch is currently being walked.
 */
static final class MemoryStateChange {
private final @NotNull List<MemoryStateChange> myPrevious;
final @NotNull Instruction myInstruction;
final @NotNull Map<DfaVariableValue, Change> myChanges;
final @NotNull DfaValue myTopOfStack;
final @NotNull Map<DfaVariableValue, Change> myBridgeChanges;
// Index into myPrevious of the branch currently selected; advanced by advance().
int myCursor = 0;
private MemoryStateChange(@NotNull List<MemoryStateChange> previous,
@NotNull Instruction instruction,
@NotNull Map<DfaVariableValue, Change> changes,
@NotNull DfaValue topOfStack,
@NotNull Map<DfaVariableValue, Change> bridgeChanges) {
myPrevious = previous;
myInstruction = instruction;
myChanges = changes;
myTopOfStack = topOfStack;
myBridgeChanges = bridgeChanges;
}
// Rewinds the cursor along the currently selected chain of predecessors.
void reset() {
for (MemoryStateChange change = this; change != null; change = change.getPrevious()) {
change.myCursor = 0;
}
}
// Moves to the next predecessor combination (depth-first over branches);
// returns false when all combinations have been exhausted.
boolean advance() {
if (myCursor < myPrevious.size() && !myPrevious.get(myCursor).advance()) {
myCursor++;
MemoryStateChange previous = getPrevious();
if (previous != null) {
previous.reset();
}
}
return myCursor < myPrevious.size();
}
@Contract("null -> null")
@Nullable
MemoryStateChange findExpressionPush(@Nullable PsiExpression expression) {
if (expression == null) return null;
return findChange(change -> change.getExpression() == expression, false);
}
// Finds the push of the expression itself or of a sub-expression that shares
// the same pass-through parent (e.g. a parenthesized or ternary operand).
@Contract("null -> null")
@Nullable
MemoryStateChange findSubExpressionPush(@Nullable PsiExpression expression) {
if (expression == null) return null;
PsiElement topElement = ExpressionUtils.getPassThroughParent(expression);
return findChange(change -> {
PsiExpression changeExpression = change.getExpression();
if (changeExpression == null) return false;
return changeExpression == expression ||
(PsiTreeUtil.isAncestor(expression, changeExpression, true) &&
ExpressionUtils.getPassThroughParent(changeExpression) == topElement);
}, false);
}
// Finds the history node where a matching relation was added for the variable
// (either directly, via bridge changes, or via an assignment to it).
MemoryStateChange findRelation(DfaVariableValue value, @NotNull Predicate<Relation> relationPredicate, boolean startFromSelf) {
return findChange(change -> {
if (change.myInstruction instanceof AssignInstruction && change.myTopOfStack == value) return true;
Change varChange = change.myChanges.get(value);
if (varChange != null && varChange.myAddedRelations.stream().anyMatch(relationPredicate)) return true;
Change bridgeVarChange = change.myBridgeChanges.get(value);
return bridgeVarChange != null && bridgeVarChange.myAddedRelations.stream().anyMatch(relationPredicate);
}, startFromSelf);
}
/**
 * Walks the history to find where the given fact (nullability, constraint, range)
 * about {@code value} was established; falls back to the inherent/declared type
 * when no change mentions it. Binary-op values are handled by combining the
 * facts of both operands.
 */
@NotNull
<T> FactDefinition<T> findFact(DfaValue value, FactExtractor<T> extractor) {
if (value instanceof DfaVariableValue) {
for (MemoryStateChange change = this; change != null; change = change.getPrevious()) {
FactDefinition<T> factPair = factFromChange(extractor, change, change.myChanges.get(value));
if (factPair != null) return factPair;
if (!(change.myInstruction instanceof ConditionalGotoInstruction)) {
factPair = factFromChange(extractor, change, change.myBridgeChanges.get(value));
if (factPair != null) return factPair;
}
if (change.myInstruction instanceof AssignInstruction && change.myTopOfStack == value && change.getPrevious() != null) {
// The fact comes from the assigned value; attribute it to the assignment node.
FactDefinition<T> fact = change.getPrevious().findFact(value, extractor);
return new FactDefinition<>(change, fact.myFact, fact.myOldFact);
}
}
return new FactDefinition<>(null, extractor.extract(((DfaVariableValue)value).getInherentType()), null);
}
if (value instanceof DfaBinOpValue) {
DfaBinOpValue binOp = (DfaBinOpValue)value;
FactDefinition<T> left = findFact(binOp.getLeft(), extractor);
FactDefinition<T> right = findFact(binOp.getRight(), extractor);
if (left.myFact instanceof LongRangeSet && right.myFact instanceof LongRangeSet) {
LongRangeBinOp op = binOp.getOperation();
@SuppressWarnings("unchecked")
T result = (T)op.eval((LongRangeSet)left.myFact, (LongRangeSet)right.myFact, binOp.getDfType().getLongRangeType());
T oldFact = null;
if (left.myOldFact != null && right.myOldFact != null) {
//noinspection unchecked
oldFact = (T)op.eval((LongRangeSet)left.myOldFact, (LongRangeSet)right.myOldFact, binOp.getDfType().getLongRangeType());
}
return new FactDefinition<>(null, Objects.requireNonNull(result), oldFact);
}
}
return new FactDefinition<>(null, extractor.extract(value.getDfType()), null);
}
// Predecessor on the currently selected branch; null when exhausted/at the root.
@Nullable
MemoryStateChange getPrevious() {
return myCursor == myPrevious.size() ? null : myPrevious.get(myCursor);
}
// Skips a synthetic merge node, returning the underlying real change.
public MemoryStateChange getNonMerge() {
MemoryStateChange change = myInstruction instanceof MergeInstruction ? getPrevious() : this;
assert change == null || !(change.myInstruction instanceof MergeInstruction);
return change;
}
@Nullable
private static <T> FactDefinition<T> factFromChange(FactExtractor<T> extractor, MemoryStateChange change, Change varChange) {
if (varChange != null) {
T newFact = extractor.extract(varChange.myNewType);
T oldFact = extractor.extract(varChange.myOldType);
if (!newFact.equals(oldFact)) {
if (change.myInstruction instanceof EnsureInstruction &&
((EnsureInstruction)change.myInstruction).getProblem() instanceof ContractFailureProblem) {
// Contract-failure ensure nodes are synthetic; attribute to the real predecessor.
change = change.getPrevious();
}
return new FactDefinition<>(change, newFact, oldFact);
}
}
return null;
}
// Linear search backwards through the current branch of the history.
@Nullable
private MemoryStateChange findChange(@NotNull Predicate<MemoryStateChange> predicate, boolean startFromSelf) {
for (MemoryStateChange change = startFromSelf ? this : getPrevious(); change != null; change = change.getPrevious()) {
if (predicate.test(change)) {
return change;
}
}
return null;
}
// PSI expression this node's instruction is anchored to, if any.
@Nullable
PsiExpression getExpression() {
if (myInstruction instanceof ExpressionPushingInstruction) {
DfaAnchor anchor = ((ExpressionPushingInstruction)myInstruction).getDfaAnchor();
if (anchor instanceof JavaExpressionAnchor) {
return ((JavaExpressionAnchor)anchor).getExpression();
}
}
if (myInstruction instanceof ConditionalGotoInstruction) {
return ObjectUtils.tryCast(((ConditionalGotoInstruction)myInstruction).getPsiAnchor(), PsiExpression.class);
}
if (myInstruction instanceof EnsureInstruction) {
UnsatisfiedConditionProblem problem = ((EnsureInstruction)myInstruction).getProblem();
if (problem instanceof JvmDfaProblem) {
return ObjectUtils.tryCast(((JvmDfaProblem<?>)problem).getAnchor(), PsiExpression.class);
}
}
return null;
}
// Merges two histories into a synthetic MergeInstruction node, flattening
// existing merge nodes so predecessors stay deduplicated.
@NotNull
public MemoryStateChange merge(MemoryStateChange change) {
if (change == this) return this;
Set<MemoryStateChange> previous = new LinkedHashSet<>();
if (myInstruction instanceof MergeInstruction) {
previous.addAll(myPrevious);
} else {
previous.add(this);
}
if (change.myInstruction instanceof MergeInstruction) {
previous.addAll(change.myPrevious);
} else {
previous.add(change);
}
if (previous.size() == 1) {
return previous.iterator().next();
}
return new MemoryStateChange(new ArrayList<>(previous), new MergeInstruction(), Collections.emptyMap(), myTopOfStack.getFactory().getUnknown(),
Collections.emptyMap());
}
// Attaches bridge changes to this node when it matches the instruction
// (or its conditional-goto anchor); otherwise creates a new bridge-only node.
MemoryStateChange withBridge(@NotNull Instruction instruction, @NotNull Map<DfaVariableValue, Change> bridge) {
if (myInstruction != instruction) {
if (instruction instanceof ConditionalGotoInstruction &&
getExpression() == ((ConditionalGotoInstruction)instruction).getPsiAnchor()) {
instruction = myInstruction;
} else {
return new MemoryStateChange(
Collections.singletonList(this), instruction, Collections.emptyMap(), myTopOfStack.getFactory().getUnknown(), bridge);
}
}
assert myBridgeChanges.isEmpty();
return new MemoryStateChange(myPrevious, instruction, myChanges, myTopOfStack, bridge);
}
// Creates a node, or returns 'previous' unchanged when there is nothing to record.
@Nullable
static MemoryStateChange create(@Nullable MemoryStateChange previous,
@NotNull Instruction instruction,
@NotNull Map<DfaVariableValue, Change> result,
@NotNull DfaValue value) {
if (result.isEmpty() && DfaTypeValue.isUnknown(value)) {
return previous;
}
return new MemoryStateChange(ContainerUtil.createMaybeSingletonList(previous), instruction, result, value, Collections.emptyMap());
}
// Current branch of the history in execution order (oldest first).
MemoryStateChange[] flatten() {
List<MemoryStateChange> changes = StreamEx.iterate(this, Objects::nonNull, change -> change.getPrevious()).toList();
Collections.reverse(changes);
return changes.toArray(new MemoryStateChange[0]);
}
String dump() {
return StreamEx.of(flatten()).joining("\n");
}
@Override
public String toString() {
return myInstruction.getIndex() + " " + myInstruction + ": " + myTopOfStack +
(myChanges.isEmpty() ? "" :
"; Changes: " + EntryStream.of(myChanges).join(": ", "\n\t", "").joining()) +
(myBridgeChanges.isEmpty() ? "" :
"; Bridge changes: " + EntryStream.of(myBridgeChanges).join(": ", "\n\t", "").joining());
}
}
// Synthetic no-op instruction marking the merge point of two histories.
private static class MergeInstruction extends Instruction {
@Override
public DfaInstructionState[] accept(@NotNull DataFlowInterpreter interpreter, @NotNull DfaMemoryState stateBefore) {
return DfaInstructionState.EMPTY_ARRAY;
}
@Override
public String toString() {
return "STATE_MERGE";
}
}
/** A fact value together with the history node where it was established (if known). */
static class FactDefinition<T> {
final @Nullable MemoryStateChange myChange;
/** Fact value after change */
final @NotNull T myFact;
/** Fact value before change, if known */
final @Nullable T myOldFact;
FactDefinition(@Nullable MemoryStateChange change, @NotNull T fact, @Nullable T oldFact) {
myChange = change;
myFact = fact;
myOldFact = oldFact;
}
@Override
public String toString() {
return myFact + " @ " + myChange;
}
}
// Strategy for extracting a particular kind of fact from a DfType.
interface FactExtractor<T> {
@NotNull T extract(DfType type);
static FactExtractor<DfaNullability> nullability() {
return DfaNullability::fromDfType;
}
static FactExtractor<TypeConstraint> constraint() {
return TypeConstraint::fromDfType;
}
static FactExtractor<LongRangeSet> range() {
return DfLongType::extractRange;
}
}
}
| |
/*
Derby - Class org.apache.derby.diag.StatementCache
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.diag;
import java.security.PrivilegedAction;
import java.security.AccessController;
import java.sql.ResultSetMetaData;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Collection;
import java.util.Iterator;
import java.util.Vector;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.shared.common.reference.Limits;
import org.apache.derby.iapi.services.cache.CacheManager;
import org.apache.derby.iapi.services.context.Context;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.sql.ResultColumnDescriptor;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.util.StringUtil;
import org.apache.derby.impl.jdbc.EmbedResultSetMetaData;
import org.apache.derby.impl.sql.GenericPreparedStatement;
import org.apache.derby.impl.sql.GenericStatement;
import org.apache.derby.impl.sql.conn.CachedStatement;
import org.apache.derby.vti.VTITemplate;
/**
StatementCache is a virtual table that shows the contents of the SQL statement cache.
This virtual table can be invoked by calling it directly.
<PRE> select * from new org.apache.derby.diag.StatementCache() t</PRE>
<P>The StatementCache virtual table has the following columns:
<UL>
<LI> ID CHAR(36) - not nullable. Internal identifier of the compiled statement.
<LI> SCHEMANAME VARCHAR(128) - nullable. Schema the statement was compiled in.
<LI> SQL_TEXT VARCHAR(32672) - not nullable. Text of the statement
<LI> UNICODE BIT/BOOLEAN - not nullable. Always true.
<LI> VALID BIT/BOOLEAN - not nullable. True if the statement is currently valid, false otherwise
<LI> COMPILED_AT TIMESTAMP nullable - time statement was compiled, requires STATISTICS TIMING to be enabled.
</UL>
<P>
The internal identifier of a cached statement matches the toString() method of a PreparedStatement object for a Derby database.
<P>
This class also provides a static method to empty the statement cache, StatementCache.emptyCache()
*/
public final class StatementCache extends VTITemplate {

    /** Cursor position within {@link #data}; -1 before the first {@link #next()} call. */
    private int position = -1;
    /** Snapshot of the cached statements, or null when no cache exists / rows exhausted. */
    private Vector<GenericPreparedStatement> data;
    /** Statement at the current cursor position. */
    private GenericPreparedStatement currentPs;
    /** Whether the most recently read column was SQL NULL. */
    private boolean wasNull;

    /**
     * Captures a snapshot of the current connection's SQL statement cache.
     *
     * @throws StandardException if the caller is not allowed to run diagnostics
     */
    public StatementCache() throws StandardException {
        DiagUtil.checkAccess();

        LanguageConnectionContext lcc = (LanguageConnectionContext)
            getContextOrNull(LanguageConnectionContext.CONTEXT_ID);

        CacheManager statementCache =
            lcc.getLanguageConnectionFactory().getStatementCache();

        if (statementCache != null) {
            // Use a wildcard type and enhanced for loop instead of raw
            // Collection/Iterator; the per-element casts are unchanged.
            final Collection<?> values = statementCache.values();
            data = new Vector<GenericPreparedStatement>(values.size());
            for (Object value : values) {
                final CachedStatement cs = (CachedStatement) value;
                data.add((GenericPreparedStatement) cs.getPreparedStatement());
            }
        }
    }

    /**
     * Advances the cursor to the next non-null cached statement.
     *
     * @return true if a row is available, false when the snapshot is exhausted
     */
    public boolean next() {
        if (data == null)
            return false;

        position++;

        for (; position < data.size(); position++) {
            // Vector is parameterized, so the former explicit cast is redundant.
            currentPs = data.get(position);
            if (currentPs != null)
                return true;
        }

        // Drop the snapshot once fully traversed so it can be collected.
        data = null;
        return false;
    }

    /** Releases the snapshot and the current row. */
    public void close() {
        data = null;
        currentPs = null;
    }

    /**
     * Returns the string columns: 1 = ID, 2 = SCHEMANAME, 3 = SQL_TEXT
     * (truncated to the VARCHAR limit); other ids yield null.
     */
    public String getString(int colId) {
        wasNull = false;
        switch (colId) {
        case 1:
            return currentPs.getObjectName();
        case 2:
            return ((GenericStatement) currentPs.statement).getCompilationSchema();
        case 3:
            String sql = currentPs.getSource();
            sql = StringUtil.truncate(sql, Limits.DB2_VARCHAR_MAXWIDTH);
            return sql;
        default:
            return null;
        }
    }

    /**
     * Returns the boolean columns: 4 = UNICODE (always true), 5 = VALID;
     * other ids yield false.
     */
    public boolean getBoolean(int colId) {
        wasNull = false;
        switch (colId) {
        case 4:
            // was/is UniCode column, but since Derby 10.0 all
            // statements are compiled and submitted as UniCode.
            return true;
        case 5:
            return currentPs.isValid();
        default:
            return false;
        }
    }

    /**
     * Returns the COMPILED_AT timestamp (the only TIMESTAMP column, id 6);
     * null — and wasNull set — when compile timing was not recorded.
     */
    public Timestamp getTimestamp(int colId) {
        Timestamp ts = currentPs.getEndCompileTimestamp();
        wasNull = (ts == null);
        return ts;
    }

    public boolean wasNull() {
        return wasNull;
    }

    /*
    ** Metadata
    */
    private static final ResultColumnDescriptor[] columnInfo = {
        EmbedResultSetMetaData.getResultColumnDescriptor("ID", Types.CHAR, false, 36),
        EmbedResultSetMetaData.getResultColumnDescriptor("SCHEMANAME", Types.VARCHAR, true, 128),
        EmbedResultSetMetaData.getResultColumnDescriptor("SQL_TEXT", Types.VARCHAR, false, Limits.DB2_VARCHAR_MAXWIDTH),
        EmbedResultSetMetaData.getResultColumnDescriptor("UNICODE", Types.BIT, false),
        EmbedResultSetMetaData.getResultColumnDescriptor("VALID", Types.BIT, false),
        EmbedResultSetMetaData.getResultColumnDescriptor("COMPILED_AT", Types.TIMESTAMP, true),
    };

    private static final ResultSetMetaData metadata =
        new EmbedResultSetMetaData(columnInfo);

    public ResultSetMetaData getMetaData() {
        return metadata;
    }

    /**
     * Privileged lookup of a Context. Must be private so that user code
     * can't call this entry point.
     */
    private static Context getContextOrNull( final String contextID )
    {
        if ( System.getSecurityManager() == null )
        {
            return ContextService.getContextOrNull( contextID );
        }
        else
        {
            return AccessController.doPrivileged
            (
                new PrivilegedAction<Context>()
                {
                    public Context run()
                    {
                        return ContextService.getContextOrNull( contextID );
                    }
                }
            );
        }
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.concurrent.Semaphore;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
/**
* Test the {@link ActiveMasterManager}.
*/
@Category(MediumTests.class)
public class TestActiveMasterManager {
  private final static Log LOG = LogFactory.getLog(TestActiveMasterManager.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  /** Starts the mini ZooKeeper cluster shared by every test in this class. */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniZKCluster();
  }

  /** Tears down the shared mini ZooKeeper cluster. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniZKCluster();
  }

  /**
   * Verifies that a master can become the active master, and that after a
   * simulated restart a master can become active again.
   */
  @Test public void testRestartMaster() throws IOException, KeeperException {
    ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
      "testActiveMasterManagerFromZK", null, true);
    // Remove any master-address / cluster-state znodes left over from other
    // tests; NoNodeException just means they were already absent.
    try {
      ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
      ZKUtil.deleteNode(zk, zk.clusterStateZNode);
    } catch(KeeperException.NoNodeException nne) {}
    // Create the master node with a dummy address
    ServerName master = new ServerName("localhost", 1, System.currentTimeMillis());
    // Should not have a master yet
    DummyMaster dummyMaster = new DummyMaster(zk,master);
    ClusterStatusTracker clusterStatusTracker =
      dummyMaster.getClusterStatusTracker();
    ActiveMasterManager activeMasterManager =
      dummyMaster.getActiveMasterManager();
    assertFalse(activeMasterManager.clusterHasActiveMaster.get());
    // First test becoming the active master uninterrupted
    MonitoredTask status = Mockito.mock(MonitoredTask.class);
    clusterStatusTracker.setClusterUp();
    activeMasterManager.blockUntilBecomingActiveMaster(status);
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, master);
    // Now pretend master restart
    DummyMaster secondDummyMaster = new DummyMaster(zk,master);
    ActiveMasterManager secondActiveMasterManager =
      secondDummyMaster.getActiveMasterManager();
    assertFalse(secondActiveMasterManager.clusterHasActiveMaster.get());
    // NOTE(review): this blocks on the FIRST manager again rather than on
    // secondActiveMasterManager; confirm whether the restarted (second)
    // manager was meant to be exercised here.
    activeMasterManager.blockUntilBecomingActiveMaster(status);
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, master);
  }

  /**
   * Unit tests that uses ZooKeeper but does not use the master-side methods
   * but rather acts directly on ZK.
   * @throws Exception
   */
  @Test
  public void testActiveMasterManagerFromZK() throws Exception {
    ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
      "testActiveMasterManagerFromZK", null, true);
    // Clean out leftover znodes so this test starts from a known state.
    try {
      ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
      ZKUtil.deleteNode(zk, zk.clusterStateZNode);
    } catch(KeeperException.NoNodeException nne) {}
    // Create the master node with a dummy address
    ServerName firstMasterAddress =
      new ServerName("localhost", 1, System.currentTimeMillis());
    ServerName secondMasterAddress =
      new ServerName("localhost", 2, System.currentTimeMillis());
    // Should not have a master yet
    DummyMaster ms1 = new DummyMaster(zk,firstMasterAddress);
    ActiveMasterManager activeMasterManager =
      ms1.getActiveMasterManager();
    assertFalse(activeMasterManager.clusterHasActiveMaster.get());
    // First test becoming the active master uninterrupted
    ClusterStatusTracker clusterStatusTracker =
      ms1.getClusterStatusTracker();
    clusterStatusTracker.setClusterUp();
    activeMasterManager.blockUntilBecomingActiveMaster(
      Mockito.mock(MonitoredTask.class));
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertMaster(zk, firstMasterAddress);
    // New manager will now try to become the active master in another thread
    WaitToBeMasterThread t = new WaitToBeMasterThread(zk, secondMasterAddress);
    t.start();
    // Wait for this guy to figure out there is another active master
    // Wait for 1 second at most
    int sleeps = 0;
    while(!t.manager.clusterHasActiveMaster.get() && sleeps < 100) {
      Thread.sleep(10);
      sleeps++;
    }
    // Both should see that there is an active master
    assertTrue(activeMasterManager.clusterHasActiveMaster.get());
    assertTrue(t.manager.clusterHasActiveMaster.get());
    // But secondary one should not be the active master
    assertFalse(t.isActiveMaster);
    // Close the first server and delete it's master node
    ms1.stop("stopping first server");
    // Use a listener to capture when the node is actually deleted
    NodeDeletionListener listener = new NodeDeletionListener(zk, zk.getMasterAddressZNode());
    zk.registerListener(listener);
    LOG.info("Deleting master node");
    ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
    // Wait for the node to be deleted
    LOG.info("Waiting for active master manager to be notified");
    listener.waitForDeletion();
    LOG.info("Master node deleted");
    // Now we expect the secondary manager to have and be the active master
    // Wait for 1 second at most
    sleeps = 0;
    while(!t.isActiveMaster && sleeps < 100) {
      Thread.sleep(10);
      sleeps++;
    }
    LOG.debug("Slept " + sleeps + " times");
    assertTrue(t.manager.clusterHasActiveMaster.get());
    assertTrue(t.isActiveMaster);
    LOG.info("Deleting master node");
    ZKUtil.deleteNode(zk, zk.getMasterAddressZNode());
  }

  /**
   * Assert there is an active master and that it has the specified address.
   * @param zk watcher connected to the test ZK cluster
   * @param expectedAddress the address the active master should have
   * @throws KeeperException
   * @throws IOException
   */
  private void assertMaster(ZooKeeperWatcher zk,
      ServerName expectedAddress)
  throws KeeperException, IOException {
    ServerName readAddress = MasterAddressTracker.getMasterAddress(zk);
    assertNotNull(readAddress);
    assertTrue(expectedAddress.equals(readAddress));
  }

  /** Thread that blocks until its ActiveMasterManager becomes the active master. */
  public static class WaitToBeMasterThread extends Thread {
    ActiveMasterManager manager;
    DummyMaster dummyMaster;
    // Set to true only after blockUntilBecomingActiveMaster() returns.
    boolean isActiveMaster;

    public WaitToBeMasterThread(ZooKeeperWatcher zk, ServerName address) {
      this.dummyMaster = new DummyMaster(zk,address);
      this.manager = this.dummyMaster.getActiveMasterManager();
      isActiveMaster = false;
    }

    @Override
    public void run() {
      manager.blockUntilBecomingActiveMaster(
        Mockito.mock(MonitoredTask.class));
      LOG.info("Second master has become the active master!");
      isActiveMaster = true;
    }
  }

  /** ZK listener that releases a semaphore when a specific znode is deleted. */
  public static class NodeDeletionListener extends ZooKeeperListener {
    private static final Log LOG = LogFactory.getLog(NodeDeletionListener.class);

    private Semaphore lock;
    private String node;

    public NodeDeletionListener(ZooKeeperWatcher watcher, String node) {
      super(watcher);
      lock = new Semaphore(0);
      this.node = node;
    }

    @Override
    public void nodeDeleted(String path) {
      if(path.equals(node)) {
        LOG.debug("nodeDeleted(" + path + ")");
        lock.release();
      }
    }

    /** Blocks until nodeDeleted() fires for the watched znode. */
    public void waitForDeletion() throws InterruptedException {
      lock.acquire();
    }
  }

  /**
   * Dummy Master Implementation.
   */
  public static class DummyMaster implements Server {
    private volatile boolean stopped;
    private ClusterStatusTracker clusterStatusTracker;
    private ActiveMasterManager activeMasterManager;

    public DummyMaster(ZooKeeperWatcher zk, ServerName master) {
      this.clusterStatusTracker =
        new ClusterStatusTracker(zk, this);
      clusterStatusTracker.start();
      this.activeMasterManager =
        new ActiveMasterManager(zk, master, this);
      zk.registerListener(activeMasterManager);
    }

    @Override
    public void abort(final String msg, final Throwable t) {}

    @Override
    public boolean isAborted() {
      return false;
    }

    // The remaining Server methods are unused by these tests; they return
    // null / defaults.
    @Override
    public Configuration getConfiguration() {
      return null;
    }

    @Override
    public ZooKeeperWatcher getZooKeeper() {
      return null;
    }

    @Override
    public ServerName getServerName() {
      return null;
    }

    @Override
    public boolean isStopped() {
      return this.stopped;
    }

    @Override
    public void stop(String why) {
      this.stopped = true;
    }

    @Override
    public CatalogTracker getCatalogTracker() {
      return null;
    }

    public ClusterStatusTracker getClusterStatusTracker() {
      return clusterStatusTracker;
    }

    public ActiveMasterManager getActiveMasterManager() {
      return activeMasterManager;
    }
  }
}
| |
package com.melonlee.ewd.json;
/*
Copyright (c) 2008 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import java.util.Iterator;
/**
* This provides static methods to convert an XML text into a JSONArray or
* JSONObject, and to covert a JSONArray or JSONObject into an XML text using
* the JsonML transform.
*
* @author JSON.org
* @version 2012-03-28
*/
public class JSONML {

    /**
     * Parse XML values and store them in a JSONArray.
     * @param x The XMLTokener containing the source string.
     * @param arrayForm true if array form, false if object form.
     * @param ja The JSONArray that is containing the current tag or null
     *     if we are at the outermost level.
     * @return A JSONArray if the value is the outermost tag, otherwise null.
     * @throws JSONException on malformed XML
     */
    private static Object parse(
        XMLTokener x,
        boolean arrayForm,
        JSONArray ja
    ) throws JSONException {
        String attribute;
        char c;
        String closeTag = null;
        int i;
        JSONArray newja = null;
        JSONObject newjo = null;
        Object token;
        String tagName = null;

        // Test for and skip past these forms:
        //      <!-- ... -->
        //      <![  ... ]]>
        //      <!   ...   >
        //      <?   ...  ?>
        while (true) {
            if (!x.more()) {
                throw x.syntaxError("Bad XML");
            }
            token = x.nextContent();
            if (token == XML.LT) {
                token = x.nextToken();
                if (token instanceof Character) {
                    if (token == XML.SLASH) {
                        // Close tag </name>
                        token = x.nextToken();
                        if (!(token instanceof String)) {
                            throw new JSONException(
                                    "Expected a closing name instead of '" +
                                    token + "'.");
                        }
                        if (x.nextToken() != XML.GT) {
                            throw x.syntaxError("Misshaped close tag");
                        }
                        // Return the close-tag name so the caller can match it
                        // against the tag it opened.
                        return token;
                    } else if (token == XML.BANG) {
                        // <! forms
                        c = x.next();
                        if (c == '-') {
                            // Comment <!-- ... --> : skipped entirely.
                            if (x.next() == '-') {
                                x.skipPast("-->");
                            } else {
                                x.back();
                            }
                        } else if (c == '[') {
                            // CDATA section: its text becomes a child node.
                            token = x.nextToken();
                            if (token.equals("CDATA") && x.next() == '[') {
                                if (ja != null) {
                                    ja.put(x.nextCDATA());
                                }
                            } else {
                                throw x.syntaxError("Expected 'CDATA['");
                            }
                        } else {
                            // Other declarations (e.g. DOCTYPE): skip them,
                            // tracking nested '<' / '>' pairs with a counter.
                            i = 1;
                            do {
                                token = x.nextMeta();
                                if (token == null) {
                                    throw x.syntaxError("Missing '>' after '<!'.");
                                } else if (token == XML.LT) {
                                    i += 1;
                                } else if (token == XML.GT) {
                                    i -= 1;
                                }
                            } while (i > 0);
                        }
                    } else if (token == XML.QUEST) {
                        // <? processing instruction: skipped.
                        x.skipPast("?>");
                    } else {
                        throw x.syntaxError("Misshaped tag");
                    }
                // Open tag <
                } else {
                    if (!(token instanceof String)) {
                        throw x.syntaxError("Bad tagName '" + token + "'.");
                    }
                    tagName = (String)token;
                    newja = new JSONArray();
                    newjo = new JSONObject();
                    if (arrayForm) {
                        // Array form: ["tagName", {attributes}?, children...]
                        newja.put(tagName);
                        if (ja != null) {
                            ja.put(newja);
                        }
                    } else {
                        // Object form: {"tagName":..., attrs..., "childNodes":[...]}
                        newjo.put("tagName", tagName);
                        if (ja != null) {
                            ja.put(newjo);
                        }
                    }
                    token = null;
                    // Collect attribute = value pairs until a non-string token
                    // (SLASH or GT) ends the tag header.
                    for (;;) {
                        if (token == null) {
                            token = x.nextToken();
                        }
                        if (token == null) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        if (!(token instanceof String)) {
                            break;
                        }
                        // attribute = value
                        attribute = (String)token;
                        if (!arrayForm && ("tagName".equals(attribute) || "childNode".equals(attribute))) {
                            throw x.syntaxError("Reserved attribute.");
                        }
                        token = x.nextToken();
                        if (token == XML.EQ) {
                            token = x.nextToken();
                            if (!(token instanceof String)) {
                                throw x.syntaxError("Missing value");
                            }
                            newjo.accumulate(attribute, XML.stringToValue((String)token));
                            token = null;
                        } else {
                            // Valueless attribute, e.g. <input disabled>.
                            newjo.accumulate(attribute, "");
                        }
                    }
                    if (arrayForm && newjo.length() > 0) {
                        newja.put(newjo);
                    }
                    // Empty tag <.../>
                    if (token == XML.SLASH) {
                        if (x.nextToken() != XML.GT) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        if (ja == null) {
                            if (arrayForm) {
                                return newja;
                            } else {
                                return newjo;
                            }
                        }
                    // Content, between <...> and </...>
                    } else {
                        if (token != XML.GT) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        // Recurse to collect children; the recursive call
                        // returns the name of the close tag it consumed.
                        closeTag = (String)parse(x, arrayForm, newja);
                        if (closeTag != null) {
                            if (!closeTag.equals(tagName)) {
                                throw x.syntaxError("Mismatched '" + tagName +
                                        "' and '" + closeTag + "'");
                            }
                            tagName = null;
                            if (!arrayForm && newja.length() > 0) {
                                newjo.put("childNodes", newja);
                            }
                            if (ja == null) {
                                if (arrayForm) {
                                    return newja;
                                } else {
                                    return newjo;
                                }
                            }
                        }
                    }
                }
            } else {
                // Plain text content between tags.
                if (ja != null) {
                    ja.put(token instanceof String
                            ? XML.stringToValue((String)token)
                            : token);
                }
            }
        }
    }

    /**
     * Convert a well-formed (but not necessarily valid) XML string into a
     * JSONArray using the JsonML transform. Each XML tag is represented as
     * a JSONArray in which the first element is the tag name. If the tag has
     * attributes, then the second element will be JSONObject containing the
     * name/value pairs. If the tag contains children, then strings and
     * JSONArrays will represent the child tags.
     * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
     * @param string The source string.
     * @return A JSONArray containing the structured data from the XML string.
     * @throws JSONException
     */
    public static JSONArray toJSONArray(String string) throws JSONException {
        return toJSONArray(new XMLTokener(string));
    }

    /**
     * Convert a well-formed (but not necessarily valid) XML string into a
     * JSONArray using the JsonML transform. Each XML tag is represented as
     * a JSONArray in which the first element is the tag name. If the tag has
     * attributes, then the second element will be JSONObject containing the
     * name/value pairs. If the tag contains children, then strings and
     * JSONArrays will represent the child content and tags.
     * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
     * @param x An XMLTokener.
     * @return A JSONArray containing the structured data from the XML string.
     * @throws JSONException
     */
    public static JSONArray toJSONArray(XMLTokener x) throws JSONException {
        return (JSONArray)parse(x, true, null);
    }

    /**
     * Convert a well-formed (but not necessarily valid) XML string into a
     * JSONObject using the JsonML transform. Each XML tag is represented as
     * a JSONObject with a "tagName" property. If the tag has attributes, then
     * the attributes will be in the JSONObject as properties. If the tag
     * contains children, the object will have a "childNodes" property which
     * will be an array of strings and JsonML JSONObjects.
     * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
     * @param x An XMLTokener of the XML source text.
     * @return A JSONObject containing the structured data from the XML string.
     * @throws JSONException
     */
    public static JSONObject toJSONObject(XMLTokener x) throws JSONException {
        return (JSONObject)parse(x, false, null);
    }

    /**
     * Convert a well-formed (but not necessarily valid) XML string into a
     * JSONObject using the JsonML transform. Each XML tag is represented as
     * a JSONObject with a "tagName" property. If the tag has attributes, then
     * the attributes will be in the JSONObject as properties. If the tag
     * contains children, the object will have a "childNodes" property which
     * will be an array of strings and JsonML JSONObjects.
     * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
     * @param string The XML source text.
     * @return A JSONObject containing the structured data from the XML string.
     * @throws JSONException
     */
    public static JSONObject toJSONObject(String string) throws JSONException {
        return toJSONObject(new XMLTokener(string));
    }

    /**
     * Reverse the JSONML transformation, making an XML text from a JSONArray.
     * @param ja A JSONArray.
     * @return An XML string.
     * @throws JSONException
     */
    public static String toString(JSONArray ja) throws JSONException {
        int i;
        JSONObject jo;
        String key;
        Iterator keys;
        int length;
        Object object;
        StringBuffer sb = new StringBuffer();
        String tagName;
        String value;

        // Emit <tagName
        tagName = ja.getString(0);
        XML.noSpace(tagName);
        tagName = XML.escape(tagName);
        sb.append('<');
        sb.append(tagName);

        // An optional JSONObject in slot 1 carries the attributes.
        object = ja.opt(1);
        if (object instanceof JSONObject) {
            i = 2;
            jo = (JSONObject)object;

            // Emit the attributes
            keys = jo.keys();
            while (keys.hasNext()) {
                key = keys.next().toString();
                XML.noSpace(key);
                value = jo.optString(key);
                // NOTE(review): JSONObject.optString conventionally returns ""
                // (not null) for a missing key, so this null check may be
                // vacuous -- confirm against this project's JSONObject.
                if (value != null) {
                    sb.append(' ');
                    sb.append(XML.escape(key));
                    sb.append('=');
                    sb.append('"');
                    sb.append(XML.escape(value));
                    sb.append('"');
                }
            }
        } else {
            i = 1;
        }

        //Emit content in body
        length = ja.length();
        if (i >= length) {
            // No children: self-closing tag.
            sb.append('/');
            sb.append('>');
        } else {
            sb.append('>');
            do {
                object = ja.get(i);
                i += 1;
                if (object != null) {
                    if (object instanceof String) {
                        sb.append(XML.escape(object.toString()));
                    } else if (object instanceof JSONObject) {
                        sb.append(toString((JSONObject)object));
                    } else if (object instanceof JSONArray) {
                        sb.append(toString((JSONArray)object));
                    }
                }
            } while (i < length);
            sb.append('<');
            sb.append('/');
            sb.append(tagName);
            sb.append('>');
        }
        return sb.toString();
    }

    /**
     * Reverse the JSONML transformation, making an XML text from a JSONObject.
     * The JSONObject must contain a "tagName" property. If it has children,
     * then it must have a "childNodes" property containing an array of objects.
     * The other properties are attributes with string values.
     * @param jo A JSONObject.
     * @return An XML string.
     * @throws JSONException
     */
    public static String toString(JSONObject jo) throws JSONException {
        StringBuffer sb = new StringBuffer();
        int i;
        JSONArray ja;
        String key;
        Iterator keys;
        int length;
        Object object;
        String tagName;
        String value;

        //Emit <tagName
        tagName = jo.optString("tagName");
        // NOTE(review): optString conventionally returns "" for a missing key,
        // so this null check may never trigger -- confirm intended behavior
        // for objects without a "tagName".
        if (tagName == null) {
            return XML.escape(jo.toString());
        }
        XML.noSpace(tagName);
        tagName = XML.escape(tagName);
        sb.append('<');
        sb.append(tagName);

        //Emit the attributes
        keys = jo.keys();
        while (keys.hasNext()) {
            key = keys.next().toString();
            // "tagName" and "childNodes" are structural, not attributes.
            if (!"tagName".equals(key) && !"childNodes".equals(key)) {
                XML.noSpace(key);
                value = jo.optString(key);
                if (value != null) {
                    sb.append(' ');
                    sb.append(XML.escape(key));
                    sb.append('=');
                    sb.append('"');
                    sb.append(XML.escape(value));
                    sb.append('"');
                }
            }
        }

        //Emit content in body
        ja = jo.optJSONArray("childNodes");
        if (ja == null) {
            // No children: self-closing tag.
            sb.append('/');
            sb.append('>');
        } else {
            sb.append('>');
            length = ja.length();
            for (i = 0; i < length; i += 1) {
                object = ja.get(i);
                if (object != null) {
                    if (object instanceof String) {
                        sb.append(XML.escape(object.toString()));
                    } else if (object instanceof JSONObject) {
                        sb.append(toString((JSONObject)object));
                    } else if (object instanceof JSONArray) {
                        sb.append(toString((JSONArray)object));
                    } else {
                        sb.append(object.toString());
                    }
                }
            }
            sb.append('<');
            sb.append('/');
            sb.append(tagName);
            sb.append('>');
        }
        return sb.toString();
    }
}
| |
package job;
import global.Global;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import models.twitter.TwitterFeed;
import models.twitter.TwitterRequest;
import models.twitter.TwitterTweet;
import models.twitter.TwitterUser;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import play.Logger;
import twitter4j.RateLimitStatus;
import twitter4j.TwitterException;
import com.google.common.collect.Sets;
import crawler.TwitterCrawler;
public class TwitterJob implements Job {

    private TwitterCrawler crawler;
    private Map<String, RateLimitStatus> rateLimitStatus;
    // Target language and the minimum percentage of a sampled user's tweets
    // that must be in that language before the user is tracked for rescans.
    private String language;
    private int languagePercentage;
    // Remaining requests allowed against /statuses/user_timeline.
    private int timelineRequests;
    Set<Long> userIDs;
    Set<String> usernames;
    Set<TwitterUser> usersToRescan;

    /**
     * Quartz entry point: loads the job configuration, splits the configured
     * source usernames into already-known users (queued for rescan) and new
     * users (crawled from scratch), then runs the crawl. Any failure is
     * wrapped in a JobExecutionException that refires immediately.
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Fixed: previously logged "FACEBOOK JOB STARTED" (copy/paste from the
        // Facebook job).
        Logger.info("TWITTER JOB STARTED");
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        languagePercentage = Integer.parseInt(dataMap.getString("languagePercentage"));
        language = dataMap.getString("language");
        usernames = getSources(dataMap.getString("sourceFile"));
        userIDs = TwitterUser.getIds();
        usersToRescan = TwitterUser.getUsersToRescan();
        // Usernames already present in the store go to the rescan set; the
        // rest remain as genuinely new users to crawl.
        Set<String> unknownUsernames = Sets.newHashSet();
        for (String username : usernames) {
            TwitterUser u = TwitterUser.get(username);
            if (u != null) {
                usersToRescan.add(u);
            } else {
                unknownUsernames.add(username);
            }
        }
        usernames = unknownUsernames;
        crawler = Global.getTwitterCrawler();
        try {
            crawl();
        } catch (Exception e) {
            e.printStackTrace();
            JobExecutionException e2 = new JobExecutionException(e);
            e2.setRefireImmediately(true);
            throw e2;
        }
    }

    /**
     * Crawls the timelines of all new usernames, then rescans the users
     * already marked for rescanning.
     *
     * @throws TwitterException if a Twitter call fails beyond the retry logic
     */
    private void crawl() throws TwitterException {
        RateLimitStatus timelineLS = null;
        // Fetch the current timeline rate-limit status, sleeping out roughly
        // one rate-limit window (899s) after each failed attempt.
        // NOTE(review): if getRateLimitStatus() keeps throwing, this loops
        // forever -- consider a retry cap.
        do {
            try {
                rateLimitStatus = crawler.getRateLimitStatus();
                timelineLS = rateLimitStatus.get("/statuses/user_timeline");
                timelineRequests = timelineLS.getRemaining();
            } catch (TwitterException e) {
                waitFor(899);
            }
        } while (timelineLS == null);
        long averageTimePost = 0;
        TwitterRequest request = null;
        Logger.debug(" Limit: " + timelineLS.getLimit());
        Logger.debug(" Remaining: " + timelineLS.getRemaining());
        Logger.debug(" ResetTimeInSeconds: " + timelineLS.getResetTimeInSeconds());
        Logger.debug(" SecondsUntilReset: " + timelineLS.getSecondsUntilReset());
        // Pass 1: users we have never seen before.
        for (String username : usernames) {
            System.out.println(" -------- " + username + " -------- ");
            Logger.debug("New user: " + username);
            if (timelineRequests <= 0) {
                waitFor(timelineLS.getSecondsUntilReset());
                timelineRequests = timelineLS.getLimit();
            }
            TwitterUser ttUser = crawler.getUser(username);
            request = crawler.getTweets(ttUser, timelineLS.getRemaining());
            timelineRequests = request.requestRemaining;
            averageTimePost = getAveragePostTime(request.tweets);
            Set<TwitterUser> newUsers = persistData(ttUser, request, averageTimePost);
            usersToRescan.addAll(newUsers);
        }
        // Pass 2: known users due for a rescan. Users discovered during the
        // rescan are collected separately and merged afterwards -- the
        // original code called usersToRescan.addAll(...) while iterating
        // usersToRescan, which throws ConcurrentModificationException as soon
        // as a genuinely new user is found (fixed).
        Set<TwitterUser> discovered = Sets.newHashSet();
        for (TwitterUser ttUser : usersToRescan) {
            if (timelineRequests <= 0) {
                waitFor(timelineLS.getSecondsUntilReset());
                timelineRequests = timelineLS.getLimit();
            }
            Logger.debug(" -------- " + ttUser.username + " -------- ");
            String log = "Fetching tweets of " + ttUser.username;
            Long now = (new Date()).getTime();
            if (ttUser.feed == null)
                ttUser.feed = new TwitterFeed();
            if (!ttUser.feed.totallyScanned) { // if is not complete, get from the older of the list
                Logger.debug(log + " because is not totally scanned");
                request = crawler.getTweets(ttUser, TwitterTweet.getFirstTweetUnixTime(ttUser.id), Long.MAX_VALUE, timelineRequests);
                averageTimePost = getAveragePostTime(request.tweets);
            } else if (ttUser.feed.averageTimePost > 0 && now - ttUser.feed.lastTimeScanned > ttUser.feed.averageTimePost) { // if is complete, do a rescan for newer posts
                Logger.debug(log + " because was last scanned in " + (now - ttUser.feed.lastTimeScanned) + " and average time post is "
                        + ttUser.feed.averageTimePost);
                request = crawler.getTweets(ttUser, TwitterTweet.getLastTweetUnixTime(ttUser.id), timelineRequests);
                averageTimePost = (getAveragePostTime(request.tweets) + ttUser.feed.averageTimePost) / 2;
            } else {
                Logger.debug("Skipping " + ttUser.username);
                continue;
            }
            timelineRequests = request.requestRemaining;
            discovered.addAll(persistData(ttUser, request, averageTimePost));
        }
        usersToRescan.addAll(discovered);
    }

    /**
     * Persists a crawled user and its tweets, then walks the user's friends
     * and followers looking for new users whose sampled tweets are mostly in
     * the configured language; those users are marked for rescanning.
     *
     * @param ttUser          the user whose timeline was just crawled
     * @param request         the timeline request holding the user's tweets
     * @param averageTimePost average interval between the user's tweets (ms)
     * @return the set of newly discovered users that should be rescanned
     * @throws TwitterException if the rate-limit lookup fails
     */
    private Set<TwitterUser> persistData(TwitterUser ttUser, TwitterRequest request, long averageTimePost) throws TwitterException {
        int usersSaved = 0;
        int tweetsSaved = 0;
        // Renamed from "usersToRescan" to stop shadowing the field of the
        // same name.
        Set<TwitterUser> rescanCandidates = Sets.newHashSet();
        List<TwitterTweet> tweets = request.tweets;
        ttUser.feed = new TwitterFeed();
        ttUser.feed.averageTimePost = averageTimePost;
        ttUser.feed.lastTimeScanned = new Date().getTime();
        ttUser.feed.totallyScanned = request.gotAllTweets;
        ttUser.save();
        usersSaved++;
        userIDs.add(ttUser.id);
        Set<TwitterUser> newUsers = Sets.newHashSet();
        for (TwitterTweet tweet : tweets) {
            // Unseen tweet authors are candidate new users.
            if (tweet.author != null && userIDs.add(tweet.author.id)) {
                newUsers.add(tweet.author);
            }
            tweet.save();
            tweetsSaved++;
        }
        long friendCursor = -1;
        long followerCursor = -1;
        TwitterRequest reqFriends = null;
        TwitterRequest reqFollowers = null;
        RateLimitStatus timelineLS = crawler.getRateLimitStatus().get("/statuses/user_timeline");
        timelineRequests = timelineLS.getRemaining();
        do {
            // Collect one page of friends and one page of followers.
            reqFriends = getFriends(ttUser.username, friendCursor);
            if (reqFriends != null) {
                for (TwitterUser newUser : reqFriends.users) {
                    if (userIDs.add(newUser.id)) {
                        newUsers.add(newUser);
                    }
                }
                friendCursor = reqFriends.cursor;
            }
            reqFollowers = getFollowers(ttUser.username, followerCursor);
            if (reqFollowers != null) {
                for (TwitterUser newUser : reqFollowers.users) {
                    if (userIDs.add(newUser.id)) {
                        newUsers.add(newUser);
                    }
                }
                followerCursor = reqFollowers.cursor;
            }
            // Sample each new user's timeline to decide whether to track them.
            for (TwitterUser newUser : newUsers) {
                TwitterRequest req = null;
                // NOTE(review): if getTweets() keeps throwing a non-rate-limit
                // TwitterException this inner loop never terminates.
                do {
                    try {
                        Logger.debug("Requests remaining: " + timelineRequests);
                        if (timelineRequests <= 1) {
                            timelineLS = crawler.getRateLimitStatus().get("/statuses/user_timeline");
                            waitFor(timelineLS.getSecondsUntilReset());
                            timelineRequests = timelineLS.getLimit();
                        }
                        req = crawler.getTweets(newUser, 1);
                        timelineRequests = req.requestRemaining;
                        tweets = req.tweets;
                    } catch (TwitterException e) {
                        if (e.exceededRateLimitation()) {
                            waitFor(timelineLS.getSecondsUntilReset());
                            timelineRequests = timelineLS.getLimit();
                        }
                    }
                } while (req == null);
                if (tweets.size() == 0)
                    continue;
                // Share of the sampled tweets written in the target language.
                int languageCount = 0;
                for (TwitterTweet tweet : tweets) {
                    if (tweet.language != null && tweet.language.equals(language))
                        languageCount++;
                }
                int percentage = (languageCount * 100) / tweets.size();
                if (percentage >= languagePercentage) {
                    newUser.toRescan = true;
                    newUser.feed = new TwitterFeed();
                    newUser.feed.lastTimeScanned = new Date().getTime();
                    newUser.feed.totallyScanned = req.gotAllTweets;
                    newUser.feed.averageTimePost = getAveragePostTime(tweets);
                    rescanCandidates.add(newUser);
                    for (TwitterTweet tweet : tweets) {
                        tweet.save();
                        tweetsSaved++;
                    }
                } else {
                    newUser.toRescan = false;
                }
                newUser.save();
                usersSaved++;
            }
            newUsers.clear();
            Logger.debug("users saved: " + usersSaved);
            Logger.debug("tweets saved: " + tweetsSaved);
            // Fixed: the original condition was
            //     (a && b) && limitA || limitB
            // which, by && / || precedence, dereferenced reqFollowers even
            // when it was null (NPE). Parenthesized as intended: keep paging
            // only while both requests succeeded and either hit its limit.
        } while (reqFriends != null && reqFollowers != null
                && (reqFriends.requestLimitReached || reqFollowers.requestLimitReached));
        return rescanCandidates;
    }

    /**
     * Fetches one page of friends for the given user, waiting out the
     * rate-limit window when necessary.
     *
     * @return the friends page, or null after 10 failed attempts
     */
    private TwitterRequest getFriends(String username, long cursor) {
        TwitterRequest req = null;
        int errorCount = 0;
        while (errorCount < 10) {
            try {
                RateLimitStatus ls = crawler.getRateLimitStatus().get("/friends/list");
                if (ls.getRemaining() == 0) {
                    waitFor(ls.getSecondsUntilReset());
                }
                req = crawler.getFriends(username, cursor, 5);
                break;
            } catch (TwitterException e) {
                errorCount++;
                e.printStackTrace();
            }
        }
        return req;
    }

    /**
     * Fetches one page of followers for the given user, waiting out the
     * rate-limit window when necessary.
     *
     * @return the followers page, or null after 10 failed attempts
     */
    private TwitterRequest getFollowers(String username, long cursor) {
        TwitterRequest req = null;
        int errorCount = 0;
        while (errorCount < 10) {
            try {
                RateLimitStatus ls = crawler.getRateLimitStatus().get("/followers/list");
                if (ls.getRemaining() == 0) {
                    waitFor(ls.getSecondsUntilReset());
                }
                req = crawler.getFollowers(username, cursor, 5);
                break;
            } catch (TwitterException e) {
                errorCount++;
                e.printStackTrace();
            }
        }
        return req;
    }

    /**
     * Sleeps for the given number of seconds (used to wait out Twitter rate
     * limit windows).
     */
    private void waitFor(int seconds) {
        Logger.info("Waiting for " + seconds + " seconds until twitter rate limit resets");
        try {
            // 1000L forces long arithmetic (the original int multiply could
            // overflow for very large waits).
            Thread.sleep(seconds * 1000L);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the scheduler can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Returns the average gap (ms) between consecutive tweets, measured from
     * "now" back through the list. Returns 0 for null/empty input.
     */
    private Long getAveragePostTime(List<TwitterTweet> tweets) {
        if (tweets == null || tweets.size() == 0)
            return 0l;
        Long averagePostTime = 0l;
        long a = new Date().getTime();
        for (int i = 0; i < tweets.size() - 1; i++) {
            long b = tweets.get(i + 1).createdTime.getTime();
            averagePostTime += a - b;
            a = b;
        }
        // NOTE(review): only size-1 gaps are summed (tweets.get(0) is never
        // read), yet the sum is divided by size -- confirm this is intended.
        averagePostTime /= tweets.size();
        return averagePostTime;
    }

    /**
     * Reads the source-username file (one username per line).
     *
     * @param sourceFile path relative to the Play application directory
     * @return the set of usernames read; empty on I/O failure
     */
    private Set<String> getSources(String sourceFile) {
        Set<String> sources = Sets.newHashSet();
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(play.Play.application().getFile(sourceFile)));
            String line = null;
            while ((line = reader.readLine()) != null) {
                sources.add(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Fixed: reader is null when FileReader construction failed,
            // which previously caused an NPE in this finally block.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return sources;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.templeton.tool;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.hive.common.LogUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.hcatalog.templeton.UgiFactory;
/**
 * General utility methods for WebHCat (Templeton): emptiness checks, job
 * progress/ID extraction from child-job console output, encoded string
 * arrays, and HDFS path helpers that run under a per-user UGI.
 */
public class TempletonUtils {
  private static final Logger LOG = LoggerFactory.getLogger(TempletonUtils.class);

  /**
   * Is the object non-empty?
   */
  public static boolean isset(String s) {
    return (s != null) && (s.length() > 0);
  }

  /**
   * Is the object non-empty?
   */
  public static boolean isset(char ch) {
    return (ch != 0);
  }

  /**
   * Is the object non-empty?
   */
  public static <T> boolean isset(T[] a) {
    return (a != null) && (a.length > 0);
  }

  /**
   * Is the object non-empty?
   */
  public static <T> boolean isset(Collection<T> col) {
    return (col != null) && (!col.isEmpty());
  }

  /**
   * Is the object non-empty?
   */
  public static <K, V> boolean isset(Map<K, V> col) {
    return (col != null) && (!col.isEmpty());
  }

  //looking for map 100% reduce 100%
  public static final Pattern JAR_COMPLETE = Pattern.compile(" map \\d+%\\s+reduce \\d+%$");
  public static final Pattern PIG_COMPLETE = Pattern.compile(" \\d+% complete$");
  //looking for map = 100%, reduce = 100%
  public static final Pattern HIVE_COMPLETE = Pattern.compile(" map = (\\d+%),\\s+reduce = (\\d+%).*$");
  /**
   * Hive on Tez produces progress report that looks like this
   * Map 1: -/- Reducer 2: 0/1
   * Map 1: -/- Reducer 2: 0(+1)/1
   * Map 1: -/- Reducer 2: 1/1
   *
   * -/- means there are no tasks (yet)
   * 0/1 means 1 total tasks, 0 completed
   * 1(+2)/3 means 3 total, 1 completed and 2 running
   *
   * HIVE-8495, in particular https://issues.apache.org/jira/secure/attachment/12675504/Screen%20Shot%202014-10-16%20at%209.35.26%20PM.png
   * has more examples.
   * To report progress, we'll assume all tasks are equal size and compute "completed" as percent of "total"
   * "(Map|Reducer) (\\d+:) ((-/-)|(\\d+(\\(\\+\\d+\\))?/\\d+))" is the complete pattern but we'll drop "-/-" to exclude
   * groups that don't add information such as "Map 1: -/-"
   */
  public static final Pattern HIVE_TEZ_COMPLETE = Pattern.compile("(Map|Reducer) (\\d+:) (\\d+(\\(\\+\\d+\\))?/\\d+)");
  public static final Pattern HIVE_BEELINE_COMPLETE = Pattern.compile("VERTICES: .* (\\d+%)");
  /**
   * Pig on Tez produces progress report that looks like this
   * DAG Status: status=RUNNING, progress=TotalTasks: 3 Succeeded: 0 Running: 0 Failed: 0 Killed: 0
   *
   * Use Succeeded/TotalTasks to report progress
   * There is a hole as Pig might launch more than one DAGs. If this happens, user might
   * see progress rewind since the percentage is for the new DAG. To fix this, We need to fix
   * Pig print total number of DAGs on console, and track complete DAGs in WebHCat.
   */
  public static final Pattern PIG_TEZ_COMPLETE = Pattern.compile("progress=TotalTasks: (\\d+) Succeeded: (\\d+)");
  public static final Pattern TEZ_COUNTERS = Pattern.compile("\\d+");

  /**
   * Extract the percent complete line from Pig or Jar jobs.
   * Tries each known console format in turn; returns null when the line
   * carries no recognizable progress information.
   */
  public static String extractPercentComplete(String line) {
    Matcher jar = JAR_COMPLETE.matcher(line);
    if (jar.find())
      return jar.group().trim();

    Matcher pig = PIG_COMPLETE.matcher(line);
    if (pig.find())
      return pig.group().trim();

    Matcher beeline = HIVE_BEELINE_COMPLETE.matcher(line);
    if (beeline.find()) {
      return beeline.group(1).trim() + " complete";
    }

    Matcher hive = HIVE_COMPLETE.matcher(line);
    if (hive.find()) {
      return "map " + hive.group(1) + " reduce " + hive.group(2);
    }

    Matcher hiveTez = HIVE_TEZ_COMPLETE.matcher(line);
    if (hiveTez.find()) {
      int totalTasks = 0;
      int completedTasks = 0;
      do {
        //here each group looks something like "Map 2: 2/4" "Reducer 3: 1(+2)/4"
        //just parse the numbers and ignore one from "Map 2" and from "(+2)" if it's there
        Matcher counts = TEZ_COUNTERS.matcher(hiveTez.group());
        List<String> items = new ArrayList<String>(4);
        while (counts.find()) {
          items.add(counts.group());
        }
        // items = [vertexId, completed, total] or [vertexId, completed, running, total]
        completedTasks += Integer.parseInt(items.get(1));
        if (items.size() == 3) {
          totalTasks += Integer.parseInt(items.get(2));
        }
        else {
          totalTasks += Integer.parseInt(items.get(3));
        }
      } while (hiveTez.find());
      if (totalTasks == 0) {
        return "0% complete (0 total tasks)";
      }
      return completedTasks * 100 / totalTasks + "% complete";
    }

    Matcher pigTez = PIG_TEZ_COMPLETE.matcher(line);
    if (pigTez.find()) {
      int totalTasks = Integer.parseInt(pigTez.group(1));
      int completedTasks = Integer.parseInt(pigTez.group(2));
      if (totalTasks == 0) {
        return "0% complete (0 total tasks)";
      }
      return completedTasks * 100 / totalTasks + "% complete";
    }
    return null;
  }

  public static final Pattern JAR_ID = Pattern.compile(" Running job: (\\S+)$");
  public static final Pattern PIG_ID = Pattern.compile(" HadoopJobId: (\\S+)$");
  public static final Pattern[] ID_PATTERNS = {JAR_ID, PIG_ID};

  /**
   * Extract the job id from jar jobs.
   *
   * @return the first matching job id on the line, or null
   */
  public static String extractChildJobId(String line) {
    for (Pattern p : ID_PATTERNS) {
      Matcher m = p.matcher(line);
      if (m.find())
        return m.group(1);
    }
    return null;
  }

  /**
   * Take an array of strings and encode it into one string.
   * Null elements are encoded as empty strings.
   *
   * @return the encoded string, or null for null input
   */
  public static String encodeArray(String[] plain) {
    if (plain == null)
      return null;
    String[] escaped = new String[plain.length];
    for (int i = 0; i < plain.length; ++i) {
      // Treat null elements as "" WITHOUT mutating the caller's array
      // (the previous version wrote plain[i] = "" as a side effect).
      String item = (plain[i] == null) ? "" : plain[i];
      escaped[i] = StringUtils.escapeString(item);
    }
    return StringUtils.arrayToString(escaped);
  }

  /**
   * Encode a List into a string.
   */
  public static String encodeArray(List<String> list) {
    if (list == null)
      return null;
    String[] array = new String[list.size()];
    return encodeArray(list.toArray(array));
  }

  /**
   * Take an encoded string and decode it into an array of strings.
   */
  public static String[] decodeArray(String s) {
    if (s == null)
      return null;
    String[] escaped = StringUtils.split(s);
    String[] plain = new String[escaped.length];
    for (int i = 0; i < escaped.length; ++i)
      plain[i] = StringUtils.unEscapeString(escaped[i]);
    return plain;
  }

  /**
   * Resolves a comma-separated list of file names against HDFS as {@code user}.
   *
   * @return fully-qualified path strings, or null when either arg is null
   * @throws FileNotFoundException if any file does not exist
   */
  public static String[] hadoopFsListAsArray(String files, Configuration conf,
                                             String user)
    throws URISyntaxException, FileNotFoundException, IOException,
    InterruptedException {
    if (files == null || conf == null) {
      return null;
    }
    String[] dirty = files.split(",");
    String[] clean = new String[dirty.length];
    for (int i = 0; i < dirty.length; ++i)
      clean[i] = hadoopFsFilename(dirty[i], conf, user);
    return clean;
  }

  /**
   * Same as {@link #hadoopFsListAsArray} but re-joined into a single
   * Hadoop-escaped string.
   */
  public static String hadoopFsListAsString(String files, Configuration conf,
                                            String user)
    throws URISyntaxException, FileNotFoundException, IOException,
    InterruptedException {
    if (files == null || conf == null) {
      return null;
    }
    return StringUtils.arrayToString(hadoopFsListAsArray(files, conf, user));
  }

  /**
   * Resolves a single file name to its fully-qualified path string,
   * or null when {@code fname} is null.
   */
  public static String hadoopFsFilename(String fname, Configuration conf, String user)
    throws URISyntaxException, FileNotFoundException, IOException,
    InterruptedException {
    Path p = hadoopFsPath(fname, conf, user);
    if (p == null)
      return null;
    else
      return p.toString();
  }

  /**
   * Returns all files (non-recursive) in {@code dirName}
   */
  public static List<Path> hadoopFsListChildren(String dirName, Configuration conf, String user)
    throws URISyntaxException, IOException, InterruptedException {
    Path p = hadoopFsPath(dirName, conf, user);
    FileSystem fs = p.getFileSystem(conf);
    if (!fs.exists(p)) {
      return Collections.emptyList();
    }
    FileStatus[] children = fs.listStatus(p);
    if (!isset(children)) {
      return Collections.emptyList();
    }
    List<Path> files = new ArrayList<Path>();
    for (FileStatus stat : children) {
      files.add(stat.getPath());
    }
    return files;
  }

  /**
   * @return true iff we are sure the file is not there.
   */
  public static boolean hadoopFsIsMissing(FileSystem fs, Path p) {
    try {
      return !fs.exists(p);
    } catch (Throwable t) {
      // Got an error, might be there anyway due to a
      // permissions problem.
      return false;
    }
  }

  /**
   * Prefixes a relative path with {@code /user/<user>}; absolute paths and
   * scheme-only URIs (e.g. "hdfs://host:99") are resolved appropriately.
   */
  public static String addUserHomeDirectoryIfApplicable(String origPathStr, String user)
    throws IOException, URISyntaxException {
    if (origPathStr == null || origPathStr.isEmpty()) {
      return "/user/" + user;
    }
    Path p = new Path(origPathStr);
    if (p.isAbsolute()) {
      return origPathStr;
    }
    if (p.toUri().getPath().isEmpty()) {
      //origPathStr="hdfs://host:99" for example
      return new Path(p.toUri().getScheme(), p.toUri().getAuthority(), "/user/" + user).toString();
    }
    //can't have relative path if there is scheme/authority
    return "/user/" + user + "/" + origPathStr;
  }

  /**
   * Resolves {@code fname} to a fully-qualified Path, accessing the
   * FileSystem as {@code user} (or the login user when null).
   *
   * @throws FileNotFoundException if the path provably does not exist
   */
  public static Path hadoopFsPath(String fname, final Configuration conf, String user)
    throws URISyntaxException, IOException, InterruptedException {
    if (fname == null || conf == null) {
      return null;
    }
    UserGroupInformation ugi;
    if (user != null) {
      ugi = UgiFactory.getUgi(user);
    } else {
      ugi = UserGroupInformation.getLoginUser();
    }
    // Strings are immutable; no need for the former defensive new String(fname).
    final String finalFName = fname;
    final FileSystem defaultFs =
      ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run()
          throws URISyntaxException, IOException, InterruptedException {
          return FileSystem.get(new URI(finalFName), conf);
        }
      });
    try {
      fname = addUserHomeDirectoryIfApplicable(fname, user);
      URI u = new URI(fname);
      Path p = new Path(u).makeQualified(defaultFs);
      if (hadoopFsIsMissing(defaultFs, p))
        throw new FileNotFoundException("File " + fname + " does not exist.");
      return p;
    } finally {
      // Close the per-UGI FileSystem cache on every path; previously this
      // only ran on success, leaking cached FileSystems when the file was
      // missing and FileNotFoundException was thrown.
      FileSystem.closeAllForUGI(ugi);
    }
  }

  /**
   * GET the given url. Returns the number of bytes received.
   */
  public static int fetchUrl(URL url)
    throws IOException {
    URLConnection cnx = url.openConnection();
    int total = 0;
    // try-with-resources: the previous version never closed the stream.
    try (InputStream in = cnx.getInputStream()) {
      byte[] buf = new byte[8192];
      int len;
      while ((len = in.read(buf)) >= 0) {
        total += len;
      }
    }
    return total;
  }

  /**
   * Set the environment variables to specify the hadoop user.
   */
  public static Map<String, String> hadoopUserEnv(String user,
                                                  String overrideClasspath) {
    HashMap<String, String> env = new HashMap<String, String>();
    env.put("HADOOP_USER_NAME", user);
    if (overrideClasspath != null) {
      env.put("HADOOP_USER_CLASSPATH_FIRST", "true");
      // Prepend the override, keeping any classpath already in the environment.
      String cur = System.getenv("HADOOP_CLASSPATH");
      if (TempletonUtils.isset(cur))
        overrideClasspath = overrideClasspath + ":" + cur;
      env.put("HADOOP_CLASSPATH", overrideClasspath);
    }
    return env;
  }

  /**
   * replaces all occurrences of "\," with ","; returns {@code s} if no modifications needed
   */
  public static String unEscapeString(String s) {
    return s != null && s.contains("\\,") ? StringUtils.unEscapeString(s) : s;
  }

  /**
   * Find a jar that contains a class of the same name and which
   * file name matches the given pattern.
   *
   * @param clazz the class to find.
   * @param fileNamePattern regex pattern that must match the jar full path
   * @return a jar file that contains the class, or null
   */
  public static String findContainingJar(Class<?> clazz, String fileNamePattern) {
    ClassLoader loader = clazz.getClassLoader();
    String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
    try {
      for (final Enumeration<URL> itr = loader.getResources(classFile);
           itr.hasMoreElements();) {
        final URL url = itr.nextElement();
        if ("jar".equals(url.getProtocol())) {
          String toReturn = url.getPath();
          if (fileNamePattern == null || toReturn.matches(fileNamePattern)) {
            toReturn = URLDecoder.decode(toReturn, "UTF-8");
            // Strip the "!/path/inside/jar" suffix of the jar: URL.
            return toReturn.replaceAll("!.*$", "");
          }
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return null;
  }

  /**
   * Renders a Properties object via {@link #dumpPropMap(String, Map)}.
   */
  public static StringBuilder dumpPropMap(String header, Properties props) {
    Map<String, String> map = new HashMap<String, String>();
    for (Map.Entry<Object, Object> ent : props.entrySet()) {
      map.put(ent.getKey().toString(), ent.getValue() == null ? null : ent.getValue().toString());
    }
    return dumpPropMap(header, map);
  }

  /**
   * Renders a sorted key=value dump of {@code map} for logging. Multi-entry
   * "*path*" values are split one entry per line; password-like values are
   * masked via {@link LogUtils#maskIfPassword}.
   */
  public static StringBuilder dumpPropMap(String header, Map<String, String> map) {
    StringBuilder sb = new StringBuilder("START").append(header).append(":\n");
    List<String> propKeys = new ArrayList<String>(map.keySet());
    Collections.sort(propKeys);
    for (String propKey : propKeys) {
      if (propKey.toLowerCase().contains("path")) {
        StringTokenizer st = new StringTokenizer(map.get(propKey), File.pathSeparator);
        if (st.countTokens() > 1) {
          sb.append(propKey).append("=\n");
          while (st.hasMoreTokens()) {
            sb.append("    ").append(st.nextToken()).append(File.pathSeparator).append('\n');
          }
        }
        else {
          sb.append(propKey).append('=').append(map.get(propKey)).append('\n');
        }
      }
      else {
        sb.append(propKey).append('=').append(LogUtils.maskIfPassword(propKey, map.get(propKey)));
        sb.append('\n');
      }
    }
    return sb.append("END").append(header).append('\n');
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.popup.tree;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.TreePopup;
import com.intellij.openapi.ui.popup.TreePopupStep;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.popup.WizardPopup;
import com.intellij.ui.treeStructure.SimpleTree;
import com.intellij.ui.treeStructure.filtered.FilteringTreeBuilder;
import com.intellij.ui.treeStructure.filtered.FilteringTreeStructure;
import com.intellij.util.Range;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
/**
 * A {@link WizardPopup} that presents its {@link TreePopupStep} as a
 * filterable tree. Selecting a leaf (or a node with no substep) finishes the
 * popup; selecting a node with a substep opens a child popup next to it.
 */
public class TreePopupImpl extends WizardPopup implements TreePopup {
  // The tree component and the mouse listeners attached while showing.
  private MyTree myWizardTree;
  private MouseMotionListener myMouseMotionListener;
  private MouseListener myMouseListener;
  // Expansion/selection state captured in dispose() and restored in beforeShow().
  private final List<TreePath> mySavedExpanded = new ArrayList<TreePath>();
  private TreePath mySavedSelected;
  // Child-popup bookkeeping: path whose substep popup is currently showing,
  // and the path whose auto-selection timer is pending.
  private TreePath myShowingChildPath;
  private TreePath myPendingChildPath;
  private FilteringTreeBuilder myBuilder;

  public TreePopupImpl(JBPopup parent, @NotNull TreePopupStep aStep, Object parentValue) {
    super(parent, aStep);
    setParentValue(parentValue);
  }

  /** Convenience constructor for a root (parentless) popup. */
  public TreePopupImpl(@NotNull TreePopupStep aStep) {
    this(null, aStep, null);
  }

  /**
   * Builds the tree, wires keyboard/mouse handling, and overrides the
   * expand/collapse actions so left/right arrows can also finish the popup
   * or return to the parent popup.
   */
  @Override
  protected JComponent createContent() {
    myWizardTree = new MyTree();
    myWizardTree.getAccessibleContext().setAccessibleName("WizardTree");
    // Builder filters/sorts the step's structure; selectability is delegated
    // to the step itself.
    myBuilder = new FilteringTreeBuilder(myWizardTree, this, getTreeStep().getStructure(), AlphaComparator.INSTANCE) {
      @Override
      protected boolean isSelectable(final Object nodeObject) {
        return getTreeStep().isSelectable(nodeObject, nodeObject);
      }
    };
    myBuilder.updateFromRoot();
    myWizardTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
    // Disable the default Space binding so Space can toggle expansion below.
    Action action = myWizardTree.getActionMap().get("toggleSelectionPreserveAnchor");
    if (action != null) {
      action.setEnabled(false);
    }
    myWizardTree.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(KeyEvent e) {
        if (e.getKeyCode() == KeyEvent.VK_SPACE) {
          toggleExpansion(myWizardTree.getAnchorSelectionPath());
        }
      }
    });
    myWizardTree.setRootVisible(getTreeStep().isRootVisible());
    myWizardTree.setShowsRootHandles(true);
    ToolTipManager.sharedInstance().registerComponent(myWizardTree);
    myWizardTree.setCellRenderer(new MyRenderer());
    myMouseMotionListener = new MyMouseMotionListener();
    myMouseListener = new MyMouseListener();
    registerAction("select", KeyEvent.VK_ENTER, 0, new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        handleSelect(true, null);
      }
    });
    registerAction("toggleExpansion", KeyEvent.VK_SPACE, 0, new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        toggleExpansion(myWizardTree.getSelectionPath());
      }
    });
    // Right arrow on a childless node commits the selection instead of expanding.
    final Action oldExpandAction = getActionMap().get("selectChild");
    getActionMap().put("selectChild", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final TreePath path = myWizardTree.getSelectionPath();
        if (path != null && 0 == myWizardTree.getModel().getChildCount(path.getLastPathComponent())) {
          handleSelect(false, null);
          return;
        }
        oldExpandAction.actionPerformed(e);
      }
    });
    // Left arrow on an already-collapsed top-level node returns to the parent popup.
    final Action oldCollapseAction = getActionMap().get("selectParent");
    getActionMap().put("selectParent", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final TreePath path = myWizardTree.getSelectionPath();
        if (shouldHidePopup(path)) {
          goBack();
          return;
        }
        oldCollapseAction.actionPerformed(e);
      }
    });
    return myWizardTree;
  }

  /**
   * True when left-arrow should close this popup and go back to the parent:
   * only for a collapsed node at the top visible level of a child popup.
   */
  private boolean shouldHidePopup(TreePath path) {
    if (getParent() == null) return false;
    if (path == null) return false;
    if (!myWizardTree.isCollapsed(path)) return false;
    if (myWizardTree.isRootVisible()) {
      return path.getPathCount() == 1;
    }
    return path.getPathCount() == 2;
  }

  @Override
  protected ActionMap getActionMap() {
    return myWizardTree.getActionMap();
  }

  @Override
  protected InputMap getInputMap() {
    return myWizardTree.getInputMap();
  }

  // Attached in beforeShow(), removed in dispose().
  private void addListeners() {
    myWizardTree.addMouseMotionListener(myMouseMotionListener);
    myWizardTree.addMouseListener(myMouseListener);
  }

  /**
   * Snapshots the current expansion and selection (restored on next show),
   * then detaches the mouse listeners.
   */
  @Override
  public void dispose() {
    mySavedExpanded.clear();
    final Enumeration<TreePath> expanded = myWizardTree.getExpandedDescendants(new TreePath(myWizardTree.getModel().getRoot()));
    if (expanded != null) {
      while (expanded.hasMoreElements()) {
        mySavedExpanded.add(expanded.nextElement());
      }
    }
    mySavedSelected = myWizardTree.getSelectionPath();
    myWizardTree.removeMouseMotionListener(myMouseMotionListener);
    myWizardTree.removeMouseListener(myMouseListener);
    super.dispose();
  }

  @Override
  protected boolean beforeShow() {
    addListeners();
    // expandAll() then collapseAll() normalizes the tree before re-applying
    // the saved expansion state — order appears intentional; preserved as-is.
    expandAll();
    collapseAll();
    restoreExpanded();
    if (mySavedSelected != null) {
      myWizardTree.setSelectionPath(mySavedSelected);
    }
    return super.beforeShow();
  }

  @Override
  protected void afterShow() {
    selectFirstSelectableItem();
  }

  // TODO: not-tested code:
  /** Selects the first visible row the step reports as selectable. */
  private void selectFirstSelectableItem() {
    for (int i = 0; i < myWizardTree.getRowCount(); i++) {
      TreePath path = myWizardTree.getPathForRow(i);
      if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
        myWizardTree.setSelectionPath(path);
        break;
      }
    }
  }

  /** Re-applies the saved expansion state; expands everything when none was saved. */
  private void restoreExpanded() {
    if (mySavedExpanded.isEmpty()) {
      expandAll();
      return;
    }
    for (TreePath each : mySavedExpanded) {
      myWizardTree.expandPath(each);
    }
  }

  private void expandAll() {
    for (int i = 0; i < myWizardTree.getRowCount(); i++) {
      myWizardTree.expandRow(i);
    }
  }

  // Collapses bottom-up, skipping row 0 so the root stays open.
  private void collapseAll() {
    int row = myWizardTree.getRowCount() - 1;
    while (row > 0) {
      myWizardTree.collapseRow(row);
      row--;
    }
  }

  private TreePopupStep getTreeStep() {
    return (TreePopupStep) myStep;
  }

  /**
   * Follows the mouse: hovering selects the path under the cursor and, for
   * selectable nodes, restarts the auto-open timer for child steps.
   */
  private class MyMouseMotionListener extends MouseMotionAdapter {
    private Point myLastMouseLocation;

    // Guards against spurious move events at the same screen location.
    private boolean isMouseMoved(Point location) {
      if (myLastMouseLocation == null) {
        myLastMouseLocation = location;
        return false;
      }
      return !myLastMouseLocation.equals(location);
    }

    @Override
    public void mouseMoved(MouseEvent e) {
      if (!isMouseMoved(e.getLocationOnScreen())) return;
      final TreePath path = getPath(e);
      if (path != null) {
        myWizardTree.setSelectionPath(path);
        notifyParentOnChildSelection();
        if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
          myWizardTree.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
          if (myPendingChildPath == null || !myPendingChildPath.equals(path)) {
            myPendingChildPath = path;
            restartTimer();
          }
          return;
        }
      }
      myWizardTree.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
    }
  }

  private TreePath getPath(MouseEvent e) {
    return myWizardTree.getClosestPathForLocation(e.getPoint().x, e.getPoint().y);
  }

  /**
   * Left-click commits a selectable node; clicking a non-selectable node
   * toggles its expansion unless the click hit the expand-control handle
   * (the tree handles that itself).
   */
  private class MyMouseListener extends MouseAdapter {
    @Override
    public void mousePressed(MouseEvent e) {
      final TreePath path = getPath(e);
      if (path == null) {
        return;
      }
      if (e.getButton() != MouseEvent.BUTTON1) {
        return;
      }
      final Object selected = path.getLastPathComponent();
      if (getTreeStep().isSelectable(selected, extractUserObject(selected))) {
        handleSelect(true, e);
      }
      else {
        if (!isLocationInExpandControl(myWizardTree, path, e.getPoint().x, e.getPoint().y)) {
          toggleExpansion(path);
        }
      }
    }

    @Override
    public void mouseReleased(MouseEvent e) {
    }
  }

  /** Toggles expansion of {@code path}, but only for step-selectable nodes. */
  private void toggleExpansion(TreePath path) {
    if (path == null) {
      return;
    }
    if (getTreeStep().isSelectable(path.getLastPathComponent(), extractUserObject(path.getLastPathComponent()))) {
      if (myWizardTree.isExpanded(path)) {
        myWizardTree.collapsePath(path);
      }
      else {
        myWizardTree.expandPath(path);
      }
    }
  }

  /**
   * Commits the current selection: asks the step for the next action and
   * either finishes the whole popup chain or opens a child popup.
   *
   * @param handleFinalChoices false when triggered by hover/timer — a leaf
   *                           without a substep is then NOT committed
   * @param e originating mouse event, or null for keyboard/timer
   */
  private void handleSelect(boolean handleFinalChoices, MouseEvent e) {
    final boolean pathIsAlreadySelected = myShowingChildPath != null && myShowingChildPath.equals(myWizardTree.getSelectionPath());
    if (pathIsAlreadySelected) return;
    myPendingChildPath = null;
    Object selected = myWizardTree.getLastSelectedPathComponent();
    if (selected != null) {
      final Object userObject = extractUserObject(selected);
      if (getTreeStep().isSelectable(selected, userObject)) {
        disposeChildren();
        final boolean hasNextStep = myStep.hasSubstep(userObject);
        if (!hasNextStep && !handleFinalChoices) {
          myShowingChildPath = null;
          return;
        }
        final PopupStep queriedStep = myStep.onChosen(userObject, handleFinalChoices);
        if (queriedStep == PopupStep.FINAL_CHOICE || !hasNextStep) {
          // Terminal choice: run the step's final runnable and close everything.
          setFinalRunnable(myStep.getFinalRunnable());
          setOk(true);
          disposeAllParents(e);
        }
        else {
          myShowingChildPath = myWizardTree.getSelectionPath();
          handleNextStep(queriedStep, myShowingChildPath);
          myShowingChildPath = null;
        }
      }
    }
  }

  // Opens the child popup to the right of the selected row.
  private void handleNextStep(PopupStep nextStep, Object parentValue) {
    final Rectangle pathBounds = myWizardTree.getPathBounds(myWizardTree.getSelectionPath());
    final Point point = new RelativePoint(myWizardTree, new Point(getContent().getWidth() + 2, (int) pathBounds.getY())).getScreenPoint();
    myChild = createPopup(this, nextStep, parentValue);
    myChild.show(getContent(), point.x - STEP_X_PADDING, point.y, true);
  }

  /**
   * Renderer that remaps selection/focus flags so non-selectable nodes are
   * never painted as selected.
   */
  private class MyRenderer extends NodeRenderer {
    @Override
    public void customizeCellRenderer(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      final boolean shouldPaintSelected = (getTreeStep().isSelectable(value, extractUserObject(value)) && selected) || (getTreeStep().isSelectable(value, extractUserObject(value)) && hasFocus);
      final boolean shouldPaintFocus =
        !getTreeStep().isSelectable(value, extractUserObject(value)) && selected || shouldPaintSelected || hasFocus;
      super.customizeCellRenderer(tree, value, shouldPaintSelected, expanded, leaf, row, shouldPaintFocus);
    }
  }

  // True when the click's x coordinate falls inside the expand/collapse handle.
  private static boolean isLocationInExpandControl(JTree aTree, TreePath path, int mouseX, int mouseY) {
    Range<Integer> box = TreeUtil.getExpandControlRange(aTree, path);
    return box != null && box.isWithin(mouseX);
  }

  @Override
  protected void process(KeyEvent aEvent) {
    myWizardTree.processKeyEvent(aEvent);
  }

  /**
   * Unwraps the step's value from a tree node, looking through the
   * FilteringTreeStructure wrapper when present.
   */
  private Object extractUserObject(Object aNode) {
    Object object = ((DefaultMutableTreeNode) aNode).getUserObject();
    if (object instanceof FilteringTreeStructure.FilteringNode) {
      return ((FilteringTreeStructure.FilteringNode) object).getDelegate();
    }
    return object;
  }

  /**
   * Tree that retargets key events to itself, reserves width for the
   * next-step arrow icon, and paints that icon on rows having a substep.
   */
  private class MyTree extends SimpleTree {
    @Override
    public void processKeyEvent(KeyEvent e) {
      e.setSource(this);
      super.processKeyEvent(e);
    }

    @Override
    public Dimension getPreferredSize() {
      // Extra 10px leaves room for the next-step arrow painted at the right edge.
      final Dimension pref = super.getPreferredSize();
      return new Dimension(pref.width + 10, pref.height);
    }

    @Override
    protected void paintChildren(Graphics g) {
      super.paintChildren(g);
      // Paint a "next step" arrow on each visible row whose value has a substep.
      Rectangle visibleRect = getVisibleRect();
      int rowForLocation = getClosestRowForLocation(0, visibleRect.y);
      for (int i = rowForLocation; i < rowForLocation + TreeUtil.getVisibleRowCount(this) + 1; i++) {
        final TreePath eachPath = getPathForRow(i);
        if (eachPath == null) continue;
        final Object lastPathComponent = eachPath.getLastPathComponent();
        final boolean hasNextStep = getTreeStep().hasSubstep(extractUserObject(lastPathComponent));
        if (!hasNextStep) continue;
        Icon icon = isPathSelected(eachPath) ?
                    AllIcons.Icons.Ide.NextStep :
                    AllIcons.Icons.Ide.NextStepGrayed;
        final Rectangle rec = getPathBounds(eachPath);
        int x = getSize().width - icon.getIconWidth() - 1;
        int y = rec.y + (rec.height - icon.getIconWidth()) / 2;
        icon.paintIcon(this, g, x, y);
      }
    }
  }

  // NOTE(review): appears unused within this class — confirm before removing.
  private Project getProject() {
    return getTreeStep().getProject();
  }

  @Override
  protected void onAutoSelectionTimer() {
    // Hover timer fired: open the pending child step without committing leaves.
    handleSelect(false, null);
  }

  @Override
  protected JComponent getPreferredFocusableComponent() {
    return myWizardTree;
  }

  @Override
  protected void onSpeedSearchPatternChanged() {
    myBuilder.refilter();
  }

  @Override
  protected void onChildSelectedFor(Object value) {
    TreePath path = (TreePath) value;
    if (myWizardTree.getSelectionPath() != path) {
      myWizardTree.setSelectionPath(path);
    }
  }

  @Override
  public boolean isModalContext() {
    return true;
  }
}
| |
package com.netflix.astyanax.cql.test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.annotations.Component;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.serializers.AnnotatedCompositeSerializer;
import com.netflix.astyanax.serializers.IntegerSerializer;
public class CompositeColumnTests extends KeyspaceTests {
private static AnnotatedCompositeSerializer<Population> compSerializer = new AnnotatedCompositeSerializer<Population>(Population.class);
private static ColumnFamily<Integer, Population> CF_POPULATION =
new ColumnFamily<Integer, Population>("population", IntegerSerializer.get(), compSerializer, IntegerSerializer.get());
@BeforeClass
public static void init() throws Exception {
    // Set up the shared keyspace context and create the composite-column CF
    // that every test in this class reads and writes.
    initContext();
    keyspace.createColumnFamily(CF_POPULATION, null);
    CF_POPULATION.describe(keyspace);
}
@AfterClass
public static void teardown() throws Exception {
    // Drop the CF created in init() so repeated runs start clean.
    keyspace.dropColumnFamily(CF_POPULATION);
}
/**
 * Single entry point: populates the CF, runs every read-path check, deletes
 * all rows, then runs the same checks again expecting empty results.
 */
@Test
public void runAllTests() throws Exception {
    populateRowsForCFPopulation();
    Thread.sleep(1000); // let the writes settle before reading
    runReadTestBattery(false);

    deleteRowsForCFPopulation();
    Thread.sleep(1000); // let the deletes settle before reading
    runReadTestBattery(true);
}

/**
 * Runs the full set of read checks once. The battery was previously
 * duplicated verbatim for the pre- and post-delete phases.
 *
 * @param rowDeleted when true, every query is expected to come back empty
 */
private void runReadTestBattery(boolean rowDeleted) throws Exception {
    // READ SINGLE ROW QUERIES
    testReadSingleRowAllColumns(rowDeleted);
    testReadSingleRowSingleColumn(rowDeleted);
    testReadSingleRowColumnRange(rowDeleted);
    // READ ROW SLICE WITH ROW KEYS
    testReadMultipleRowKeysWithAllColumns(rowDeleted);
    testReadMultipleRowKeysWithColumnRange(rowDeleted);
    // READ ROW SLICE WITH ROWS RANGE
    testReadRowRangeWithAllColumns(rowDeleted);
    testReadRowRangeWithColumnRange(rowDeleted);
    // ALL ROW COUNT QUERIES
    testReadSingleRowAllColumnsWithColumnCount(rowDeleted);
    testReadSingleRowColumnRangeWithColumnCount(rowDeleted);
    testReadMultipleRowKeysAllColumnsWithColumnCount(rowDeleted);
    testReadMultipleRowKeysColumnRangeWithColumnCount(rowDeleted);
    testReadRowRangeAllColumnsWithColumnCount(rowDeleted);
    testReadRowRangeColumnRangeWithColumnCount(rowDeleted);
}
/** Seeds one row per year (2001..2014), each with a random count per city. */
private void populateRowsForCFPopulation() throws Exception {
    final MutationBatch batch = keyspace.prepareMutationBatch();
    final Random rand = new Random();
    for (int year = 2001; year <= 2014; year++) {
        batch.withRow(CF_POPULATION, year)
             .putColumn(NewYork.clone(), rand.nextInt(25000))
             .putColumn(SanDiego.clone(), rand.nextInt(25000))
             .putColumn(SanFrancisco.clone(), rand.nextInt(25000))
             .putColumn(Seattle.clone(), rand.nextInt(25000));
    }
    batch.execute();
}
/** Removes every row written by {@code populateRowsForCFPopulation()}. */
private void deleteRowsForCFPopulation() throws Exception {
    final MutationBatch batch = keyspace.prepareMutationBatch();
    for (int year = 2001; year <= 2014; year++) {
        batch.withRow(CF_POPULATION, year).delete();
    }
    batch.execute();
}
/** Point-reads every year's row; deleted rows must come back empty. */
private void testReadSingleRowAllColumns(boolean rowDeleted) throws Exception {
    for (int year = 2001; year <= 2014; year++) {
        final ColumnList<Population> columns = keyspace.prepareQuery(CF_POPULATION)
                .getRow(year)
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(columns.isEmpty());
        } else {
            checkResult(columns, SanDiego, SanFrancisco, NewYork, Seattle);
        }
    }
}
/** Reads one named composite column per year; null is expected after deletion. */
private void testReadSingleRowSingleColumn(boolean rowDeleted) throws Exception {
    for (int year = 2001; year <= 2014; year++) {
        final Column<Population> column = keyspace.prepareQuery(CF_POPULATION)
                .getRow(year)
                .getColumn(SanFrancisco.clone())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertNull(column);
            continue;
        }
        Assert.assertTrue(column.hasValue());
        Assert.assertEquals(SanFrancisco, column.getName());
    }
}
// Exercises three composite-range reads against a single row: a state
// prefix, a prefix plus greaterThan bound, and a fully-specified prefix.
private void testReadSingleRowColumnRange(boolean rowDeleted) throws Exception {
    // Local serializer shadows the static field — same construction; kept as-is.
    AnnotatedCompositeSerializer<Population> compSerializer = new AnnotatedCompositeSerializer<Population>(Population.class);
    // NOTE(review): loop bound is 2001, so only one year is exercised here —
    // possibly intentional to keep the test fast; confirm with the author.
    for (int year = 2001; year <= 2001; year++) {
        // Prefix "CA" should match both Californian cities.
        ColumnList<Population> result = keyspace.prepareQuery(CF_POPULATION)
            .getRow(year)
            .withColumnRange(compSerializer.buildRange()
                .withPrefix("CA")
                .build())
            .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
            continue;
        } else {
            checkResult(result, SanDiego, SanFrancisco);
        }
        // "CA" rows strictly after "San Diego" — only San Francisco remains.
        result = keyspace.prepareQuery(CF_POPULATION)
            .getRow(year)
            .withColumnRange(compSerializer.buildRange()
                .withPrefix("CA")
                .greaterThan("San Diego")
                .build())
            .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
            continue;
        } else {
            checkResult(result, SanFrancisco);
        }
        // Fully-specified prefix (state, city, zip) pins exactly Seattle.
        result = keyspace.prepareQuery(CF_POPULATION)
            .getRow(year)
            .withColumnRange(compSerializer.buildRange()
                .withPrefix("WA")
                .withPrefix("Seattle")
                .withPrefix(40000)
                .build())
            .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
            continue;
        } else {
            checkResult(result, Seattle);
        }
    }
}
/** Key-slice read over five years, expecting all four city columns per row. */
private void testReadMultipleRowKeysWithAllColumns(boolean rowDeleted) throws Exception {
    final Rows<Integer, Population> rows = keyspace.prepareQuery(CF_POPULATION)
            .getKeySlice(2001, 2002, 2003, 2004, 2005)
            .execute().getResult();
    if (rowDeleted) {
        Assert.assertTrue(rows.isEmpty());
        return;
    }
    checkRowResult(rows, 2001, 5, SanDiego, SanFrancisco, NewYork, Seattle);
}
    /**
     * Reads five rows by explicit key slice, restricted by three different
     * composite column ranges, and checks the matching columns per row.
     *
     * @param rowDeleted whether the rows are expected to have been deleted
     */
    private void testReadMultipleRowKeysWithColumnRange(boolean rowDeleted) throws Exception {
        // Prefix "CA": San Diego + San Francisco in every row.
        Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, SanDiego, SanFrancisco);
        }
        // "CA" strictly after "San Diego": San Francisco only.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("CA")
                        .greaterThan("San Diego")
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, SanFrancisco);
        }
        // Fully-specified composite prefix (state, city, zipcode): exactly Seattle.
        result = keyspace.prepareQuery(CF_POPULATION)
                .getKeySlice(2001, 2002, 2003, 2004, 2005)
                .withColumnRange(compSerializer.buildRange()
                        .withPrefix("WA")
                        .withPrefix("Seattle")
                        .withPrefix(40000)
                        .build())
                .execute().getResult();
        if (rowDeleted) {
            Assert.assertTrue(result.isEmpty());
        } else {
            checkRowResult(result, 2001, 5, Seattle);
        }
    }
private void testReadRowRangeWithAllColumns(boolean rowDeleted) throws Exception {
List<TestRange> testRanges = getTestRanges();
for (TestRange testRange : testRanges) {
Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
.getKeyRange(null, null, testRange.start, testRange.end, 100)
.execute().getResult();
if (rowDeleted) {
Assert.assertTrue(result.isEmpty());
} else {
checkRowResult(result, testRange.expectedRowKeys, SanDiego, SanFrancisco, NewYork, Seattle);
}
}
}
    /**
     * Reads token ranges of rows restricted by three different composite column
     * ranges and checks the matching columns per row.
     *
     * @param rowDeleted whether the rows are expected to have been deleted
     */
    private void testReadRowRangeWithColumnRange(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        for (TestRange testRange : testRanges) {
            // Prefix "CA": San Diego + San Francisco in every row.
            Rows<Integer, Population> result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, SanDiego, SanFrancisco);
            }
            // "CA" strictly after "San Diego": San Francisco only.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .greaterThan("San Diego")
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, SanFrancisco);
            }
            // Fully-specified composite prefix (state, city, zipcode): exactly Seattle.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("WA")
                            .withPrefix("Seattle")
                            .withPrefix(40000)
                            .build())
                    .execute().getResult();
            if (rowDeleted) {
                Assert.assertTrue(result.isEmpty());
            } else {
                checkRowResult(result, testRange.expectedRowKeys, Seattle);
            }
        }
    }
/** ALL COLUMN COUNT QUERIES */
private void testReadSingleRowAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
for (int year = 2001; year <= 2014; year++) {
Integer result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.getCount()
.execute().getResult();
int expected = rowDeleted ? 0 : 4;
Assert.assertTrue(expected == result.intValue());
}
}
private void testReadSingleRowColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
for (int year = 2001; year <= 2014; year++) {
Integer result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.build())
.getCount()
.execute().getResult();
int expected = rowDeleted ? 0 : 2;
Assert.assertTrue(expected == result.intValue());
result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.greaterThan("San Diego")
.build())
.getCount()
.execute().getResult();
expected = rowDeleted ? 0 : 1;
Assert.assertTrue(expected == result.intValue());
result = keyspace.prepareQuery(CF_POPULATION)
.getRow(year)
.withColumnRange(compSerializer.buildRange()
.withPrefix("WA")
.withPrefix("Seattle")
.withPrefix(40000)
.build())
.getCount()
.execute().getResult();
expected = rowDeleted ? 0 : 1;
Assert.assertTrue(expected == result.intValue());
}
}
private void testReadMultipleRowKeysAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
.getKeySlice(2001, 2002, 2003, 2004, 2005)
.getColumnCounts()
.execute().getResult();
Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
if (!rowDeleted) {
for (int year = 2001; year<= 2005; year++) {
expected.put(year, 4);
}
}
Assert.assertEquals(expected, result);
}
private void testReadMultipleRowKeysColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
.getKeySlice(2001, 2002, 2003, 2004, 2005)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.build())
.getColumnCounts()
.execute().getResult();
Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
if (!rowDeleted) {
for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
expected.put(rowKey, 2);
}
}
Assert.assertEquals(expected, result);
result = keyspace.prepareQuery(CF_POPULATION)
.getKeySlice(2001, 2002, 2003, 2004, 2005)
.withColumnRange(compSerializer.buildRange()
.withPrefix("CA")
.greaterThan("San Diego")
.build())
.getColumnCounts()
.execute().getResult();
expected = new HashMap<Integer, Integer>();
if (!rowDeleted) {
for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
expected.put(rowKey, 1);
}
}
Assert.assertEquals(expected, result);
result = keyspace.prepareQuery(CF_POPULATION)
.getKeySlice(2001, 2002, 2003, 2004, 2005)
.withColumnRange(compSerializer.buildRange()
.withPrefix("WA")
.withPrefix("Seattle")
.withPrefix(40000)
.build())
.getColumnCounts()
.execute().getResult();
expected = new HashMap<Integer, Integer>();
if (!rowDeleted) {
for (Integer rowKey = 2001; rowKey<=2005; rowKey++) {
expected.put(rowKey, 1);
}
}
Assert.assertEquals(expected, result);
}
private void testReadRowRangeAllColumnsWithColumnCount(boolean rowDeleted) throws Exception {
List<TestRange> testRanges = getTestRanges();
TestRange range = testRanges.get(0);
Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
.getKeyRange(null, null, range.start, range.end, 100)
.getColumnCounts()
.execute().getResult();
Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
if (!rowDeleted) {
for (Integer year : range.expectedRowKeys) {
expected.put(year, 4);
}
}
Assert.assertEquals(expected, result);
}
    /**
     * Fetches per-row column counts over token ranges under three composite
     * column ranges and compares each count map against expectations.
     *
     * @param rowDeleted whether the rows are expected to have been deleted
     */
    private void testReadRowRangeColumnRangeWithColumnCount(boolean rowDeleted) throws Exception {
        List<TestRange> testRanges = getTestRanges();
        for (TestRange testRange : testRanges) {
            // Prefix "CA": two matching columns per row.
            Map<Integer, Integer> result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            // Deleted rows contribute no entries, so the expected map stays empty.
            Map<Integer, Integer> expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 2);
                }
            }
            Assert.assertEquals(expected, result);
            // "CA" strictly after "San Diego": one matching column per row.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("CA")
                            .greaterThan("San Diego")
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 1);
                }
            }
            Assert.assertEquals(expected, result);
            // Fully-specified composite prefix (state, city, zipcode): exactly Seattle.
            result = keyspace.prepareQuery(CF_POPULATION)
                    .getKeyRange(null, null, testRange.start, testRange.end, 100)
                    .withColumnRange(compSerializer.buildRange()
                            .withPrefix("WA")
                            .withPrefix("Seattle")
                            .withPrefix(40000)
                            .build())
                    .getColumnCounts()
                    .execute().getResult();
            expected = new HashMap<Integer, Integer>();
            if (!rowDeleted) {
                for (Integer rowKey : testRange.expectedRowKeys) {
                    expected.put(rowKey, 1);
                }
            }
            Assert.assertEquals(expected, result);
        }
    }
private void checkResult(ColumnList<Population> result, Population ... expected) throws Exception {
Assert.assertFalse(result.isEmpty());
Assert.assertEquals(expected.length, result.size());
int index = 0;
for (Population p : expected) {
Assert.assertEquals(p, result.getColumnByIndex(index++).getName());
}
}
private void checkRowResult(Rows<Integer, Population> result, Integer startKey, Integer size, Population ... expected) throws Exception {
int rowKey = startKey;
for (Row<Integer, Population> row : result) {
Assert.assertTrue(rowKey == row.getKey());
checkResult(row.getColumns(), expected);
rowKey++;
}
Assert.assertTrue("Result: " + result.size() + ", size: " + size, size == result.size());
}
private void checkRowResult(Rows<Integer, Population> result, List<Integer> rowKeys, Population ... expected) throws Exception {
int index = 0;
for (Row<Integer, Population> row : result) {
Assert.assertEquals(rowKeys.toString() + " " + row.getKey(), rowKeys.get(index++), row.getKey());
checkResult(row.getColumns(), expected);
}
Assert.assertTrue(rowKeys.size() == result.size());
}
    /** TEST CITIES */
    // Shared fixture data: one Population per city, reused by every read test.
    // Natural column order in the composite comparator is (state, city, zipcode),
    // i.e. SanDiego, SanFrancisco, NewYork, Seattle as asserted by the tests.
    public static Population NewYork = new Population("NY", "New York", 10000);
    public static Population SanDiego = new Population("CA", "San Diego", 20000);
    public static Population SanFrancisco = new Population("CA", "San Francisco", 30000);
    public static Population Seattle = new Population("WA", "Seattle", 40000);
public static class Population {
@Component(ordinal=0) String state;
@Component(ordinal=1) String city;
@Component(ordinal=2) Integer zipcode;
public Population() {
}
public Population(String state, String city, Integer zipcode) {
this.state = state;
this.city = city;
this.zipcode = zipcode;
}
public String toString() {
return "Population [" + state + ", " + city + ", " + zipcode + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((state == null) ? 0 : state.hashCode());
result = prime * result + ((city == null) ? 0 : city.hashCode());
result = prime * result + ((zipcode == null) ? 0 : zipcode.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null)return false;
if (getClass() != obj.getClass()) return false;
Population other = (Population) obj;
boolean equal = true;
equal &= (state != null) ? (state.equals(other.state)) : other.state == null;
equal &= (city != null) ? (city.equals(other.city)) : other.city == null;
equal &= (zipcode != null) ? (zipcode.equals(other.zipcode)) : other.zipcode == null;
return equal;
}
public Population clone() {
return new Population(state, city, zipcode);
}
}
/**
* 2014 --> -6625834866172541556 2003 --> -5952676706262623311 2009 --> -4850296245464368619
* 2010 --> -4012971246572234480 2005 --> -3904377230599730913 2006 --> -3604768136712843506
* 2012 --> -3193851331505022123 2007 --> -797272529921810676 2001 --> 267648259961407629
* 2002 --> 313927025611477591 2011 --> 2700799408278278395 2004 --> 5455601112738248795
* 2013 --> 8821734684824899422 2008 --> 9033513988054576353
*/
private static class TestRange {
private String start;
private String end;
private List<Integer> expectedRowKeys = new ArrayList<Integer>();
private TestRange(String start, String end, Integer ... rows) {
this.start = start;
this.end = end;
this.expectedRowKeys.addAll(Arrays.asList(rows));
}
}
private List<TestRange> getTestRanges() {
List<TestRange> list = new ArrayList<TestRange>();
list.add(new TestRange("-6625834866172541556", "-4850296245464368619", 2014, 2003, 2009));
list.add(new TestRange("-4012971246572234480", "-3604768136712843506", 2010, 2005, 2006));
list.add(new TestRange("-3193851331505022123", "267648259961407629", 2012, 2007, 2001));
list.add(new TestRange("313927025611477591", "5455601112738248795", 2002, 2011, 2004));
list.add(new TestRange("8821734684824899422", "9033513988054576353", 2013, 2008));
return list;
}
}
| |
package org.hisp.dhis.i18n;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.text.DateFormat;
import java.text.DateFormatSymbols;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.ResourceBundle;
import org.hisp.dhis.calendar.DateTimeUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.period.WeeklyPeriodType;
/**
* @author Pham Thi Thuy
* @author Nguyen Dang Quang
* @version $Id: I18nFormat.java 2974 2007-03-03 22:11:13Z torgeilo $
*/
public class I18nFormat
{
    // DecimalFormat is documented as not thread-safe; this shared instance must
    // only be used inside a synchronized block (see formatValue).
    private static final DecimalFormat FORMAT_VALUE = new DecimalFormat( "#.#" ); // Fixed for now
    private static final String EMPTY = "";
    private static final String NAN = "NaN";
    private static final String INVALID_DATE = "Invalid date format";
    public static final String FORMAT_DATE = "yyyy-MM-dd";
    public static final String FORMAT_TIME = "HH:mm";
    public static final String FORMAT_DATETIME = "yyyy-MM-dd HH:mm";
    private ResourceBundle resourceBundle;
    public I18nFormat( ResourceBundle resourceBundle )
    {
        this.resourceBundle = resourceBundle;
    }
    /**
     * Constructor should only be used for testing purposes. Use
     * I18nManager.getI18nFormat for normal use.
     */
    public I18nFormat()
    {
    }
    // -------------------------------------------------------------------------
    // Init
    // -------------------------------------------------------------------------
    // Symbols used by all date parsing/formatting; populated by init(). Parsing
    // or formatting before init() is called will fail on the null symbols.
    private DateFormatSymbols dateFormatSymbols;
    /**
     * Loads localized month and weekday names from the resource bundle into
     * {@link #dateFormatSymbols}. Calendar-specific keys (prefixed with the
     * current calendar's name) take precedence over the generic keys.
     */
    public void init()
    {
        String[] months = { "month.january", "month.february", "month.march", "month.april", "month.may", "month.june",
            "month.july", "month.august", "month.september", "month.october", "month.november", "month.december" };
        String[] shortMonths = { "month.short.january", "month.short.february", "month.short.march",
            "month.short.april", "month.short.may", "month.short.june", "month.short.july", "month.short.august",
            "month.short.september", "month.short.october", "month.short.november", "month.short.december" };
        String[] weekdays = { "weekday.sunday", "weekday.monday", "weekday.tuesday", "weekday.wednesday",
            "weekday.thursday", "weekday.friday", "weekday.saturday" };
        String[] shortWeekdays = { "weekday.short.sunday", "weekday.short.monday", "weekday.short.tuesday",
            "weekday.short.wednesday", "weekday.short.thursday", "weekday.short.friday", "weekday.short.saturday" };
        String calendarName = PeriodType.getCalendar().name() + ".";
        for ( int i = 0; i < 12; ++i )
        {
            // Prefer a calendar-specific translation when the bundle has one.
            if ( resourceBundle.containsKey( calendarName + months[i] ) )
            {
                months[i] = resourceBundle.getString( calendarName + months[i] );
            }
            else
            {
                months[i] = resourceBundle.getString( months[i] );
            }
            if ( resourceBundle.containsKey( calendarName + shortMonths[i] ) )
            {
                shortMonths[i] = resourceBundle.getString( calendarName + shortMonths[i] );
            }
            else
            {
                shortMonths[i] = resourceBundle.getString( shortMonths[i] );
            }
        }
        for ( int i = 0; i < 7; ++i )
        {
            if ( resourceBundle.containsKey( calendarName + weekdays[i] ) )
            {
                weekdays[i] = resourceBundle.getString( calendarName + weekdays[i] );
            }
            else
            {
                weekdays[i] = resourceBundle.getString( weekdays[i] );
            }
            if ( resourceBundle.containsKey( calendarName + shortWeekdays[i] ) )
            {
                shortWeekdays[i] = resourceBundle.getString( calendarName + shortWeekdays[i] );
            }
            else
            {
                shortWeekdays[i] = resourceBundle.getString( shortWeekdays[i] );
            }
        }
        SimpleDateFormat dateFormat = new SimpleDateFormat();
        dateFormatSymbols = dateFormat.getDateFormatSymbols();
        dateFormatSymbols.setMonths( months );
        dateFormatSymbols.setShortMonths( shortMonths );
        dateFormatSymbols.setWeekdays( weekdays );
        dateFormatSymbols.setShortWeekdays( shortWeekdays );
    }
    // -------------------------------------------------------------------------
    // Format methods
    // -------------------------------------------------------------------------
    /**
     * Parses a date on the {@link #FORMAT_DATE} pattern. Returns null if the
     * input is null or does not round-trip through the pattern exactly.
     */
    public Date parseDate( String date )
    {
        if ( date == null )
        {
            return null;
        }
        return commonParsing( date, FORMAT_DATE );
    }
    /**
     * Parses a time on the {@link #FORMAT_TIME} pattern. Returns null if the
     * input is null or does not round-trip through the pattern exactly.
     */
    public Date parseTime( String time )
    {
        if ( time == null )
        {
            return null;
        }
        return commonParsing( time, FORMAT_TIME );
    }
    /**
     * Parses a date-time on the {@link #FORMAT_DATETIME} pattern. Returns null
     * if the input is null or does not round-trip through the pattern exactly.
     */
    public Date parseDateTime( String dateTime )
    {
        if ( dateTime == null )
        {
            return null;
        }
        return commonParsing( dateTime, FORMAT_DATETIME );
    }
    /** Formats a date on the {@link #FORMAT_DATE} pattern; null yields null. */
    public String formatDate( Date date )
    {
        if ( date == null )
        {
            return null;
        }
        return commonFormatting( date, FORMAT_DATE );
    }
    /** Formats a time on the {@link #FORMAT_TIME} pattern; null yields null. */
    public String formatTime( Date date )
    {
        if ( date == null )
        {
            return null;
        }
        return commonFormatting( date, FORMAT_TIME );
    }
    /** Formats a date-time on the {@link #FORMAT_DATETIME} pattern; null yields null. */
    public String formatDateTime( Date date )
    {
        if ( date == null )
        {
            return null;
        }
        return commonFormatting( date, FORMAT_DATETIME );
    }
    /**
     * Formats a period. Returns null if value is null. Returns INVALID_DATE if formatting string is invalid.
     *
     * @param period the value to format.
     */
    public String formatPeriod( Period period )
    {
        if ( period == null )
        {
            return null;
        }
        String typeName = period.getPeriodType().getName();
        if ( typeName.equals( WeeklyPeriodType.NAME ) ) // Use ISO dates due to potential week confusion
        {
            return period.getIsoDate();
        }
        String keyStartDate = "format." + typeName + ".startDate";
        String keyEndDate = "format." + typeName + ".endDate";
        String startPattern = resourceBundle.getString( keyStartDate );
        String endPattern = resourceBundle.getString( keyEndDate );
        boolean dayPattern = startPattern.contains( "dd" ) || endPattern.contains( "dd" );
        Date periodStartDate = period.getStartDate();
        Date periodEndDate = period.getEndDate();
        DateTimeUnit start = PeriodType.getCalendar().fromIso( periodStartDate );
        DateTimeUnit end = PeriodType.getCalendar().fromIso( periodEndDate );
        String startDate;
        String endDate;
        if ( !dayPattern )
        {
            // Set day to first of month to not overflow when converting to JDK date
            start.setDay( 1 );
            end.setDay( 1 );
            startDate = commonFormatting( new DateTimeUnit( start, true ).toJdkDate(), startPattern );
            endDate = commonFormatting( new DateTimeUnit( end, true ).toJdkDate(), endPattern );
        }
        else
        {
            startDate = PeriodType.getCalendar().formattedDate( startPattern, start );
            endDate = PeriodType.getCalendar().formattedDate( endPattern, end );
        }
        try
        {
            // Capitalize the first character of the combined range string.
            return Character.toUpperCase( startDate.charAt( 0 ) ) + startDate.substring( 1 ) + endDate;
        }
        catch ( IllegalArgumentException ex )
        {
            return INVALID_DATE;
        }
    }
    /**
     * Formats value. Returns empty string if value is null. Returns NaN if value
     * is not a number. Return a formatted string if value is an instance of Number,
     * if not returns the value as a string.
     *
     * @param value the value to format.
     */
    public String formatValue( Object value )
    {
        if ( value == null )
        {
            return EMPTY;
        }
        if ( value instanceof Number )
        {
            try
            {
                // DecimalFormat is not thread-safe and FORMAT_VALUE is shared by
                // all threads; synchronize to avoid corrupted output under
                // concurrent use of this (typically singleton-like) formatter.
                synchronized ( FORMAT_VALUE )
                {
                    return FORMAT_VALUE.format( value );
                }
            }
            catch ( IllegalArgumentException ex )
            {
                return NAN;
            }
        }
        else
        {
            return String.valueOf( value );
        }
    }
    // -------------------------------------------------------------------------
    // Support methods
    // -------------------------------------------------------------------------
    /**
     * Strict parse: accepts the input only if formatting the parsed date with
     * the same pattern reproduces the input exactly (rejects lenient matches
     * such as out-of-range day numbers). Returns null on any mismatch.
     */
    private Date commonParsing( String input, String pattern )
    {
        DateFormat dateFormat = new SimpleDateFormat( pattern, dateFormatSymbols );
        Date parsedDate;
        try
        {
            parsedDate = dateFormat.parse( input );
        }
        catch ( ParseException e )
        {
            return null;
        }
        if ( !commonFormatting( parsedDate, pattern ).equals( input ) )
        {
            return null;
        }
        return parsedDate;
    }
    /**
     * Formats a date with the given pattern using the localized symbols. A new
     * SimpleDateFormat per call keeps this method thread-safe.
     */
    private String commonFormatting( Date date, String pattern )
    {
        DateFormat dateFormat = new SimpleDateFormat( pattern, dateFormatSymbols );
        return dateFormat.format( date );
    }
}
| |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.tasks.testing.testng;
import org.gradle.api.internal.tasks.testing.DefaultTestClassDescriptor;
import org.gradle.api.internal.tasks.testing.DefaultTestMethodDescriptor;
import org.gradle.api.internal.tasks.testing.DefaultTestSuiteDescriptor;
import org.gradle.api.internal.tasks.testing.TestCompleteEvent;
import org.gradle.api.internal.tasks.testing.TestDescriptorInternal;
import org.gradle.api.internal.tasks.testing.TestResultProcessor;
import org.gradle.api.internal.tasks.testing.TestStartEvent;
import org.gradle.api.tasks.testing.TestResult;
import org.gradle.internal.id.IdGenerator;
import org.gradle.internal.time.Clock;
import org.testng.IMethodInstance;
import org.testng.ISuite;
import org.testng.ISuiteListener;
import org.testng.ITestClass;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.xml.XmlTest;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Bridges TestNG's listener callbacks to Gradle's {@link TestResultProcessor}.
 * Maintains id maps from TestNG's suite/context/class/method objects to the
 * generated Gradle descriptor ids so start/complete events can be correlated.
 * All map access is guarded by {@code lock}; resultProcessor calls are made
 * outside the lock.
 */
public class TestNGTestResultProcessorAdapter implements ISuiteListener, ITestListener, TestNGConfigurationListener, TestNGClassListener {
    private final TestResultProcessor resultProcessor;
    private final IdGenerator<?> idGenerator;
    private final Clock clock;
    // Guards all of the id maps and failedConfigurations below.
    private final Object lock = new Object();
    private final Map<ITestContext, Object> testId = new HashMap<ITestContext, Object>();
    private final Map<ISuite, Object> suiteId = new HashMap<ISuite, Object>();
    private final Map<XmlTest, Object> xmlTestIds = new HashMap<XmlTest, Object>();
    private final Map<ITestClass, Object> testClassId = new HashMap<ITestClass, Object>();
    private final Map<ITestResult, Object> testMethodId = new HashMap<ITestResult, Object>();
    private final Map<ITestNGMethod, Object> testMethodParentId = new HashMap<ITestNGMethod, Object>();
    private final Set<ITestResult> failedConfigurations = new HashSet<ITestResult>();
    public TestNGTestResultProcessorAdapter(TestResultProcessor resultProcessor, IdGenerator<?> idGenerator, Clock clock) {
        this.resultProcessor = resultProcessor;
        this.idGenerator = idGenerator;
        this.clock = clock;
    }
    @Override
    public void onStart(ISuite suite) {
        TestDescriptorInternal testInternal;
        synchronized (lock) {
            if (suiteId.containsKey(suite)) {
                // Can get duplicate start events
                return;
            }
            Object id = idGenerator.generateId();
            testInternal = new DefaultTestSuiteDescriptor(id, suite.getName());
            suiteId.put(suite, testInternal.getId());
        }
        resultProcessor.started(testInternal, new TestStartEvent(clock.getCurrentTime()));
    }
    @Override
    public void onFinish(ISuite suite) {
        Object id;
        synchronized (lock) {
            id = suiteId.remove(suite);
            if (id == null) {
                // Can get duplicate finish events
                return;
            }
        }
        resultProcessor.completed(id, new TestCompleteEvent(clock.getCurrentTime()));
    }
    @Override
    public void onBeforeClass(ITestClass testClass) {
        TestDescriptorInternal testInternal;
        Object parentId;
        synchronized (lock) {
            testInternal = new DefaultTestClassDescriptor(idGenerator.generateId(), testClass.getName());
            testClassId.put(testClass, testInternal.getId());
            parentId = xmlTestIds.get(testClass.getXmlTest());
            // Remember this class's descriptor id as the parent for each of its
            // test methods, so method start events can be attributed to it.
            for (ITestNGMethod method : testClass.getTestMethods()) {
                testMethodParentId.put(method, testInternal.getId());
            }
        }
        resultProcessor.started(testInternal, new TestStartEvent(clock.getCurrentTime(), parentId));
    }
    @Override
    public void onBeforeClass(ITestClass testClass, IMethodInstance mi) {
    }
    @Override
    public void onAfterClass(ITestClass testClass) {
        Object id;
        synchronized (lock) {
            id = testClassId.remove(testClass);
        }
        // Guard against TestNG calling this hook more than once with the same testClass.
        // See https://github.com/cbeust/testng/issues/1618 for details.
        if (id != null) {
            resultProcessor.completed(id, new TestCompleteEvent(clock.getCurrentTime()));
        }
    }
    @Override
    public void onAfterClass(ITestClass testClass, IMethodInstance mi) {
    }
    @Override
    public void onStart(ITestContext iTestContext) {
        TestDescriptorInternal testInternal;
        Object parentId;
        synchronized (lock) {
            Object id = idGenerator.generateId();
            testInternal = new DefaultTestSuiteDescriptor(id, iTestContext.getName());
            parentId = suiteId.get(iTestContext.getSuite());
            xmlTestIds.put(iTestContext.getCurrentXmlTest(), id);
            testId.put(iTestContext, testInternal.getId());
            // The context id also serves as parent for its methods; this may be
            // overwritten per-class by onBeforeClass when class events arrive.
            for (ITestNGMethod method : iTestContext.getAllTestMethods()) {
                testMethodParentId.put(method, testInternal.getId());
            }
        }
        resultProcessor.started(testInternal, new TestStartEvent(iTestContext.getStartDate().getTime(), parentId));
    }
    @Override
    public void onFinish(ITestContext iTestContext) {
        Object id;
        synchronized (lock) {
            id = testId.remove(iTestContext);
            xmlTestIds.remove(iTestContext.getCurrentXmlTest());
            for (ITestNGMethod method : iTestContext.getAllTestMethods()) {
                testMethodParentId.remove(method);
            }
        }
        resultProcessor.completed(id, new TestCompleteEvent(iTestContext.getEndDate().getTime()));
    }
    @Override
    public void onTestStart(ITestResult iTestResult) {
        TestDescriptorInternal testInternal;
        Object parentId;
        synchronized (lock) {
            String name = calculateTestCaseName(iTestResult);
            testInternal = new DefaultTestMethodDescriptor(idGenerator.generateId(), iTestResult.getTestClass().getName(), name);
            Object oldTestId = testMethodId.put(iTestResult, testInternal.getId());
            assert oldTestId == null : "Apparently some other test has started but it hasn't finished. "
                + "Expect the resultProcessor to break. "
                + "Don't expect to see this assertion stack trace due to the current architecture";
            parentId = testMethodParentId.get(iTestResult.getMethod());
            assert parentId != null;
        }
        resultProcessor.started(testInternal, new TestStartEvent(iTestResult.getStartMillis(), parentId));
        // Rethrow a parameter-toString failure recorded by calculateTestCaseName
        // so it surfaces at test start rather than being silently swallowed.
        if (iTestResult.getThrowable() instanceof UnrepresentableParameterException) {
            throw (UnrepresentableParameterException) iTestResult.getThrowable();
        }
    }
    /**
     * Builds the display name for a test case. Parameterized tests get the
     * invocation count in brackets plus a parenthesized parameter list; a
     * parameter whose toString() throws is reported via an
     * UnrepresentableParameterException stored on the result.
     */
    private String calculateTestCaseName(ITestResult iTestResult) {
        Object[] parameters = iTestResult.getParameters();
        String name = iTestResult.getName();
        if (parameters != null && parameters.length > 0) {
            StringBuilder builder = new StringBuilder(name).
                append("[").
                append(iTestResult.getMethod().getCurrentInvocationCount()).
                append("]");
            StringBuilder paramsListBuilder = new StringBuilder("(");
            int i = 0;
            for (Object parameter : parameters) {
                if (parameter == null) {
                    paramsListBuilder.append("null");
                } else {
                    try {
                        paramsListBuilder.append(parameter.toString());
                    } catch (Exception e) {
                        // This may be thrown by the caller of this method at a later time
                        iTestResult.setThrowable(new UnrepresentableParameterException(iTestResult, i, e));
                        // Fall back to the name without the parameter list.
                        return builder.toString();
                    }
                }
                if (++i < parameters.length) {
                    paramsListBuilder.append(", ");
                }
            }
            paramsListBuilder.append(")");
            return builder.append(paramsListBuilder.toString()).toString();
        } else {
            return name;
        }
    }
    @Override
    public void onTestSuccess(ITestResult iTestResult) {
        onTestFinished(iTestResult, TestResult.ResultType.SUCCESS);
    }
    @Override
    public void onTestFailure(ITestResult iTestResult) {
        onTestFinished(iTestResult, TestResult.ResultType.FAILURE);
    }
    @Override
    public void onTestSkipped(ITestResult iTestResult) {
        onTestFinished(iTestResult, TestResult.ResultType.SKIPPED);
    }
    @Override
    public void onTestFailedButWithinSuccessPercentage(ITestResult iTestResult) {
        // Within the allowed success percentage, TestNG considers this a pass.
        onTestFinished(iTestResult, TestResult.ResultType.SUCCESS);
    }
    /**
     * Common completion path for all test outcomes. If no start event was seen
     * (e.g. the test was skipped because a dependency failed), a start event is
     * synthesized so the processor always sees a matched start/complete pair.
     */
    private void onTestFinished(ITestResult iTestResult, TestResult.ResultType resultType) {
        Object testId;
        TestStartEvent startEvent = null;
        synchronized (lock) {
            testId = testMethodId.remove(iTestResult);
            if (testId == null) {
                // This can happen when a method fails which this method depends on
                testId = idGenerator.generateId();
                Object parentId = testMethodParentId.get(iTestResult.getMethod());
                startEvent = new TestStartEvent(iTestResult.getStartMillis(), parentId);
            }
        }
        if (startEvent != null) {
            // Synthesize a start event
            resultProcessor.started(new DefaultTestMethodDescriptor(testId, iTestResult.getTestClass().getName(), iTestResult.getName()), startEvent);
        }
        if (resultType == TestResult.ResultType.FAILURE) {
            resultProcessor.failure(testId, iTestResult.getThrowable());
        }
        resultProcessor.completed(testId, new TestCompleteEvent(iTestResult.getEndMillis(), resultType));
    }
    @Override
    public void onConfigurationSuccess(ITestResult testResult) {
    }
    @Override
    public void onConfigurationSkip(ITestResult testResult) {
    }
    @Override
    public void onConfigurationFailure(ITestResult testResult) {
        synchronized (lock) {
            if (!failedConfigurations.add(testResult)) {
                // workaround for bug in TestNG 6.2 (apparently fixed in some 6.3.x): listener is notified twice per event
                return;
            }
        }
        // Synthesise a test for the broken configuration method
        ITestNGMethod testMethod = testResult.getMethod();
        ITestClass testClass = testMethod.getTestClass();
        TestDescriptorInternal test = new DefaultTestMethodDescriptor(idGenerator.generateId(), testClass.getName(), testMethod.getMethodName());
        resultProcessor.started(test, new TestStartEvent(testResult.getStartMillis(), testClassId.get(testClass)));
        resultProcessor.failure(test.getId(), testResult.getThrowable());
        resultProcessor.completed(test.getId(), new TestCompleteEvent(testResult.getEndMillis(), TestResult.ResultType.FAILURE));
    }
    @Override
    public void beforeConfiguration(ITestResult tr) {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.server.conf.codec;
import static org.apache.accumulo.server.conf.codec.VersionedProperties.tsFormatter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
/**
 * Abstract class to provide encoding / decoding of versioned properties. This class handles the
 * serialization of the metadata and subclasses are required to implement
 * {@link #encodePayload(OutputStream, VersionedProperties, EncodingOptions)} and
 * {@link #decodePayload(InputStream, EncodingOptions)} to handle any specific implementation
 * metadata (optional) and the property map according to the encoding scheme of the subclass.
 * <p>
 * The basic encoding format:
 * <ul>
 * <li>encoding metadata - specifies codec to be used</li>
 * <li>version metadata - specifies property versioning information</li>
 * <li>codec specific metadata (optional)</li>
 * <li>the property map</li>
 * </ul>
 *
 */
public abstract class VersionedPropCodec {

  private final EncodingOptions encodingOpts;

  public VersionedPropCodec(final EncodingOptions encodingOpts) {
    this.encodingOpts = encodingOpts;
  }

  /**
   * The general encoding options that apply to all encodings.
   *
   * @return the general options.
   */
  public EncodingOptions getEncodingOpts() {
    return encodingOpts;
  }

  /**
   * Serialize the versioned properties. The version metadata written is the <em>next</em> version
   * of the properties (see {@code VersionedProperties#getNextVersion()}); the supplied instance
   * itself is not modified.
   *
   * @param vProps
   *          the versioned properties.
   * @return a byte array with the serialized properties.
   * @throws IOException
   *           if an error occurs during serialization.
   */
  public byte[] toBytes(final VersionedProperties vProps) throws IOException {
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos)) {
      // write encoding metadata
      encodingOpts.encode(dos);
      // write version metadata
      DataVersionInfo vMetadata =
          new DataVersionInfo(vProps.getNextVersion(), vProps.getTimestamp());
      vMetadata.write(dos);
      // delegate property encoding to sub-class (writes directly to the byte stream)
      encodePayload(bos, vProps, encodingOpts);
      return bos.toByteArray();
    }
  }

  /**
   * Encode the properties and optionally any specific encoding metadata that is necessary to decode
   * the payload with the scheme chosen.
   *
   * @param out
   *          an output stream
   * @param vProps
   *          the versioned properties
   * @param encodingOpts
   *          the general encoding options.
   * @throws IOException
   *           if an error occurs writing to the underlying output stream.
   */
  abstract void encodePayload(final OutputStream out, final VersionedProperties vProps,
      final EncodingOptions encodingOpts) throws IOException;

  /**
   * Deserialize versioned properties previously written with {@link #toBytes(VersionedProperties)}.
   *
   * @param bytes
   *          serialized encoded versioned property byte array.
   * @return the decoded versioned properties.
   * @throws IOException
   *           if an error occurs reading the serialized data.
   * @throws IllegalArgumentException
   *           if the data was written with an encoding version this codec cannot decode.
   */
  public VersionedProperties fromBytes(final byte[] bytes) throws IOException {
    try (ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
        DataInputStream dis = new DataInputStream(bis)) {
      // read encoding metadata and confirm this codec understands it
      EncodingOptions encodingOpts = EncodingOptions.fromDataStream(dis);
      if (!checkCanDecodeVersion(encodingOpts)) {
        throw new IllegalArgumentException(
            "Invalid data version - cannot process the version read: "
                + encodingOpts.getEncodingVersion());
      }
      // read version metadata
      DataVersionInfo vMetadata = DataVersionInfo.fromDataStream(dis);
      // delegate property decoding to sub-class (reads directly from the byte stream)
      Map<String,String> props = decodePayload(bis, encodingOpts);
      return new VersionedProperties(vMetadata.getDataVersion(), vMetadata.getTimestamp(), props);
    }
  }

  /**
   * Determine if this codec can decode data written with the encoding version read from the
   * serialized form.
   *
   * @param encodingOpts
   *          the encoding options read from the serialized data.
   * @return true if this codec can decode the data, false otherwise.
   */
  abstract boolean checkCanDecodeVersion(final EncodingOptions encodingOpts);

  /**
   * Extracts the encoding version from the encoded byte array without fully decoding the payload.
   * This is a convenience method if multiple encodings are present, and should only be required if
   * upgrading / changing encodings, otherwise a single encoding should be in operation for an
   * instance at any given time.
   *
   * @param bytes
   *          serialized encoded versioned property byte array.
   * @return the encoding version used to serialize the properties.
   * @throws IllegalArgumentException
   *           if the encoding version cannot be read from the byte array.
   */
  public static int getEncodingVersion(final byte[] bytes) {
    try (ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
        DataInputStream dis = new DataInputStream(bis)) {
      return EncodingOptions.fromDataStream(dis).getEncodingVersion();
    } catch (NullPointerException | IOException ex) {
      throw new IllegalArgumentException("Failed to read encoding version from byte array provided",
          ex);
    }
  }

  /**
   * Extracts the data version from the encoded byte array without fully decoding the payload.
   * Normally the data version should be obtained from a fully decoded instance of the versioned
   * properties.
   * <p>
   * The cost of reading the byte array from the backing store should be considered versus the
   * additional cost of decoding - with a goal of reducing data reads from the store preferred.
   * Generally reading from the store will be followed by some sort of usage which would require the
   * full decode operation anyway.
   *
   * @param bytes
   *          serialized encoded versioned property byte array.
   * @return the data version of the serialized properties.
   * @throws IllegalArgumentException
   *           if the data version cannot be read from the byte array.
   */
  public static int getDataVersion(final byte[] bytes) {
    try (ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
        DataInputStream dis = new DataInputStream(bis)) {
      // skip encoding metadata
      EncodingOptions.fromDataStream(dis);
      return DataVersionInfo.fromDataStream(dis).getDataVersion();
    } catch (NullPointerException | IOException ex) {
      throw new IllegalArgumentException(
          "Failed to read data version from byte array provided", ex);
    }
  }

  /**
   * Decode the payload and any optional encoding specific metadata and return a map of the property
   * name, value pairs.
   *
   * @param inStream
   *          an input stream
   * @param encodingOpts
   *          the general encoding options.
   * @return a map of properties name, value pairs.
   * @throws IOException
   *           if an exception occurs reading from the input stream.
   */
  abstract Map<String,String> decodePayload(final InputStream inStream,
      final EncodingOptions encodingOpts) throws IOException;

  /**
   * Read the property map from a data input stream as UTF strings. The input stream should be
   * created and configured by sub-classes for the output of the sub-class. If the sub-class uses an
   * encoding other than UTF strings, they should override this method. An example would be an
   * encoding that uses JSON to encode the map.
   * <p>
   * Handling the properties as UTF strings is one implementation. Subclasses can implement a
   * different mechanism if desired, one example might be using a JSON implementation to encode /
   * decode the properties.
   *
   * @param dis
   *          a data input stream
   * @return the property map
   * @throws IOException
   *           if an exception occurs reading from the stream.
   */
  Map<String,String> readMapAsUTF(DataInputStream dis) throws IOException {
    int items = dis.readInt();
    // presize so the map does not rehash while being filled
    Map<String,String> aMap = new HashMap<>(Math.max(16, items * 2));
    for (int i = 0; i < items; i++) {
      String k = dis.readUTF();
      String v = dis.readUTF();
      aMap.put(k, v);
    }
    return aMap;
  }

  /**
   * Write the property map to the data output stream. The underlying stream is not closed by this
   * method.
   * <p>
   * Handling the properties as UTF strings is one implementation. Subclasses can implement a
   * different mechanism if desired, one example might be using a JSON implementation to encode /
   * decode the properties.
   *
   * @param dos
   *          a data output stream
   * @param aMap
   *          the property map of k, v string pairs.
   * @throws IOException
   *           if an exception occurs.
   */
  void writeMapAsUTF(final DataOutputStream dos, final Map<String,String> aMap) throws IOException {
    dos.writeInt(aMap.size());
    for (Map.Entry<String,String> e : aMap.entrySet()) {
      dos.writeUTF(e.getKey());
      dos.writeUTF(e.getValue());
    }
    dos.flush();
  }

  /**
   * Helper class for reading / writing versioned properties metadata (the data version and the
   * timestamp).
   */
  static class DataVersionInfo {

    private final int dataVersion;
    private final Instant timestamp;

    public DataVersionInfo(final int dataVersion, final Instant timestamp) {
      this.dataVersion = dataVersion;
      this.timestamp = timestamp;
    }

    /**
     * Read the version metadata from the stream in the format produced by {@link #write}.
     *
     * @param dis
     *          a data input stream
     * @return the version metadata
     * @throws IOException
     *           if the metadata cannot be read or parsed.
     */
    public static DataVersionInfo fromDataStream(final DataInputStream dis) throws IOException {
      try {
        var dataVersion = dis.readInt();
        var timestamp = tsFormatter.parse(dis.readUTF(), Instant::from);
        return new DataVersionInfo(dataVersion, timestamp);
      } catch (Exception ex) {
        throw new IOException("Could not parse data version info", ex);
      }
    }

    public int getDataVersion() {
      return dataVersion;
    }

    public Instant getTimestamp() {
      return timestamp;
    }

    /**
     * Write the version metadata: the data version as an int, then the timestamp as a UTF string
     * formatted with {@code tsFormatter}.
     */
    public void write(final DataOutputStream dos) throws IOException {
      dos.writeInt(dataVersion);
      dos.writeUTF(tsFormatter.format(timestamp));
    }
  }
}
| |
/*
* @(#)ConcurrentSkipListSet.java 1.4 06/05/10
*
* Copyright 2006 Sun Microsystems, Inc. All rights reserved.
* SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
package java.util.concurrent;
import java.util.*;
import sun.misc.Unsafe;
/**
 * A scalable concurrent {@link NavigableSet} implementation based on
 * a {@link ConcurrentSkipListMap}.  The elements of the set are kept
 * sorted according to their {@linkplain Comparable natural ordering},
 * or by a {@link Comparator} provided at set creation time, depending
 * on which constructor is used.
 *
 * <p>This implementation provides expected average <i>log(n)</i> time
 * cost for the <tt>contains</tt>, <tt>add</tt>, and <tt>remove</tt>
 * operations and their variants.  Insertion, removal, and access
 * operations safely execute concurrently by multiple threads.
 * Iterators are <i>weakly consistent</i>, returning elements
 * reflecting the state of the set at some point at or since the
 * creation of the iterator.  They do <em>not</em> throw {@link
 * ConcurrentModificationException}, and may proceed concurrently with
 * other operations.  Ascending ordered views and their iterators are
 * faster than descending ones.
 *
 * <p>Beware that, unlike in most collections, the <tt>size</tt>
 * method is <em>not</em> a constant-time operation. Because of the
 * asynchronous nature of these sets, determining the current number
 * of elements requires a traversal of the elements. Additionally, the
 * bulk operations <tt>addAll</tt>, <tt>removeAll</tt>,
 * <tt>retainAll</tt>, and <tt>containsAll</tt> are <em>not</em>
 * guaranteed to be performed atomically. For example, an iterator
 * operating concurrently with an <tt>addAll</tt> operation might view
 * only some of the added elements.
 *
 * <p>This class and its iterators implement all of the
 * <em>optional</em> methods of the {@link Set} and {@link Iterator}
 * interfaces. Like most other concurrent collection implementations,
 * this class does not permit the use of <tt>null</tt> elements,
 * because <tt>null</tt> arguments and return values cannot be reliably
 * distinguished from the absence of elements.
 *
 * <p>This class is a member of the
 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
 * Java Collections Framework</a>.
 *
 * @author Doug Lea
 * @param <E> the type of elements maintained by this set
 * @since 1.6
 */
public class ConcurrentSkipListSet<E>
    extends AbstractSet<E>
    implements NavigableSet<E>, Cloneable, java.io.Serializable {

    private static final long serialVersionUID = -2479143111061671589L;

    /**
     * The underlying map. Uses Boolean.TRUE as value for each
     * element.  This field is declared final for the sake of thread
     * safety, which entails some ugliness in clone()
     */
    private final ConcurrentNavigableMap<E,Object> m;

    /**
     * Constructs a new, empty set that orders its elements according to
     * their {@linkplain Comparable natural ordering}.
     */
    public ConcurrentSkipListSet() {
        m = new ConcurrentSkipListMap<E,Object>();
    }

    /**
     * Constructs a new, empty set that orders its elements according to
     * the specified comparator.
     *
     * @param comparator the comparator that will be used to order this set.
     *        If <tt>null</tt>, the {@linkplain Comparable natural
     *        ordering} of the elements will be used.
     */
    public ConcurrentSkipListSet(Comparator<? super E> comparator) {
        m = new ConcurrentSkipListMap<E,Object>(comparator);
    }

    /**
     * Constructs a new set containing the elements in the specified
     * collection, that orders its elements according to their
     * {@linkplain Comparable natural ordering}.
     *
     * @param c The elements that will comprise the new set
     * @throws ClassCastException if the elements in <tt>c</tt> are
     *         not {@link Comparable}, or are not mutually comparable
     * @throws NullPointerException if the specified collection or any
     *         of its elements are null
     */
    public ConcurrentSkipListSet(Collection<? extends E> c) {
        m = new ConcurrentSkipListMap<E,Object>();
        addAll(c);
    }

    /**
     * Constructs a new set containing the same elements and using the
     * same ordering as the specified sorted set.
     *
     * @param s sorted set whose elements will comprise the new set
     * @throws NullPointerException if the specified sorted set or any
     *         of its elements are null
     */
    public ConcurrentSkipListSet(SortedSet<E> s) {
        m = new ConcurrentSkipListMap<E,Object>(s.comparator());
        addAll(s);
    }

    /**
     * For use by submaps
     */
    ConcurrentSkipListSet(ConcurrentNavigableMap<E,Object> m) {
        this.m = m;
    }

    /**
     * Returns a shallow copy of this <tt>ConcurrentSkipListSet</tt>
     * instance. (The elements themselves are not cloned.)
     *
     * @return a shallow copy of this set
     */
    @SuppressWarnings("unchecked") // cast of super.clone() is safe for this final-layout class
    public ConcurrentSkipListSet<E> clone() {
        try {
            ConcurrentSkipListSet<E> clone = (ConcurrentSkipListSet<E>) super.clone();
            // the field is final, so the fresh map must be installed via Unsafe (see setMap)
            clone.setMap(new ConcurrentSkipListMap<E,Object>(m));
            return clone;
        } catch (CloneNotSupportedException e) {
            // cannot happen: this class implements Cloneable
            throw new InternalError();
        }
    }

    /* ---------------- Set operations -------------- */

    /**
     * Returns the number of elements in this set.  If this set
     * contains more than <tt>Integer.MAX_VALUE</tt> elements, it
     * returns <tt>Integer.MAX_VALUE</tt>.
     *
     * <p>Beware that, unlike in most collections, this method is
     * <em>NOT</em> a constant-time operation. Because of the
     * asynchronous nature of these sets, determining the current
     * number of elements requires traversing them all to count them.
     * Additionally, it is possible for the size to change during
     * execution of this method, in which case the returned result
     * will be inaccurate. Thus, this method is typically not very
     * useful in concurrent applications.
     *
     * @return the number of elements in this set
     */
    public int size() {
        return m.size();
    }

    /**
     * Returns <tt>true</tt> if this set contains no elements.
     * @return <tt>true</tt> if this set contains no elements
     */
    public boolean isEmpty() {
        return m.isEmpty();
    }

    /**
     * Returns <tt>true</tt> if this set contains the specified element.
     * More formally, returns <tt>true</tt> if and only if this set
     * contains an element <tt>e</tt> such that <tt>o.equals(e)</tt>.
     *
     * @param o object to be checked for containment in this set
     * @return <tt>true</tt> if this set contains the specified element
     * @throws ClassCastException if the specified element cannot be
     *         compared with the elements currently in this set
     * @throws NullPointerException if the specified element is null
     */
    public boolean contains(Object o) {
        return m.containsKey(o);
    }

    /**
     * Adds the specified element to this set if it is not already present.
     * More formally, adds the specified element <tt>e</tt> to this set if
     * the set contains no element <tt>e2</tt> such that <tt>e.equals(e2)</tt>.
     * If this set already contains the element, the call leaves the set
     * unchanged and returns <tt>false</tt>.
     *
     * @param e element to be added to this set
     * @return <tt>true</tt> if this set did not already contain the
     *         specified element
     * @throws ClassCastException if <tt>e</tt> cannot be compared
     *         with the elements currently in this set
     * @throws NullPointerException if the specified element is null
     */
    public boolean add(E e) {
        return m.putIfAbsent(e, Boolean.TRUE) == null;
    }

    /**
     * Removes the specified element from this set if it is present.
     * More formally, removes an element <tt>e</tt> such that
     * <tt>o.equals(e)</tt>, if this set contains such an element.
     * Returns <tt>true</tt> if this set contained the element (or
     * equivalently, if this set changed as a result of the call).
     * (This set will not contain the element once the call returns.)
     *
     * @param o object to be removed from this set, if present
     * @return <tt>true</tt> if this set contained the specified element
     * @throws ClassCastException if <tt>o</tt> cannot be compared
     *         with the elements currently in this set
     * @throws NullPointerException if the specified element is null
     */
    public boolean remove(Object o) {
        return m.remove(o, Boolean.TRUE);
    }

    /**
     * Removes all of the elements from this set.
     */
    public void clear() {
        m.clear();
    }

    /**
     * Returns an iterator over the elements in this set in ascending order.
     *
     * @return an iterator over the elements in this set in ascending order
     */
    public Iterator<E> iterator() {
        return m.navigableKeySet().iterator();
    }

    /**
     * Returns an iterator over the elements in this set in descending order.
     *
     * @return an iterator over the elements in this set in descending order
     */
    public Iterator<E> descendingIterator() {
        return m.descendingKeySet().iterator();
    }

    /* ---------------- AbstractSet Overrides -------------- */

    /**
     * Compares the specified object with this set for equality.  Returns
     * <tt>true</tt> if the specified object is also a set, the two sets
     * have the same size, and every member of the specified set is
     * contained in this set (or equivalently, every member of this set is
     * contained in the specified set).  This definition ensures that the
     * equals method works properly across different implementations of the
     * set interface.
     *
     * @param o the object to be compared for equality with this set
     * @return <tt>true</tt> if the specified object is equal to this set
     */
    public boolean equals(Object o) {
        // Override AbstractSet version to avoid calling size()
        if (o == this)
            return true;
        if (!(o instanceof Set))
            return false;
        Collection<?> c = (Collection<?>) o;
        try {
            return containsAll(c) && c.containsAll(this);
        } catch (ClassCastException unused) {
            return false;
        } catch (NullPointerException unused) {
            return false;
        }
    }

    /**
     * Removes from this set all of its elements that are contained in
     * the specified collection.  If the specified collection is also
     * a set, this operation effectively modifies this set so that its
     * value is the <i>asymmetric set difference</i> of the two sets.
     *
     * @param  c collection containing elements to be removed from this set
     * @return <tt>true</tt> if this set changed as a result of the call
     * @throws ClassCastException if the types of one or more elements in this
     *         set are incompatible with the specified collection
     * @throws NullPointerException if the specified collection or any
     *         of its elements are null
     */
    public boolean removeAll(Collection<?> c) {
        // Override AbstractSet version to avoid unnecessary call to size()
        boolean modified = false;
        for (Iterator<?> i = c.iterator(); i.hasNext(); )
            if (remove(i.next()))
                modified = true;
        return modified;
    }

    /* ---------------- Relational operations -------------- */

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if the specified element is null
     */
    public E lower(E e) {
        return m.lowerKey(e);
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if the specified element is null
     */
    public E floor(E e) {
        return m.floorKey(e);
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if the specified element is null
     */
    public E ceiling(E e) {
        return m.ceilingKey(e);
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if the specified element is null
     */
    public E higher(E e) {
        return m.higherKey(e);
    }

    public E pollFirst() {
        Map.Entry<E,Object> e = m.pollFirstEntry();
        return e == null ? null : e.getKey();
    }

    public E pollLast() {
        Map.Entry<E,Object> e = m.pollLastEntry();
        return e == null ? null : e.getKey();
    }

    /* ---------------- SortedSet operations -------------- */

    public Comparator<? super E> comparator() {
        return m.comparator();
    }

    /**
     * @throws NoSuchElementException {@inheritDoc}
     */
    public E first() {
        return m.firstKey();
    }

    /**
     * @throws NoSuchElementException {@inheritDoc}
     */
    public E last() {
        return m.lastKey();
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code fromElement} or
     *         {@code toElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> subSet(E fromElement,
                                  boolean fromInclusive,
                                  E toElement,
                                  boolean toInclusive) {
        return new ConcurrentSkipListSet<E>
            (m.subMap(fromElement, fromInclusive,
                      toElement,   toInclusive));
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code toElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> headSet(E toElement, boolean inclusive) {
        return new ConcurrentSkipListSet<E>(m.headMap(toElement, inclusive));
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code fromElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
        return new ConcurrentSkipListSet<E>(m.tailMap(fromElement, inclusive));
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code fromElement} or
     *         {@code toElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> subSet(E fromElement, E toElement) {
        return subSet(fromElement, true, toElement, false);
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code toElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> headSet(E toElement) {
        return headSet(toElement, false);
    }

    /**
     * @throws ClassCastException {@inheritDoc}
     * @throws NullPointerException if {@code fromElement} is null
     * @throws IllegalArgumentException {@inheritDoc}
     */
    public NavigableSet<E> tailSet(E fromElement) {
        return tailSet(fromElement, true);
    }

    /**
     * Returns a reverse order view of the elements contained in this set.
     * The descending set is backed by this set, so changes to the set are
     * reflected in the descending set, and vice-versa.
     *
     * <p>The returned set has an ordering equivalent to
     * <tt>{@link Collections#reverseOrder(Comparator) Collections.reverseOrder}(comparator())</tt>.
     * The expression {@code s.descendingSet().descendingSet()} returns a
     * view of {@code s} essentially equivalent to {@code s}.
     *
     * @return a reverse order view of this set
     */
    public NavigableSet<E> descendingSet() {
        return new ConcurrentSkipListSet<E>(m.descendingMap());
    }

    // Support for resetting map in clone
    private static final Unsafe unsafe = Unsafe.getUnsafe();
    private static final long mapOffset;
    static {
        try {
            mapOffset = unsafe.objectFieldOffset
                (ConcurrentSkipListSet.class.getDeclaredField("m"));
        } catch (Exception ex) { throw new Error(ex); }
    }

    /**
     * Writes the given map into the final field {@code m} via Unsafe;
     * only used by clone(), which must replace the copied reference.
     */
    private void setMap(ConcurrentNavigableMap<E,Object> map) {
        unsafe.putObjectVolatile(this, mapOffset, map);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.font;
import java.awt.geom.GeneralPath;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fontbox.cff.Type2CharString;
import org.apache.fontbox.cmap.CMap;
import org.apache.fontbox.ttf.CmapSubtable;
import org.apache.fontbox.ttf.GlyphData;
import org.apache.fontbox.ttf.OTFParser;
import org.apache.fontbox.ttf.OpenTypeFont;
import org.apache.fontbox.ttf.TTFParser;
import org.apache.fontbox.ttf.TrueTypeFont;
import org.apache.fontbox.util.BoundingBox;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSStream;
import org.apache.pdfbox.io.IOUtils;
import org.apache.pdfbox.pdmodel.common.PDStream;
import org.apache.pdfbox.util.Matrix;
/**
 * Type 2 CIDFont (TrueType).
 *
 * @author Ben Litchfield
 */
public class PDCIDFontType2 extends PDCIDFont
{
    private static final Log LOG = LogFactory.getLog(PDCIDFontType2.class);

    private final TrueTypeFont ttf;
    private final int[] cid2gid;
    private final Map<Integer, Integer> gid2cid;
    private final boolean hasIdentityCid2Gid;
    private final boolean isEmbedded;
    private final boolean isDamaged;
    private final CmapSubtable cmap; // may be null
    private Matrix fontMatrix;

    /**
     * Constructor.
     *
     * @param fontDictionary The font dictionary according to the PDF specification.
     */
    public PDCIDFontType2(COSDictionary fontDictionary, PDType0Font parent) throws IOException
    {
        super(fontDictionary, parent);

        PDFontDescriptor fd = getFontDescriptor();
        PDStream ff2Stream = fd.getFontFile2();
        PDStream ff3Stream = fd.getFontFile3();

        // Acrobat looks in FontFile too, even though it is not in the spec, see PDFBOX-2599
        if (ff2Stream == null && ff3Stream == null)
        {
            ff2Stream = fd.getFontFile();
        }

        TrueTypeFont ttfFont = null;
        boolean fontIsDamaged = false;
        if (ff2Stream != null)
        {
            try
            {
                // embedded TrueType
                TTFParser ttfParser = new TTFParser(true);
                ttfFont = ttfParser.parse(ff2Stream.createInputStream());
            }
            catch (NullPointerException e) // TTF parser is buggy
            {
                LOG.warn("Could not read embedded TTF for font " + getBaseFont(), e);
                fontIsDamaged = true;
            }
            catch (IOException e)
            {
                LOG.warn("Could not read embedded TTF for font " + getBaseFont(), e);
                fontIsDamaged = true;
            }
        }
        else if (ff3Stream != null)
        {
            try
            {
                // embedded OpenType
                OTFParser otfParser = new OTFParser(true);
                OpenTypeFont otf = otfParser.parse(ff3Stream.createInputStream());
                ttfFont = otf;

                if (otf.isPostScript())
                {
                    // todo: we need more abstraction to support CFF fonts here
                    throw new IOException("Not implemented: OpenType font with CFF table " +
                                          getBaseFont());
                }

                if (otf.hasLayoutTables())
                {
                    LOG.error("OpenType Layout tables used in font " + getBaseFont() +
                              " are not implemented in PDFBox and will be ignored");
                }
            }
            catch (NullPointerException e) // TTF parser is buggy
            {
                fontIsDamaged = true;
                LOG.warn("Could not read embedded OTF for font " + getBaseFont(), e);
            }
            catch (IOException e)
            {
                fontIsDamaged = true;
                LOG.warn("Could not read embedded OTF for font " + getBaseFont(), e);
            }
        }
        isEmbedded = ttfFont != null;
        isDamaged = fontIsDamaged;

        if (ttfFont == null)
        {
            // missing or damaged embedded font: find a system font or substitute
            CIDFontMapping mapping = FontMapper.getCIDFont(getBaseFont(), getFontDescriptor(),
                                                           getCIDSystemInfo());
            if (mapping.isCIDFont())
            {
                ttfFont = mapping.getFont();
            }
            else
            {
                ttfFont = (TrueTypeFont)mapping.getTrueTypeFont();
            }

            if (mapping.isFallback())
            {
                LOG.warn("Using fallback font " + ttfFont.getName() + " for CID-keyed TrueType font " + getBaseFont());
            }
        }
        ttf = ttfFont;
        cmap = ttf.getUnicodeCmap(false);

        cid2gid = readCIDToGIDMap();
        gid2cid = invert(cid2gid);
        COSBase map = dict.getDictionaryObject(COSName.CID_TO_GID_MAP);
        hasIdentityCid2Gid = map instanceof COSName && ((COSName) map).getName().equals("Identity");
    }

    @Override
    public Matrix getFontMatrix()
    {
        if (fontMatrix == null)
        {
            // 1000 upem, this is not strictly true
            fontMatrix = new Matrix(0.001f, 0, 0, 0.001f, 0, 0);
        }
        return fontMatrix;
    }

    @Override
    public BoundingBox getBoundingBox() throws IOException
    {
        return ttf.getFontBBox();
    }

    /**
     * Reads the CIDToGIDMap stream, if any, as an array indexed by CID.
     *
     * @return the CID to GID mapping, or null if the entry is absent or not a stream.
     */
    private int[] readCIDToGIDMap() throws IOException
    {
        int[] cid2gid = null;
        COSBase map = dict.getDictionaryObject(COSName.CID_TO_GID_MAP);
        if (map instanceof COSStream)
        {
            COSStream stream = (COSStream) map;
            InputStream is = stream.getUnfilteredStream();
            byte[] mapAsBytes;
            try
            {
                mapAsBytes = IOUtils.toByteArray(is);
            }
            finally
            {
                // close in a finally block so the stream is not leaked if toByteArray() throws
                IOUtils.closeQuietly(is);
            }
            int numberOfInts = mapAsBytes.length / 2;
            cid2gid = new int[numberOfInts];
            int offset = 0;
            for (int index = 0; index < numberOfInts; index++)
            {
                // each entry is a big-endian unsigned 16-bit GID
                int gid = (mapAsBytes[offset] & 0xff) << 8 | mapAsBytes[offset + 1] & 0xff;
                cid2gid[index] = gid;
                offset += 2;
            }
        }
        return cid2gid;
    }

    /**
     * Builds the inverse GID -> CID mapping; when a GID appears more than once
     * the last CID wins.
     *
     * @param cid2gid the CID to GID mapping, may be null.
     * @return the inverse mapping, or null if cid2gid is null.
     */
    private Map<Integer, Integer> invert(int[] cid2gid)
    {
        if (cid2gid == null)
        {
            return null;
        }
        Map<Integer, Integer> inverse = new HashMap<Integer, Integer>();
        for (int i = 0; i < cid2gid.length; i++)
        {
            inverse.put(cid2gid[i], i);
        }
        return inverse;
    }

    @Override
    public int codeToCID(int code)
    {
        CMap cMap = parent.getCMap();

        // Acrobat allows bad PDFs to use Unicode CMaps here instead of CID CMaps, see PDFBOX-1283
        if (!cMap.hasCIDMappings() && cMap.hasUnicodeMappings())
        {
            return cMap.toUnicode(code).codePointAt(0); // actually: code -> CID
        }

        return cMap.toCID(code);
    }

    /**
     * Returns the GID for the given character code.
     *
     * @param code character code
     * @return GID
     */
    public int codeToGID(int code) throws IOException
    {
        if (!isEmbedded)
        {
            // The conforming reader shall select glyphs by translating characters from the
            // encoding specified by the predefined CMap to one of the encodings in the TrueType
            // font's 'cmap' table. The means by which this is accomplished are implementation-
            // dependent.

            boolean hasUnicodeMap = parent.getCMapUCS2() != null;

            if (cid2gid != null)
            {
                // Acrobat allows non-embedded GIDs - todo: can we find a test PDF for this?
                int cid = codeToCID(code);
                return cid2gid[cid];
            }
            else if (hasIdentityCid2Gid || !hasUnicodeMap)
            {
                // same as above, but for the default Identity CID2GIDMap or when there is no
                // ToUnicode CMap to fallback to, see PDFBOX-2599 and PDFBOX-2560
                // todo: can we find a test PDF for the Identity case?
                return codeToCID(code);
            }
            else
            {
                // fallback to the ToUnicode CMap, test with PDFBOX-1422 and PDFBOX-2560
                String unicode = parent.toUnicode(code);
                if (unicode == null)
                {
                    LOG.warn("Failed to find a character mapping for " + code + " in " + getName());
                    return 0;
                }
                else if (unicode.length() > 1)
                {
                    LOG.warn("Trying to map multi-byte character using 'cmap', result will be poor");
                }

                // a non-embedded font always has a cmap (otherwise FontMapper won't load it)
                return cmap.getGlyphId(unicode.codePointAt(0));
            }
        }
        else
        {
            // If the TrueType font program is embedded, the Type 2 CIDFont dictionary shall contain
            // a CIDToGIDMap entry that maps CIDs to the glyph indices for the appropriate glyph
            // descriptions in that font program.

            int cid = codeToCID(code);
            if (cid2gid != null)
            {
                // use CIDToGIDMap; out of range CIDs map to GID 0
                if (cid < cid2gid.length)
                {
                    return cid2gid[cid];
                }
                else
                {
                    return 0;
                }
            }
            else
            {
                // "Identity" is the default CIDToGIDMap
                if (cid < ttf.getNumberOfGlyphs())
                {
                    return cid;
                }
                else
                {
                    // out of range CIDs map to GID 0
                    return 0;
                }
            }
        }
    }

    @Override
    public float getHeight(int code) throws IOException
    {
        // todo: really we want the BBox, (for text extraction:)
        return (ttf.getHorizontalHeader().getAscender() + -ttf.getHorizontalHeader().getDescender())
                / ttf.getUnitsPerEm(); // todo: shouldn't this be the yMax/yMin?
    }

    @Override
    public float getWidthFromFont(int code) throws IOException
    {
        int gid = codeToGID(code);
        // compute in float so that scaling to 1000 units/em does not truncate
        // fractional widths (the previous int arithmetic lost precision)
        float width = ttf.getAdvanceWidth(gid);
        int unitsPerEM = ttf.getUnitsPerEm();
        if (unitsPerEM != 1000)
        {
            width *= 1000f / unitsPerEM;
        }
        return width;
    }

    @Override
    public byte[] encode(int unicode)
    {
        int cid = -1;
        if (isEmbedded)
        {
            // embedded fonts always use CIDToGIDMap, with Identity as the default
            if (parent.getCMap().getName().startsWith("Identity-"))
            {
                if (cmap != null)
                {
                    cid = cmap.getGlyphId(unicode);
                }
            }
            else
            {
                // if the CMap is predefined then there will be a UCS-2 CMap
                if (parent.getCMapUCS2() != null)
                {
                    cid = parent.getCMapUCS2().toCID(unicode);
                }
            }

            // otherwise we require an explicit ToUnicode CMap
            if (cid == -1)
            {
                // todo: invert the ToUnicode CMap?
                cid = 0;
            }
        }
        else
        {
            // a non-embedded font always has a cmap (otherwise FontMapper wouldn't have loaded it)
            cid = cmap.getGlyphId(unicode);
        }

        if (cid == 0)
        {
            throw new IllegalArgumentException(
                    String.format("No glyph for U+%04X in font %s", unicode, getName()));
        }

        // CID is always 2-bytes (16-bit) for TrueType
        return new byte[] { (byte)(cid >> 8 & 0xff), (byte)(cid & 0xff) };
    }

    @Override
    public boolean isEmbedded()
    {
        return isEmbedded;
    }

    @Override
    public boolean isDamaged()
    {
        return isDamaged;
    }

    /**
     * Returns the embedded or substituted TrueType font. May be an OpenType font if the font is
     * not embedded.
     */
    public TrueTypeFont getTrueTypeFont()
    {
        return ttf;
    }

    @Override
    public GeneralPath getPath(int code) throws IOException
    {
        if (ttf instanceof OpenTypeFont && ((OpenTypeFont)ttf).isPostScript())
        {
            // substituted OpenType/CFF font: fetch the outline as a Type 2 charstring
            int cid = codeToCID(code);
            Type2CharString charstring = ((OpenTypeFont)ttf).getCFF().getFont().getType2CharString(cid);
            return charstring.getPath();
        }
        else
        {
            int gid = codeToGID(code);
            GlyphData glyph = ttf.getGlyph().getGlyph(gid);
            if (glyph != null)
            {
                return glyph.getPath();
            }
            // missing glyph: return an empty path rather than null
            return new GeneralPath();
        }
    }

    @Override
    public boolean hasGlyph(int code) throws IOException
    {
        return codeToGID(code) != 0;
    }
}
| |
/**
*
*/
package br.com.swconsultoria.efd.contribuicoes.registros.blocoF;
import java.util.ArrayList;
import java.util.List;
/**
* @author Yuri Lemes
*
*/
/**
 * Registro F500 do bloco F da EFD-Contribuicoes: consolidation record whose
 * field values are carried as pre-formatted Strings, matching the SPED text
 * layout. Child F509 records are exposed via a lazily-created list.
 */
public class RegistroF500 {
    private final String reg = "F500";
    private String vl_rec_caixa;
    private String cst_pis;
    private String vl_desc_pis;
    private String vl_bc_pis;
    private String aliq_pis_percentual;
    private String vl_pis;
    private String cst_cofins;
    private String vl_desc_cofins;
    private String vl_bc_cofins;
    private String aliq_cofins_percentual;
    private String vl_cofins;
    private String cod_mod;
    private String cfop;
    private String cod_cta;
    private String info_compl;
    private List<RegistroF509> registroF509;
    /**
     * @return the reg
     */
    public String getReg() {
        return reg;
    }
    /**
     * @return the vl_rec_caixa
     */
    public String getVl_rec_caixa() {
        return vl_rec_caixa;
    }
    /**
     * @return the cst_pis
     */
    public String getCst_pis() {
        return cst_pis;
    }
    /**
     * @return the vl_desc_pis
     */
    public String getVl_desc_pis() {
        return vl_desc_pis;
    }
    /**
     * @return the vl_bc_pis
     */
    public String getVl_bc_pis() {
        return vl_bc_pis;
    }
    /**
     * @return the aliq_pis_percentual
     */
    public String getAliq_pis_percentual() {
        return aliq_pis_percentual;
    }
    /**
     * @return the vl_pis
     */
    public String getVl_pis() {
        return vl_pis;
    }
    /**
     * @return the cst_cofins
     */
    public String getCst_cofins() {
        return cst_cofins;
    }
    /**
     * @return the vl_desc_cofins
     */
    public String getVl_desc_cofins() {
        return vl_desc_cofins;
    }
    /**
     * @return the vl_bc_cofins
     */
    public String getVl_bc_cofins() {
        return vl_bc_cofins;
    }
    /**
     * @return the aliq_cofins_percentual
     */
    public String getAliq_cofins_percentual() {
        return aliq_cofins_percentual;
    }
    /**
     * @return the vl_cofins
     */
    public String getVl_cofins() {
        return vl_cofins;
    }
    /**
     * @return the cod_mod
     */
    public String getCod_mod() {
        return cod_mod;
    }
    /**
     * @return the cfop
     */
    public String getCfop() {
        return cfop;
    }
    /**
     * @return the cod_cta
     */
    public String getCod_cta() {
        return cod_cta;
    }
    /**
     * @return the info_compl
     */
    public String getInfo_compl() {
        return info_compl;
    }
    /**
     * Returns the child F509 records, creating the backing list on first
     * access so callers can add to it directly. Never returns null.
     * NOTE(review): lazy init is not thread-safe — assumed single-threaded use.
     *
     * @return the registroF509
     */
    public List<RegistroF509> getRegistroF509() {
        if (registroF509 == null) {
            registroF509 = new ArrayList<>();
        }
        return registroF509;
    }
    /**
     * @param vl_rec_caixa
     *            the vl_rec_caixa to set
     */
    public void setVl_rec_caixa(String vl_rec_caixa) {
        this.vl_rec_caixa = vl_rec_caixa;
    }
    /**
     * @param cst_pis
     *            the cst_pis to set
     */
    public void setCst_pis(String cst_pis) {
        this.cst_pis = cst_pis;
    }
    /**
     * @param vl_desc_pis
     *            the vl_desc_pis to set
     */
    public void setVl_desc_pis(String vl_desc_pis) {
        this.vl_desc_pis = vl_desc_pis;
    }
    /**
     * @param vl_bc_pis
     *            the vl_bc_pis to set
     */
    public void setVl_bc_pis(String vl_bc_pis) {
        this.vl_bc_pis = vl_bc_pis;
    }
    /**
     * @param aliq_pis_percentual
     *            the aliq_pis_percentual to set
     */
    public void setAliq_pis_percentual(String aliq_pis_percentual) {
        this.aliq_pis_percentual = aliq_pis_percentual;
    }
    /**
     * @param vl_pis
     *            the vl_pis to set
     */
    public void setVl_pis(String vl_pis) {
        this.vl_pis = vl_pis;
    }
    /**
     * @param cst_cofins
     *            the cst_cofins to set
     */
    public void setCst_cofins(String cst_cofins) {
        this.cst_cofins = cst_cofins;
    }
    /**
     * @param vl_desc_cofins
     *            the vl_desc_cofins to set
     */
    public void setVl_desc_cofins(String vl_desc_cofins) {
        this.vl_desc_cofins = vl_desc_cofins;
    }
    /**
     * @param vl_bc_cofins
     *            the vl_bc_cofins to set
     */
    public void setVl_bc_cofins(String vl_bc_cofins) {
        this.vl_bc_cofins = vl_bc_cofins;
    }
    /**
     * @param aliq_cofins_percentual
     *            the aliq_cofins_percentual to set
     */
    public void setAliq_cofins_percentual(String aliq_cofins_percentual) {
        this.aliq_cofins_percentual = aliq_cofins_percentual;
    }
    /**
     * @param vl_cofins
     *            the vl_cofins to set
     */
    public void setVl_cofins(String vl_cofins) {
        this.vl_cofins = vl_cofins;
    }
    /**
     * @param cod_mod
     *            the cod_mod to set
     */
    public void setCod_mod(String cod_mod) {
        this.cod_mod = cod_mod;
    }
    /**
     * @param cfop
     *            the cfop to set
     */
    public void setCfop(String cfop) {
        this.cfop = cfop;
    }
    /**
     * @param cod_cta
     *            the cod_cta to set
     */
    public void setCod_cta(String cod_cta) {
        this.cod_cta = cod_cta;
    }
    /**
     * @param info_compl
     *            the info_compl to set
     */
    public void setInfo_compl(String info_compl) {
        this.info_compl = info_compl;
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.file;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
import edu.uci.ics.asterix.common.context.ITransactionSubsystemProvider;
import edu.uci.ics.asterix.common.context.TransactionSubsystemProvider;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
import edu.uci.ics.asterix.common.transactions.IRecoveryManager.ResourceType;
import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.external.indexing.operators.ExternalIndexBulkModifyOperatorDescriptor;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.ExternalFile;
import edu.uci.ics.asterix.metadata.external.IndexingConstants;
import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
import edu.uci.ics.asterix.runtime.evaluators.functions.AndDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.CastRecordDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.IsNullDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import edu.uci.ics.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory;
import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@SuppressWarnings("rawtypes")
// TODO: We should eventually have a hierarchy of classes that can create all
// possible index job specs,
// not just for creation.
public abstract class SecondaryIndexOperationsHelper {
protected final PhysicalOptimizationConfig physOptConf;
protected int numPrimaryKeys;
protected int numSecondaryKeys;
protected AqlMetadataProvider metadataProvider;
protected String dataverseName;
protected String datasetName;
protected Dataset dataset;
protected ARecordType itemType;
protected ISerializerDeserializer payloadSerde;
protected IFileSplitProvider primaryFileSplitProvider;
protected AlgebricksPartitionConstraint primaryPartitionConstraint;
protected IFileSplitProvider secondaryFileSplitProvider;
protected AlgebricksPartitionConstraint secondaryPartitionConstraint;
protected String secondaryIndexName;
protected boolean anySecondaryKeyIsNullable = false;
protected boolean isEnforcingKeyTypes = false;
protected long numElementsHint;
protected IBinaryComparatorFactory[] primaryComparatorFactories;
protected int[] primaryBloomFilterKeyFields;
protected RecordDescriptor primaryRecDesc;
protected IBinaryComparatorFactory[] secondaryComparatorFactories;
protected ITypeTraits[] secondaryTypeTraits;
protected int[] secondaryBloomFilterKeyFields;
protected RecordDescriptor secondaryRecDesc;
protected ICopyEvaluatorFactory[] secondaryFieldAccessEvalFactories;
protected IAsterixPropertiesProvider propertiesProvider;
protected ILSMMergePolicyFactory mergePolicyFactory;
protected Map<String, String> mergePolicyFactoryProperties;
protected RecordDescriptor enforcedRecDesc;
protected ARecordType enforcedItemType;
protected int numFilterFields;
protected List<String> filterFieldName;
protected ITypeTraits[] filterTypeTraits;
protected IBinaryComparatorFactory[] filterCmpFactories;
protected int[] secondaryFilterFields;
protected int[] primaryFilterFields;
protected int[] primaryBTreeFields;
protected int[] secondaryBTreeFields;
protected List<ExternalFile> externalFiles;
// Prevent public construction. Should be created via createIndexCreator().
protected SecondaryIndexOperationsHelper(PhysicalOptimizationConfig physOptConf,
IAsterixPropertiesProvider propertiesProvider) {
this.physOptConf = physOptConf;
this.propertiesProvider = propertiesProvider;
}
public static SecondaryIndexOperationsHelper createIndexOperationsHelper(IndexType indexType, String dataverseName,
String datasetName, String indexName, List<List<String>> secondaryKeyFields,
List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
PhysicalOptimizationConfig physOptConf, ARecordType recType, ARecordType enforcedType)
throws AsterixException, AlgebricksException {
IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
SecondaryIndexOperationsHelper indexOperationsHelper = null;
switch (indexType) {
case BTREE: {
indexOperationsHelper = new SecondaryBTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
break;
}
case RTREE: {
indexOperationsHelper = new SecondaryRTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
break;
}
case SINGLE_PARTITION_WORD_INVIX:
case SINGLE_PARTITION_NGRAM_INVIX:
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX: {
indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(physOptConf,
asterixPropertiesProvider);
break;
}
default: {
throw new AsterixException("Unknown Index Type: " + indexType);
}
}
indexOperationsHelper.init(indexType, dataverseName, datasetName, indexName, secondaryKeyFields,
secondaryKeyTypes, isEnforced, gramLength, metadataProvider, recType, enforcedType);
return indexOperationsHelper;
}
public abstract JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException;
public abstract JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException;
public abstract JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException;
protected void init(IndexType indexType, String dvn, String dsn, String in, List<List<String>> secondaryKeyFields,
List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
ARecordType aRecType, ARecordType enforcedType) throws AsterixException, AlgebricksException {
this.metadataProvider = metadataProvider;
dataverseName = dvn == null ? metadataProvider.getDefaultDataverseName() : dvn;
datasetName = dsn;
secondaryIndexName = in;
isEnforcingKeyTypes = isEnforced;
dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new AsterixException("Unknown dataset " + datasetName);
}
boolean temp = dataset.getDatasetDetails().isTemp();
itemType = aRecType;
enforcedItemType = enforcedType;
payloadSerde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
numSecondaryKeys = secondaryKeyFields.size();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
.splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, secondaryIndexName, temp);
secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
numPrimaryKeys = ExternalIndexingOperations.getRIDSize(dataset);
} else {
filterFieldName = DatasetUtils.getFilterField(dataset);
if (filterFieldName != null) {
numFilterFields = 1;
} else {
numFilterFields = 0;
}
numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadataProvider
.splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
primaryFileSplitProvider = primarySplitsAndConstraint.first;
primaryPartitionConstraint = primarySplitsAndConstraint.second;
setPrimaryRecDescAndComparators();
}
setSecondaryRecDescAndComparators(indexType, secondaryKeyFields, secondaryKeyTypes, gramLength,
metadataProvider);
numElementsHint = metadataProvider.getCardinalityPerPartitionHint(dataset);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
metadataProvider.getMetadataTxnContext());
mergePolicyFactory = compactionInfo.first;
mergePolicyFactoryProperties = compactionInfo.second;
if (numFilterFields > 0) {
setFilterTypeTraitsAndComparators();
}
}
protected void setFilterTypeTraitsAndComparators() throws AlgebricksException {
filterTypeTraits = new ITypeTraits[numFilterFields];
filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
secondaryFilterFields = new int[numFilterFields];
primaryFilterFields = new int[numFilterFields];
primaryBTreeFields = new int[numPrimaryKeys + 1];
secondaryBTreeFields = new int[numSecondaryKeys + numPrimaryKeys];
for (int i = 0; i < primaryBTreeFields.length; i++) {
primaryBTreeFields[i] = i;
}
for (int i = 0; i < secondaryBTreeFields.length; i++) {
secondaryBTreeFields[i] = i;
}
IAType type;
try {
type = itemType.getSubFieldType(filterFieldName);
} catch (IOException e) {
throw new AlgebricksException(e);
}
filterCmpFactories[0] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(type, true);
filterTypeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(type);
secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
primaryFilterFields[0] = numPrimaryKeys + 1;
}
protected abstract int getNumSecondaryKeys();
protected void setPrimaryRecDescAndComparators() throws AlgebricksException {
List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
int numPrimaryKeys = partitioningKeys.size();
ISerializerDeserializer[] primaryRecFields = new ISerializerDeserializer[numPrimaryKeys + 1];
ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
primaryBloomFilterKeyFields = new int[numPrimaryKeys];
ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
for (int i = 0; i < numPrimaryKeys; i++) {
IAType keyType;
try {
keyType = itemType.getSubFieldType(partitioningKeys.get(i));
} catch (IOException e) {
throw new AlgebricksException(e);
}
primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
keyType, true);
primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
primaryBloomFilterKeyFields[i] = i;
}
primaryRecFields[numPrimaryKeys] = payloadSerde;
primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
primaryRecDesc = new RecordDescriptor(primaryRecFields, primaryTypeTraits);
}
protected abstract void setSecondaryRecDescAndComparators(IndexType indexType,
List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes, int gramLength,
AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException;
protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec) throws AsterixException,
AlgebricksException {
// Build dummy tuple containing one field with a dummy value inside.
ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
DataOutput dos = tb.getDataOutput();
tb.reset();
try {
// Serialize dummy value into a field.
IntegerSerializerDeserializer.INSTANCE.serialize(0, dos);
} catch (HyracksDataException e) {
throw new AsterixException(e);
}
// Add dummy field.
tb.addFieldEndOffset();
ISerializerDeserializer[] keyRecDescSers = { IntegerSerializerDeserializer.INSTANCE };
RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, keyProviderOp,
primaryPartitionConstraint);
return keyProviderOp;
}
protected BTreeSearchOperatorDescriptor createPrimaryIndexScanOp(JobSpecification spec) throws AlgebricksException {
// -Infinity
int[] lowKeyFields = null;
// +Infinity
int[] highKeyFields = null;
ITransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
JobId jobId = JobIdFactory.generateJobId();
metadataProvider.setJobId(jobId);
boolean isWriteTransaction = metadataProvider.isWriteTransaction();
IJobletEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(jobId, isWriteTransaction);
spec.setJobletEventListenerFactory(jobEventListenerFactory);
boolean temp = dataset.getDatasetDetails().isTemp();
ISearchOperationCallbackFactory searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
: new PrimaryIndexInstantSearchOperationCallbackFactory(jobId, dataset.getDatasetId(),
primaryBloomFilterKeyFields, txnSubsystemProvider, ResourceType.LSM_BTREE);
AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
primaryFileSplitProvider, primaryRecDesc.getTypeTraits(), primaryComparatorFactories,
primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true,
new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
mergePolicyFactory, mergePolicyFactoryProperties, new PrimaryIndexOperationTrackerProvider(
dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
primaryBTreeFields, primaryFilterFields, !temp), false, false, null,
searchCallbackFactory, null, null);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
primaryPartitionConstraint);
return primarySearchOp;
}
protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec,
AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields) throws AlgebricksException {
int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
outColumns[i] = numPrimaryKeys + i;
}
int projCount = 0;
for (int i = 0; i < numSecondaryKeyFields; i++) {
projectionList[projCount++] = numPrimaryKeys + i;
}
for (int i = 0; i < numPrimaryKeys; i++) {
projectionList[projCount++] = i;
}
if (numFilterFields > 0) {
projectionList[projCount++] = numPrimaryKeys + numSecondaryKeyFields;
}
IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
secondaryFieldAccessEvalFactories[i]);
}
AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp,
primaryPartitionConstraint);
return asterixAssignOp;
}
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec,
AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields, DatasetType dsType) {
CastRecordDescriptor castFuncDesc = (CastRecordDescriptor) CastRecordDescriptor.FACTORY
.createFunctionDescriptor();
castFuncDesc.reset(enforcedItemType, itemType);
int[] outColumns = new int[1];
int[] projectionList = new int[1 + numPrimaryKeys];
int recordIdx;
//external datascan operator returns a record as the first field, instead of the last in internal case
if (dsType == DatasetType.EXTERNAL) {
recordIdx = 0;
outColumns[0] = 0;
} else {
recordIdx = numPrimaryKeys;
outColumns[0] = numPrimaryKeys;
}
for (int i = 0; i <= numPrimaryKeys; i++) {
projectionList[i] = i;
}
ICopyEvaluatorFactory[] castEvalFact = new ICopyEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
sefs[0] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
castFuncDesc.createEvaluatorFactory(castEvalFact));
AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
AlgebricksMetaOperatorDescriptor castRecAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
return castRecAssignOp;
}
protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
int[] sortFields = new int[secondaryComparatorFactories.length];
for (int i = 0; i < secondaryComparatorFactories.length; i++) {
sortFields[i] = i;
}
ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
physOptConf.getMaxFramesExternalSort(), sortFields, secondaryComparatorFactories, secondaryRecDesc);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp, primaryPartitionConstraint);
return sortOp;
}
protected TreeIndexBulkLoadOperatorDescriptor createTreeIndexBulkLoadOp(JobSpecification spec,
int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
throws MetadataException, AlgebricksException {
int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
for (int i = 0; i < fieldPermutation.length; i++) {
fieldPermutation[i] = i;
}
TreeIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
secondaryRecDesc, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories, secondaryBloomFilterKeyFields,
fieldPermutation, fillFactor, false, numElementsHint, false, dataflowHelperFactory);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
secondaryPartitionConstraint);
return treeIndexBulkLoadOp;
}
public AlgebricksMetaOperatorDescriptor createFilterNullsSelectOp(JobSpecification spec, int numSecondaryKeyFields)
throws AlgebricksException {
ICopyEvaluatorFactory[] andArgsEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeyFields];
NotDescriptor notDesc = new NotDescriptor();
IsNullDescriptor isNullDesc = new IsNullDescriptor();
for (int i = 0; i < numSecondaryKeyFields; i++) {
// Access column i, and apply 'is not null'.
ColumnAccessEvalFactory columnAccessEvalFactory = new ColumnAccessEvalFactory(i);
ICopyEvaluatorFactory isNullEvalFactory = isNullDesc
.createEvaluatorFactory(new ICopyEvaluatorFactory[] { columnAccessEvalFactory });
ICopyEvaluatorFactory notEvalFactory = notDesc
.createEvaluatorFactory(new ICopyEvaluatorFactory[] { isNullEvalFactory });
andArgsEvalFactories[i] = notEvalFactory;
}
ICopyEvaluatorFactory selectCond = null;
if (numSecondaryKeyFields > 1) {
// Create conjunctive condition where all secondary index keys must
// satisfy 'is not null'.
AndDescriptor andDesc = new AndDescriptor();
selectCond = andDesc.createEvaluatorFactory(andArgsEvalFactories);
} else {
selectCond = andArgsEvalFactories[0];
}
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(
new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(selectCond),
null, AqlBinaryBooleanInspectorImpl.FACTORY, false, -1, null);
AlgebricksMetaOperatorDescriptor asterixSelectOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
new IPushRuntimeFactory[] { select }, new RecordDescriptor[] { secondaryRecDesc });
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixSelectOp,
primaryPartitionConstraint);
return asterixSelectOp;
}
// This method creates a source indexing operator for external data
protected ExternalDataScanOperatorDescriptor createExternalIndexingOp(JobSpecification spec)
throws AlgebricksException, AsterixException {
// A record + primary keys
ISerializerDeserializer[] serdes = new ISerializerDeserializer[1 + numPrimaryKeys];
ITypeTraits[] typeTraits = new ITypeTraits[1 + numPrimaryKeys];
// payload serde and type traits for the record slot
serdes[0] = payloadSerde;
typeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
// serdes and type traits for rid fields
for (int i = 1; i < serdes.length; i++) {
serdes[i] = IndexingConstants.getSerializerDeserializer(i - 1);
typeTraits[i] = IndexingConstants.getTypeTraits(i - 1);
}
// output record desc
RecordDescriptor indexerDesc = new RecordDescriptor(serdes, typeTraits);
// Create the operator and its partition constraits
Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> indexingOpAndConstraints;
try {
indexingOpAndConstraints = ExternalIndexingOperations.createExternalIndexingOp(spec, metadataProvider,
dataset, itemType, indexerDesc, externalFiles);
} catch (Exception e) {
throw new AlgebricksException(e);
}
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexingOpAndConstraints.first,
indexingOpAndConstraints.second);
// Set the primary partition constraints to this partition constraints
primaryPartitionConstraint = indexingOpAndConstraints.second;
return indexingOpAndConstraints.first;
}
protected AlgebricksMetaOperatorDescriptor createExternalAssignOp(JobSpecification spec, int numSecondaryKeys)
throws AlgebricksException {
int[] outColumns = new int[numSecondaryKeys];
int[] projectionList = new int[numSecondaryKeys + numPrimaryKeys];
for (int i = 0; i < numSecondaryKeys; i++) {
outColumns[i] = i + numPrimaryKeys + 1;
projectionList[i] = i + numPrimaryKeys + 1;
}
IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
secondaryFieldAccessEvalFactories[i]);
}
//add External RIDs to the projection list
for (int i = 0; i < numPrimaryKeys; i++) {
projectionList[numSecondaryKeys + i] = i + 1;
}
AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
return asterixAssignOp;
}
protected ExternalIndexBulkModifyOperatorDescriptor createExternalIndexBulkModifyOp(JobSpecification spec,
int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
throws MetadataException, AlgebricksException {
int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys];
for (int i = 0; i < numSecondaryKeyFields + numPrimaryKeys; i++) {
fieldPermutation[i] = i;
}
// create a list of file ids
int numOfDeletedFiles = 0;
for (ExternalFile file : externalFiles) {
if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
numOfDeletedFiles++;
}
int[] deletedFiles = new int[numOfDeletedFiles];
int i = 0;
for (ExternalFile file : externalFiles) {
if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
deletedFiles[i] = file.getFileNumber();
}
}
ExternalIndexBulkModifyOperatorDescriptor treeIndexBulkLoadOp = new ExternalIndexBulkModifyOperatorDescriptor(
spec, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
NoOpOperationCallbackFactory.INSTANCE, deletedFiles, fieldPermutation, fillFactor, numElementsHint);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
secondaryPartitionConstraint);
return treeIndexBulkLoadOp;
}
    /**
     * @return the external files this secondary-index job operates on
     */
    public List<ExternalFile> getExternalFiles() {
        return externalFiles;
    }
    /**
     * @param externalFiles the external files this secondary-index job should operate on
     */
    public void setExternalFiles(List<ExternalFile> externalFiles) {
        this.externalFiles = externalFiles;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.properties;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.FailedToCreateRouteException;
import org.apache.camel.ResolveEndpointFailedException;
import org.apache.camel.builder.RouteBuilder;
/**
* @version
*/
public class PropertiesComponentTest extends ContextTestSupport {
    // Each test registers its own routes before starting the context,
    // so the automatic route builder is disabled.
    @Override
    public boolean isUseRouteBuilder() {
        return false;
    }
    // A {{key}} placeholder can make up the entire endpoint URI.
    public void testPropertiesComponent() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:{{cool.end}}");
            }
        });
        context.start();
        getMockEndpoint("mock:result").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
    // A placeholder can also supply only part of the URI (the context path).
    public void testPropertiesComponentResult() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:mock:{{cool.result}}");
            }
        });
        context.start();
        getMockEndpoint("mock:result").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
    // Multiple placeholders may appear in a single URI.
    public void testPropertiesComponentMockMock() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:{{cool.mock}}:{{cool.mock}}");
            }
        });
        context.start();
        getMockEndpoint("mock:mock").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
    // The {{ }} syntax is optional: the key may be given directly to the component.
    public void testPropertiesComponentConcat() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:cool.concat");
            }
        });
        context.start();
        getMockEndpoint("mock:result").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
    // The locations option overrides the component's configured property file.
    public void testPropertiesComponentLocationOverride() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:{{bar.end}}?locations=org/apache/camel/component/properties/bar.properties");
            }
        });
        context.start();
        getMockEndpoint("mock:bar").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }
    // Several locations can be given as a comma-separated list.
    public void testPropertiesComponentLocationsOverride() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:bar.end?locations=org/apache/camel/component/properties/bar.properties");
                from("direct:cheese").to("properties:cheese.end?locations=org/apache/camel/component/properties/bar.properties,"
                        + "classpath:org/apache/camel/component/properties/cheese.properties");
            }
        });
        context.start();
        getMockEndpoint("mock:bar").expectedMessageCount(1);
        getMockEndpoint("mock:cheese").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        template.sendBody("direct:cheese", "Hello Cheese");
        assertMockEndpointsSatisfied();
    }
    // An unknown key must fail route startup with a descriptive error message.
    public void testPropertiesComponentInvalidKey() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:{{foo.unknown}}");
            }
        });
        try {
            context.start();
            fail("Should throw exception");
        } catch (FailedToCreateRouteException e) {
            ResolveEndpointFailedException cause = assertIsInstanceOf(ResolveEndpointFailedException.class, e.getCause());
            IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, cause.getCause());
            assertEquals("Property with key [foo.unknown] not found in properties from text: {{foo.unknown}}", iae.getMessage());
        }
    }
    // A key that (transitively) references itself must be detected at startup.
    public void testPropertiesComponentCircularReference() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:cool.a");
            }
        });
        try {
            context.start();
            fail("Should throw exception");
        } catch (FailedToCreateRouteException e) {
            ResolveEndpointFailedException cause = assertIsInstanceOf(ResolveEndpointFailedException.class, e.getCause());
            IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, cause.getCause());
            assertEquals("Circular reference detected with key [cool.a] from text: {{cool.a}}", iae.getMessage());
        }
    }
    // Default (cache enabled) behavior: both routes resolve to the same mock endpoint.
    public void testPropertiesComponentCacheDefault() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // properties component can also have {{ }} around but its not needed
                from("direct:start").to("properties:{{cool.end}}");
                from("direct:foo").to("properties:mock:{{cool.result}}");
            }
        });
        context.start();
        getMockEndpoint("mock:result").expectedMessageCount(2);
        template.sendBody("direct:start", "Hello World");
        template.sendBody("direct:foo", "Hello Foo");
        assertMockEndpointsSatisfied();
    }
    // Resolution must still work when the component's cache is turned off.
    public void testPropertiesComponentCacheDisabled() throws Exception {
        PropertiesComponent pc = context.getComponent("properties", PropertiesComponent.class);
        pc.setCache(false);
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("properties:cool.end");
                from("direct:foo").to("properties:mock:{{cool.result}}");
            }
        });
        context.start();
        getMockEndpoint("mock:result").expectedMessageCount(2);
        template.sendBody("direct:start", "Hello World");
        template.sendBody("direct:foo", "Hello Foo");
        assertMockEndpointsSatisfied();
    }
    // A ${xxx} JVM system property referenced in locations must exist,
    // otherwise startup should fail with a descriptive error.
    public void testJvmSystemPropertyNotFound() throws Exception {
        try {
            context.addRoutes(new RouteBuilder() {
                @Override
                public void configure() throws Exception {
                    from("direct:start").to("properties:xxx?locations=foo/${xxx}");
                }
            });
            context.start();
            fail("Should thrown an exception");
        } catch (FailedToCreateRouteException e) {
            IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause().getCause());
            assertEquals("Cannot find JVM system property with key: xxx", cause.getMessage());
        }
    }
    // Registers the properties component backed by the default test property file.
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext context = super.createCamelContext();
        context.addComponent("properties", new PropertiesComponent("classpath:org/apache/camel/component/properties/myproperties.properties"));
        return context;
    }
}
| |
package org.sirix.node.xml;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.hash.HashCode;
import org.sirix.api.visitor.VisitResult;
import org.sirix.api.visitor.XmlNodeVisitor;
import org.sirix.node.NodeKind;
import org.sirix.node.SirixDeweyID;
import org.sirix.node.delegates.NodeDelegate;
import org.sirix.node.delegates.StructNodeDelegate;
import org.sirix.node.delegates.ValueNodeDelegate;
import org.sirix.node.immutable.xml.ImmutableComment;
import org.sirix.node.interfaces.Node;
import org.sirix.node.interfaces.StructNode;
import org.sirix.node.interfaces.ValueNode;
import org.sirix.node.interfaces.immutable.ImmutableXmlNode;
import org.sirix.settings.Constants;
import org.sirix.settings.Fixed;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.math.BigInteger;
/**
 * Comment node implementation.
 *
 * @author Johannes Lichtenberger
 *
 */
public final class CommentNode extends AbstractStructForwardingNode implements ValueNode, ImmutableXmlNode {

  /** Delegate for structural node information. */
  private final StructNodeDelegate structNodeDelegate;

  /** Delegate for the value handling of this node. */
  private final ValueNodeDelegate valueNodeDelegate;

  /** Lazily populated cache of the raw comment value. */
  private byte[] rawValue;

  /** Hash of this node (supplied up front or set later). */
  private BigInteger hash;

  /**
   * Creates a comment node with a precomputed hash.
   *
   * @param hashCode precomputed hash of the node
   * @param valDel delegate for {@link ValueNode} implementation
   * @param structDel delegate for {@link StructNode} implementation
   */
  public CommentNode(final BigInteger hashCode, final ValueNodeDelegate valDel, final StructNodeDelegate structDel) {
    hash = hashCode;
    assert valDel != null;
    valueNodeDelegate = valDel;
    assert structDel != null;
    structNodeDelegate = structDel;
  }

  /**
   * Creates a comment node without a precomputed hash.
   *
   * @param valDel delegate for {@link ValueNode} implementation
   * @param structDel delegate for {@link StructNode} implementation
   */
  public CommentNode(final ValueNodeDelegate valDel, final StructNodeDelegate structDel) {
    assert valDel != null;
    valueNodeDelegate = valDel;
    assert structDel != null;
    structNodeDelegate = structDel;
  }

  @Override
  public NodeKind getKind() {
    return NodeKind.COMMENT;
  }

  @Override
  public BigInteger computeHash() {
    // Fold the node-delegate hash, the struct-delegate hash and the value hash
    // together with a 31-based accumulation, capped at 128 bits.
    final HashCode hashOfValue = structNodeDelegate.getNodeDelegate().getHashFunction().hashBytes(getRawValue());
    final BigInteger valueAsBigInteger = new BigInteger(1, hashOfValue.asBytes());
    final BigInteger prime = BigInteger.valueOf(31);
    BigInteger result = BigInteger.ONE;
    result = prime.multiply(result).add(structNodeDelegate.getNodeDelegate().computeHash());
    result = prime.multiply(result).add(structNodeDelegate.computeHash());
    result = prime.multiply(result).add(valueAsBigInteger);
    return Node.to128BitsAtMaximumBigInteger(result);
  }

  @Override
  public void setHash(final BigInteger hash) {
    this.hash = Node.to128BitsAtMaximumBigInteger(hash);
  }

  @Override
  public BigInteger getHash() {
    return hash;
  }

  @Override
  public byte[] getRawValue() {
    // Fetch from the value delegate only once, then serve the cached bytes.
    if (rawValue == null) {
      rawValue = valueNodeDelegate.getRawValue();
    }
    return rawValue;
  }

  @Override
  public void setValue(final byte[] value) {
    // Invalidate the cache so the next read goes through the delegate.
    rawValue = null;
    valueNodeDelegate.setValue(value);
  }

  @Override
  public long getFirstChildKey() {
    // Comment nodes never have children.
    return Fixed.NULL_NODE_KEY.getStandardProperty();
  }

  @Override
  public VisitResult acceptVisitor(final XmlNodeVisitor visitor) {
    return visitor.visit(ImmutableComment.of(this));
  }

  @Override
  public void decrementChildCount() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void incrementChildCount() {
    throw new UnsupportedOperationException();
  }

  @Override
  public long getDescendantCount() {
    return 0;
  }

  @Override
  public void decrementDescendantCount() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void incrementDescendantCount() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void setDescendantCount(final long descendantCount) {
    throw new UnsupportedOperationException();
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(structNodeDelegate.getNodeDelegate(), valueNodeDelegate);
  }

  @Override
  public boolean equals(final @Nullable Object obj) {
    if (!(obj instanceof CommentNode)) {
      return false;
    }
    final CommentNode other = (CommentNode) obj;
    return Objects.equal(structNodeDelegate.getNodeDelegate(), other.getNodeDelegate())
        && valueNodeDelegate.equals(other.valueNodeDelegate);
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
                      .add("node delegate", structNodeDelegate.getNodeDelegate())
                      .add("value delegate", valueNodeDelegate)
                      .toString();
  }

  public ValueNodeDelegate getValNodeDelegate() {
    return valueNodeDelegate;
  }

  @Override
  protected NodeDelegate delegate() {
    return structNodeDelegate.getNodeDelegate();
  }

  @Override
  protected StructNodeDelegate structDelegate() {
    return structNodeDelegate;
  }

  @Override
  public String getValue() {
    return new String(valueNodeDelegate.getRawValue(), Constants.DEFAULT_ENCODING);
  }

  @Override
  public SirixDeweyID getDeweyID() {
    return structNodeDelegate.getNodeDelegate().getDeweyID();
  }

  @Override
  public int getTypeKey() {
    return structNodeDelegate.getNodeDelegate().getTypeKey();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.api;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.security.KeyStore;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import org.apache.avro.ipc.CallFuture;
import org.apache.avro.ipc.NettyTransceiver;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.FlumeException;
import org.apache.flume.source.avro.AvroFlumeEvent;
import org.apache.flume.source.avro.AvroSourceProtocol;
import org.apache.flume.source.avro.Status;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.socket.SocketChannel;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.handler.codec.compression.ZlibDecoder;
import org.jboss.netty.handler.codec.compression.ZlibEncoder;
import org.jboss.netty.handler.ssl.SslHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Avro/Netty implementation of {@link RpcClient}.
* The connections are intended to be opened before clients are given access so
* that the object cannot ever be in an inconsistent when exposed to users.
*/
public class NettyAvroRpcClient extends AbstractRpcClient implements RpcClient {
  // Bounds otherwise-blocking Avro calls (see the AVRO-1122 notes in append()).
  private ExecutorService callTimeoutPool;
  // Protects connState.
  private final ReentrantLock stateLock = new ReentrantLock();
  /**
   * Guarded by {@code stateLock}
   */
  private ConnState connState;
  // Remote host/port; set by configure().
  private InetSocketAddress address;
  // SSL/TLS settings populated by configure().
  private boolean enableSsl;
  private boolean trustAllCerts;
  private String truststore;
  private String truststorePassword;
  private String truststoreType;
  private final List<String> excludeProtocols = new LinkedList<String>();
  // Netty transport and the generated Avro callback client; set by connect().
  private Transceiver transceiver;
  private AvroSourceProtocol.Callback avroClient;
  private static final Logger logger = LoggerFactory
      .getLogger(NettyAvroRpcClient.class);
  // Deflate compression settings populated by configure().
  private boolean enableDeflateCompression;
  private int compressionLevel;
  // Values >= 1 select the channel-factory overloads with an explicit
  // worker-thread cap (see connect()).
  private int maxIoWorkers;
  /**
   * This constructor is intended to be called from {@link RpcClientFactory}.
   * A call to this constructor should be followed by call to configure().
   */
  protected NettyAvroRpcClient(){
  }
  /**
   * Connects using the configured {@code connectTimeout} (milliseconds).
   * This method should only be invoked by the build function
   * @throws FlumeException if the connection cannot be established
   */
  private void connect() throws FlumeException {
    connect(connectTimeout, TimeUnit.MILLISECONDS);
  }
  /**
   * Opens the Netty transceiver and creates the Avro callback client, then
   * marks the client READY. On any failure the partially created resources
   * (call-timeout pool, channel factory) are released before rethrowing.
   * Internal only, for now.
   *
   * @param timeout connect timeout
   * @param tu unit of {@code timeout}
   * @throws FlumeException on connection or unexpected errors
   */
  private void connect(long timeout, TimeUnit tu) throws FlumeException {
    callTimeoutPool = Executors.newCachedThreadPool(
        new TransceiverThreadFactory("Flume Avro RPC Client Call Invoker"));
    NioClientSocketChannelFactory socketChannelFactory = null;
    try {
      ExecutorService bossExecutor =
          Executors.newCachedThreadPool(new TransceiverThreadFactory(
              "Avro " + NettyTransceiver.class.getSimpleName() + " Boss"));
      ExecutorService workerExecutor =
          Executors.newCachedThreadPool(new TransceiverThreadFactory(
              "Avro " + NettyTransceiver.class.getSimpleName() + " I/O Worker"));
      // Compression and/or SSL require the custom channel factory; otherwise
      // the plain Netty NIO factory is used. maxIoWorkers >= 1 selects the
      // overload with an explicit worker-thread cap.
      if (enableDeflateCompression || enableSsl) {
        if (maxIoWorkers >= 1) {
          socketChannelFactory = new SSLCompressionChannelFactory(
              bossExecutor, workerExecutor,
              enableDeflateCompression, enableSsl, trustAllCerts,
              compressionLevel, truststore, truststorePassword, truststoreType,
              excludeProtocols, maxIoWorkers);
        } else {
          socketChannelFactory = new SSLCompressionChannelFactory(
              bossExecutor, workerExecutor,
              enableDeflateCompression, enableSsl, trustAllCerts,
              compressionLevel, truststore, truststorePassword, truststoreType,
              excludeProtocols);
        }
      } else {
        if (maxIoWorkers >= 1) {
          socketChannelFactory = new NioClientSocketChannelFactory(
              bossExecutor, workerExecutor, maxIoWorkers);
        } else {
          socketChannelFactory = new NioClientSocketChannelFactory(
              bossExecutor, workerExecutor);
        }
      }
      transceiver = new NettyTransceiver(this.address,
          socketChannelFactory,
          tu.toMillis(timeout));
      avroClient =
          SpecificRequestor.getClient(AvroSourceProtocol.Callback.class,
              transceiver);
    } catch (Throwable t) {
      // Undo partial initialization before translating the failure.
      if (callTimeoutPool != null) {
        callTimeoutPool.shutdownNow();
      }
      if (socketChannelFactory != null) {
        socketChannelFactory.releaseExternalResources();
      }
      if (t instanceof IOException) {
        throw new FlumeException(this + ": RPC connection error", t);
      } else if (t instanceof FlumeException) {
        throw (FlumeException) t;
      } else if (t instanceof Error) {
        throw (Error) t;
      } else {
        throw new FlumeException(this + ": Unexpected exception", t);
      }
    }
    setState(ConnState.READY);
  }
@Override
public void close() throws FlumeException {
if (callTimeoutPool != null) {
callTimeoutPool.shutdown();
try {
if (!callTimeoutPool.awaitTermination(requestTimeout,
TimeUnit.MILLISECONDS)) {
callTimeoutPool.shutdownNow();
if (!callTimeoutPool.awaitTermination(requestTimeout,
TimeUnit.MILLISECONDS)) {
logger.warn(this + ": Unable to cleanly shut down call timeout " +
"pool");
}
}
} catch (InterruptedException ex) {
logger.warn(this + ": Interrupted during close", ex);
// re-cancel if current thread also interrupted
callTimeoutPool.shutdownNow();
// preserve interrupt status
Thread.currentThread().interrupt();
}
callTimeoutPool = null;
}
try {
transceiver.close();
} catch (IOException ex) {
throw new FlumeException(this + ": Error closing transceiver.", ex);
} finally {
setState(ConnState.DEAD);
}
}
@Override
public String toString() {
return "NettyAvroRpcClient { host: " + address.getHostName() + ", port: " +
address.getPort() + " }";
}
@Override
public void append(Event event) throws EventDeliveryException {
try {
append(event, requestTimeout, TimeUnit.MILLISECONDS);
} catch (Throwable t) {
// we mark as no longer active without trying to clean up resources
// client is required to call close() to clean up resources
setState(ConnState.DEAD);
if (t instanceof Error) {
throw (Error) t;
}
if (t instanceof TimeoutException) {
throw new EventDeliveryException(this + ": Failed to send event. " +
"RPC request timed out after " + requestTimeout + "ms", t);
}
throw new EventDeliveryException(this + ": Failed to send event", t);
}
}
  /**
   * Sends one event via the Avro callback client and waits for the remote
   * Status, failing unless it is OK.
   *
   * @param event event to send
   * @param timeout time to wait for the remote status
   * @param tu unit of {@code timeout}
   * @throws EventDeliveryException on any failure, timeout, or non-OK status
   */
  private void append(Event event, long timeout, TimeUnit tu)
      throws EventDeliveryException {
    assertReady();
    final CallFuture<Status> callFuture = new CallFuture<Status>();
    final AvroFlumeEvent avroEvent = new AvroFlumeEvent();
    avroEvent.setBody(ByteBuffer.wrap(event.getBody()));
    avroEvent.setHeaders(toCharSeqMap(event.getHeaders()));
    Future<Void> handshake;
    try {
      // due to AVRO-1122, avroClient.append() may block
      handshake = callTimeoutPool.submit(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          avroClient.append(avroEvent, callFuture);
          return null;
        }
      });
    } catch (RejectedExecutionException ex) {
      throw new EventDeliveryException(this + ": Executor error", ex);
    }
    try {
      // NOTE(review): this wait is bounded by connectTimeout rather than the
      // caller-supplied timeout (the blocking here is handshake-related) --
      // confirm that is intended.
      handshake.get(connectTimeout, TimeUnit.MILLISECONDS);
    } catch (TimeoutException ex) {
      throw new EventDeliveryException(this + ": Handshake timed out after " +
          connectTimeout + " ms", ex);
    } catch (InterruptedException ex) {
      // NOTE(review): interrupt status is not restored here (compare
      // waitForStatusOK) -- consider re-interrupting.
      throw new EventDeliveryException(this + ": Interrupted in handshake", ex);
    } catch (ExecutionException ex) {
      throw new EventDeliveryException(this + ": RPC request exception", ex);
    } catch (CancellationException ex) {
      throw new EventDeliveryException(this + ": RPC request cancelled", ex);
    } finally {
      if (!handshake.isDone()) {
        handshake.cancel(true);
      }
    }
    // Wait up to the caller-supplied timeout for the remote status.
    waitForStatusOK(callFuture, timeout, tu);
  }
  /**
   * Sends a list of events using the configured request timeout. On any
   * failure the client transitions to DEAD and the caller must close() it.
   *
   * @param events events to send
   * @throws EventDeliveryException if the send fails or times out
   */
  @Override
  public void appendBatch(List<Event> events) throws EventDeliveryException {
    try {
      appendBatch(events, requestTimeout, TimeUnit.MILLISECONDS);
    } catch (Throwable t) {
      // we mark as no longer active without trying to clean up resources
      // client is required to call close() to clean up resources
      setState(ConnState.DEAD);
      if (t instanceof Error) {
        throw (Error) t;
      }
      if (t instanceof TimeoutException) {
        // NOTE(review): message says "event" though this is the batch path.
        throw new EventDeliveryException(this + ": Failed to send event. " +
            "RPC request timed out after " + requestTimeout + " ms", t);
      }
      throw new EventDeliveryException(this + ": Failed to send batch", t);
    }
  }
  /**
   * Sends the events in sub-batches of at most {@code batchSize}, waiting for
   * a Status.OK from the remote side after each sub-batch before continuing.
   *
   * @param events events to send
   * @param timeout time to wait for each sub-batch status
   * @param tu unit of {@code timeout}
   * @throws EventDeliveryException on any failure, timeout, or non-OK status
   */
  private void appendBatch(List<Event> events, long timeout, TimeUnit tu)
      throws EventDeliveryException {
    assertReady();
    Iterator<Event> iter = events.iterator();
    final List<AvroFlumeEvent> avroEvents = new LinkedList<AvroFlumeEvent>();
    // send multiple batches... bail if there is a problem at any time
    while (iter.hasNext()) {
      // Reusing the list is safe because waitForStatusOK below completes the
      // previous sub-batch (or throws) before the list is cleared again.
      avroEvents.clear();
      for (int i = 0; i < batchSize && iter.hasNext(); i++) {
        Event event = iter.next();
        AvroFlumeEvent avroEvent = new AvroFlumeEvent();
        avroEvent.setBody(ByteBuffer.wrap(event.getBody()));
        avroEvent.setHeaders(toCharSeqMap(event.getHeaders()));
        avroEvents.add(avroEvent);
      }
      final CallFuture<Status> callFuture = new CallFuture<Status>();
      Future<Void> handshake;
      try {
        // due to AVRO-1122, avroClient.appendBatch() may block
        handshake = callTimeoutPool.submit(new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            avroClient.appendBatch(avroEvents, callFuture);
            return null;
          }
        });
      } catch (RejectedExecutionException ex) {
        throw new EventDeliveryException(this + ": Executor error", ex);
      }
      try {
        // NOTE(review): bounded by connectTimeout rather than the per-call
        // timeout, mirroring append() -- confirm this is intended.
        handshake.get(connectTimeout, TimeUnit.MILLISECONDS);
      } catch (TimeoutException ex) {
        throw new EventDeliveryException(this + ": Handshake timed out after " +
            connectTimeout + "ms", ex);
      } catch (InterruptedException ex) {
        throw new EventDeliveryException(this + ": Interrupted in handshake",
            ex);
      } catch (ExecutionException ex) {
        throw new EventDeliveryException(this + ": RPC request exception", ex);
      } catch (CancellationException ex) {
        throw new EventDeliveryException(this + ": RPC request cancelled", ex);
      } finally {
        if (!handshake.isDone()) {
          handshake.cancel(true);
        }
      }
      waitForStatusOK(callFuture, timeout, tu);
    }
  }
/**
* Helper method that waits for a Status future to come back and validates
* that it returns Status == OK.
* @param callFuture Future to wait on
* @param timeout Time to wait before failing
* @param tu Time Unit of {@code timeout}
* @throws EventDeliveryException If there is a timeout or if Status != OK
*/
private void waitForStatusOK(CallFuture<Status> callFuture,
long timeout, TimeUnit tu) throws EventDeliveryException {
try {
Status status = callFuture.get(timeout, tu);
if (status != Status.OK) {
throw new EventDeliveryException(this + ": Avro RPC call returned " +
"Status: " + status);
}
} catch (CancellationException ex) {
throw new EventDeliveryException(this + ": RPC future was cancelled", ex);
} catch (ExecutionException ex) {
throw new EventDeliveryException(this + ": Exception thrown from " +
"remote handler", ex);
} catch (TimeoutException ex) {
throw new EventDeliveryException(this + ": RPC request timed out", ex);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
throw new EventDeliveryException(this + ": RPC request interrupted", ex);
}
}
/**
* This method should always be used to change {@code connState} so we ensure
* that invalid state transitions do not occur and that the {@code isIdle}
* {@link Condition} variable gets signaled reliably.
* Throws {@code IllegalStateException} when called to transition from CLOSED
* to another state.
* @param newState
*/
private void setState(ConnState newState) {
stateLock.lock();
try {
if (connState == ConnState.DEAD && connState != newState) {
throw new IllegalStateException("Cannot transition from CLOSED state.");
}
connState = newState;
} finally {
stateLock.unlock();
}
}
/**
* If the connection state != READY, throws {@link EventDeliveryException}.
*/
private void assertReady() throws EventDeliveryException {
stateLock.lock();
try {
ConnState curState = connState;
if (curState != ConnState.READY) {
throw new EventDeliveryException("RPC failed, client in an invalid " +
"state: " + curState);
}
} finally {
stateLock.unlock();
}
}
/**
* Helper function to convert a map of String to a map of CharSequence.
*/
private static Map<CharSequence, CharSequence> toCharSeqMap(
Map<String, String> stringMap) {
Map<CharSequence, CharSequence> charSeqMap =
new HashMap<CharSequence, CharSequence>();
for (Map.Entry<String, String> entry : stringMap.entrySet()) {
charSeqMap.put(entry.getKey(), entry.getValue());
}
return charSeqMap;
}
@Override
public boolean isActive() {
stateLock.lock();
try {
return (connState == ConnState.READY);
} finally {
stateLock.unlock();
}
}
  // Client connection lifecycle; DEAD is terminal (see setState).
  private static enum ConnState {
    INIT, READY, DEAD
  }
/**
* <p>
* Configure the actual client using the properties.
* <tt>properties</tt> should have at least 2 params:
* <p><tt>hosts</tt> = <i>alias_for_host</i></p>
* <p><tt>alias_for_host</tt> = <i>hostname:port</i>. </p>
* Only the first host is added, rest are discarded.</p>
* <p>Optionally it can also have a <p>
* <tt>batch-size</tt> = <i>batchSize</i>
* @param properties The properties to instantiate the client with.
* @return
*/
@Override
public synchronized void configure(Properties properties)
throws FlumeException {
stateLock.lock();
try {
if (connState == ConnState.READY || connState == ConnState.DEAD) {
throw new FlumeException("This client was already configured, " +
"cannot reconfigure.");
}
} finally {
stateLock.unlock();
}
// batch size
String strBatchSize = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_BATCH_SIZE);
logger.debug("Batch size string = " + strBatchSize);
batchSize = RpcClientConfigurationConstants.DEFAULT_BATCH_SIZE;
if (strBatchSize != null && !strBatchSize.isEmpty()) {
try {
int parsedBatch = Integer.parseInt(strBatchSize);
if (parsedBatch < 1) {
logger.warn("Invalid value for batchSize: {}; Using default value.", parsedBatch);
} else {
batchSize = parsedBatch;
}
} catch (NumberFormatException e) {
logger.warn("Batchsize is not valid for RpcClient: " + strBatchSize +
". Default value assigned.", e);
}
}
// host and port
String hostNames = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_HOSTS);
String[] hosts = null;
if (hostNames != null && !hostNames.isEmpty()) {
hosts = hostNames.split("\\s+");
} else {
throw new FlumeException("Hosts list is invalid: " + hostNames);
}
if (hosts.length > 1) {
logger.warn("More than one hosts are specified for the default client. "
+ "Only the first host will be used and others ignored. Specified: "
+ hostNames + "; to be used: " + hosts[0]);
}
String host = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_HOSTS_PREFIX + hosts[0]);
if (host == null || host.isEmpty()) {
throw new FlumeException("Host not found: " + hosts[0]);
}
String[] hostAndPort = host.split(":");
if (hostAndPort.length != 2) {
throw new FlumeException("Invalid hostname: " + hosts[0]);
}
Integer port = null;
try {
port = Integer.parseInt(hostAndPort[1]);
} catch (NumberFormatException e) {
throw new FlumeException("Invalid Port: " + hostAndPort[1], e);
}
this.address = new InetSocketAddress(hostAndPort[0], port);
// connect timeout
connectTimeout =
RpcClientConfigurationConstants.DEFAULT_CONNECT_TIMEOUT_MILLIS;
String strConnTimeout = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_CONNECT_TIMEOUT);
if (strConnTimeout != null && strConnTimeout.trim().length() > 0) {
try {
connectTimeout = Long.parseLong(strConnTimeout);
if (connectTimeout < 1000) {
logger.warn("Connection timeout specified less than 1s. " +
"Using default value instead.");
connectTimeout =
RpcClientConfigurationConstants.DEFAULT_CONNECT_TIMEOUT_MILLIS;
}
} catch (NumberFormatException ex) {
logger.error("Invalid connect timeout specified: " + strConnTimeout);
}
}
// request timeout
requestTimeout =
RpcClientConfigurationConstants.DEFAULT_REQUEST_TIMEOUT_MILLIS;
String strReqTimeout = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_REQUEST_TIMEOUT);
if (strReqTimeout != null && strReqTimeout.trim().length() > 0) {
try {
requestTimeout = Long.parseLong(strReqTimeout);
if (requestTimeout < 1000) {
logger.warn("Request timeout specified less than 1s. " +
"Using default value instead.");
requestTimeout =
RpcClientConfigurationConstants.DEFAULT_REQUEST_TIMEOUT_MILLIS;
}
} catch (NumberFormatException ex) {
logger.error("Invalid request timeout specified: " + strReqTimeout);
}
}
String enableCompressionStr =
properties.getProperty(RpcClientConfigurationConstants.CONFIG_COMPRESSION_TYPE);
if (enableCompressionStr != null && enableCompressionStr.equalsIgnoreCase("deflate")) {
this.enableDeflateCompression = true;
String compressionLvlStr =
properties.getProperty(RpcClientConfigurationConstants.CONFIG_COMPRESSION_LEVEL);
compressionLevel = RpcClientConfigurationConstants.DEFAULT_COMPRESSION_LEVEL;
if (compressionLvlStr != null) {
try {
compressionLevel = Integer.parseInt(compressionLvlStr);
} catch (NumberFormatException ex) {
logger.error("Invalid compression level: " + compressionLvlStr);
}
}
}
enableSsl = Boolean.parseBoolean(properties.getProperty(
RpcClientConfigurationConstants.CONFIG_SSL));
trustAllCerts = Boolean.parseBoolean(properties.getProperty(
RpcClientConfigurationConstants.CONFIG_TRUST_ALL_CERTS));
truststore = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_TRUSTSTORE);
truststorePassword = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_TRUSTSTORE_PASSWORD);
truststoreType = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_TRUSTSTORE_TYPE, "JKS");
String excludeProtocolsStr = properties.getProperty(
RpcClientConfigurationConstants.CONFIG_EXCLUDE_PROTOCOLS);
if (excludeProtocolsStr == null) {
excludeProtocols.add("SSLv3");
} else {
excludeProtocols.addAll(Arrays.asList(excludeProtocolsStr.split(" ")));
if (!excludeProtocols.contains("SSLv3")) {
excludeProtocols.add("SSLv3");
}
}
String maxIoWorkersStr = properties.getProperty(RpcClientConfigurationConstants.MAX_IO_WORKERS);
if (!StringUtils.isEmpty(maxIoWorkersStr)) {
try {
maxIoWorkers = Integer.parseInt(maxIoWorkersStr);
} catch (NumberFormatException ex) {
logger.warn("Invalid maxIOWorkers:" + maxIoWorkersStr + " Using " +
"default maxIOWorkers.");
maxIoWorkers = -1;
}
}
if (maxIoWorkers < 1) {
logger.warn("Using default maxIOWorkers");
maxIoWorkers = -1;
}
this.connect();
}
/**
 * A thread factory modeled after the private static
 * NettyTransceiver.NettyTransceiverThreadFactory class. The one behavioral
 * difference is that every thread produced here is marked as a daemon, so the
 * JVM can terminate once all non-daemon threads have finished.
 */
private static class TransceiverThreadFactory implements ThreadFactory {
  // Monotonically increasing suffix appended to each thread name.
  private final AtomicInteger threadCounter = new AtomicInteger(0);
  private final String prefix;

  /**
   * Creates a TransceiverThreadFactory that names its threads with the given
   * prefix followed by a unique numeric ID.
   *
   * @param prefix the name prefix to use for all threads created by this
   * ThreadFactory.
   */
  public TransceiverThreadFactory(String prefix) {
    this.prefix = prefix;
  }

  @Override
  public Thread newThread(Runnable r) {
    Thread t = new Thread(r);
    t.setName(prefix + " " + threadCounter.incrementAndGet());
    // Daemon threads do not block JVM shutdown.
    t.setDaemon(true);
    return t;
  }
}
/**
 * Factory of SSL-enabled client channels.
 * Copied from Avro's org.apache.avro.ipc.TestNettyServerWithSSL test.
 *
 * Optionally installs zlib compression handlers and/or an SSL handler at the
 * head of each new channel's pipeline.
 */
private static class SSLCompressionChannelFactory extends NioClientSocketChannelFactory {

  private final boolean enableCompression;
  private final int compressionLevel;
  private final boolean enableSsl;
  // When true, server certificates are accepted without validation.
  private final boolean trustAllCerts;
  private final String truststore;
  private final String truststorePassword;
  private final String truststoreType;
  // TLS protocol names that must not be negotiated (e.g. "SSLv3").
  private final List<String> excludeProtocols;

  public SSLCompressionChannelFactory(Executor bossExecutor, Executor workerExecutor,
      boolean enableCompression, boolean enableSsl, boolean trustAllCerts,
      int compressionLevel, String truststore, String truststorePassword,
      String truststoreType, List<String> excludeProtocols) {
    super(bossExecutor, workerExecutor);
    this.enableCompression = enableCompression;
    this.enableSsl = enableSsl;
    this.compressionLevel = compressionLevel;
    this.trustAllCerts = trustAllCerts;
    this.truststore = truststore;
    this.truststorePassword = truststorePassword;
    this.truststoreType = truststoreType;
    this.excludeProtocols = excludeProtocols;
  }

  /**
   * Variant that additionally bounds the number of NIO worker threads.
   */
  public SSLCompressionChannelFactory(Executor bossExecutor, Executor workerExecutor,
      boolean enableCompression, boolean enableSsl, boolean trustAllCerts,
      int compressionLevel, String truststore, String truststorePassword,
      String truststoreType, List<String> excludeProtocols, int maxIOWorkers) {
    super(bossExecutor, workerExecutor, maxIOWorkers);
    this.enableCompression = enableCompression;
    this.enableSsl = enableSsl;
    this.compressionLevel = compressionLevel;
    this.trustAllCerts = trustAllCerts;
    this.truststore = truststore;
    this.truststorePassword = truststorePassword;
    this.truststoreType = truststoreType;
    this.excludeProtocols = excludeProtocols;
  }

  @Override
  public SocketChannel newChannel(ChannelPipeline pipeline) {
    TrustManager[] managers;
    try {
      if (enableCompression) {
        ZlibEncoder encoder = new ZlibEncoder(compressionLevel);
        pipeline.addFirst("deflater", encoder);
        pipeline.addFirst("inflater", new ZlibDecoder());
      }
      if (enableSsl) {
        if (trustAllCerts) {
          logger.warn("No truststore configured, setting TrustManager to accept"
              + " all server certificates");
          managers = new TrustManager[] { new PermissiveTrustManager() };
        } else {
          KeyStore keystore = null;
          if (truststore != null) {
            if (truststorePassword == null) {
              throw new NullPointerException("truststore password is null");
            }
            keystore = KeyStore.getInstance(truststoreType);
            // FIX: close the truststore stream when loading completes. The
            // original code opened a FileInputStream here and never closed it,
            // leaking one file descriptor per SSL channel created.
            try (InputStream truststoreStream = new FileInputStream(truststore)) {
              keystore.load(truststoreStream, truststorePassword.toCharArray());
            }
          }
          TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
          // null keystore is OK, with SunX509 it defaults to system CA Certs
          // see http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#X509TrustManager
          tmf.init(keystore);
          managers = tmf.getTrustManagers();
        }
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, managers, null);
        SSLEngine sslEngine = sslContext.createSSLEngine();
        sslEngine.setUseClientMode(true);
        // Strip out any excluded protocols (SSLv3 at minimum, per caller setup).
        List<String> enabledProtocols = new ArrayList<String>();
        for (String protocol : sslEngine.getEnabledProtocols()) {
          if (!excludeProtocols.contains(protocol)) {
            enabledProtocols.add(protocol);
          }
        }
        sslEngine.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
        logger.info("SSLEngine protocols enabled: " +
            Arrays.asList(sslEngine.getEnabledProtocols()));
        // addFirst() will make SSL handling the first stage of decoding
        // and the last stage of encoding this must be added after
        // adding compression handling above
        pipeline.addFirst("ssl", new SslHandler(sslEngine));
      }
      return super.newChannel(pipeline);
    } catch (Exception ex) {
      logger.error("Cannot create SSL channel", ex);
      throw new RuntimeException("Cannot create SSL channel", ex);
    }
  }
}
/**
 * Permissive trust manager that accepts any certificate without validation.
 * Security note: this defeats TLS server authentication; only installed when
 * the client is explicitly configured to trust all certificates.
 */
private static class PermissiveTrustManager implements X509TrustManager {
  @Override
  public void checkClientTrusted(X509Certificate[] chain, String authType) {
    // Intentionally a no-op: every client certificate is accepted.
  }

  @Override
  public void checkServerTrusted(X509Certificate[] chain, String authType) {
    // Intentionally a no-op: every server certificate is accepted.
  }

  @Override
  public X509Certificate[] getAcceptedIssuers() {
    // No specific issuers are required.
    return new X509Certificate[] {};
  }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.Assertions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.AutoCreateAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.IngestActionForwarder;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.update.TransportUpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexingPressure;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.ingest.IngestService;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Names;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;
import java.util.function.LongSupplier;
import java.util.stream.Collectors;
import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
/**
* Groups bulk request items by shard, optionally creating non-existent indices and
* delegates to {@link TransportShardBulkAction} for shard-level bulk execution
*/
public class TransportBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> {
private static final Logger logger = LogManager.getLogger(TransportBulkAction.class);

private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IngestService ingestService;
// Clock for computing "took" values; injectable for tests (production uses System::nanoTime).
private final LongSupplier relativeTimeProvider;
// Forwards bulk requests to an ingest node when the local node cannot run pipelines.
private final IngestActionForwarder ingestForwarder;
private final NodeClient client;
private final IndexNameExpressionResolver indexNameExpressionResolver;
// NOTE(review): presumably marks items dropped by ingest whose id was auto-generated —
// not referenced in this chunk, confirm against the rest of the file.
private static final String DROPPED_ITEM_WITH_AUTO_GENERATED_ID = "auto-generated";
// Accounts coordinating-node memory for in-flight bulk operations (back-pressure).
private final IndexingPressure indexingPressure;
private final SystemIndices systemIndices;
/**
 * Production constructor (dependency-injected): delegates to the full constructor,
 * fixing the relative-time source to {@code System::nanoTime}.
 */
@Inject
public TransportBulkAction(ThreadPool threadPool, TransportService transportService,
                           ClusterService clusterService, IngestService ingestService,
                           NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                           IndexingPressure indexingPressure, SystemIndices systemIndices) {
    this(threadPool, transportService, clusterService, ingestService, client, actionFilters,
        indexNameExpressionResolver, indexingPressure, systemIndices, System::nanoTime);
}
/**
 * Full constructor that additionally accepts the relative-time source used to
 * compute the bulk "took" value; primarily useful for tests.
 *
 * @param relativeTimeProvider nanosecond-resolution clock, must not be null
 */
public TransportBulkAction(ThreadPool threadPool, TransportService transportService,
                           ClusterService clusterService, IngestService ingestService,
                           NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                           IndexingPressure indexingPressure, SystemIndices systemIndices,
                           LongSupplier relativeTimeProvider) {
    super(BulkAction.NAME, transportService, actionFilters, BulkRequest::new, ThreadPool.Names.SAME);
    Objects.requireNonNull(relativeTimeProvider);
    this.relativeTimeProvider = relativeTimeProvider;
    this.client = client;
    this.threadPool = threadPool;
    this.clusterService = clusterService;
    this.ingestService = ingestService;
    this.indexNameExpressionResolver = indexNameExpressionResolver;
    this.indexingPressure = indexingPressure;
    this.systemIndices = systemIndices;
    // Register the forwarder as a state applier so it can track ingest nodes
    // from cluster-state updates.
    this.ingestForwarder = new IngestActionForwarder(transportService);
    clusterService.addStateApplier(this.ingestForwarder);
}
/**
 * Extracts the {@link IndexRequest} that a {@link DocWriteRequest} will write, if any.
 * Plain index actions are returned as-is; updates expose either their doc (when
 * docAsUpsert is set) or their upsert request (upserts are modeled as an
 * {@link IndexRequest} inside the {@link UpdateRequest}). Delete requests are ignored.
 *
 * @param docWriteRequest the request to inspect for an {@link IndexRequest}
 * @return the associated {@link IndexRequest}, or {@code null} if there is none
 */
public static IndexRequest getIndexWriteRequest(DocWriteRequest<?> docWriteRequest) {
    if (docWriteRequest instanceof IndexRequest) {
        return (IndexRequest) docWriteRequest;
    }
    if (docWriteRequest instanceof UpdateRequest) {
        UpdateRequest updateRequest = (UpdateRequest) docWriteRequest;
        return updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
    }
    return null;
}
/**
 * Entry point for a bulk request: reserves coordinating indexing-pressure memory,
 * then hands off to {@link #doInternalExecute}. The reservation is released via
 * the wrapped listener, whether the bulk completes or fails.
 */
@Override
protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) {
    final int indexingOps = bulkRequest.numberOfActions();
    final long indexingBytes = bulkRequest.ramBytesUsed();
    final boolean isOnlySystem = isOnlySystem(bulkRequest, clusterService.state().metadata().getIndicesLookup(), systemIndices);
    // Reserve memory for this bulk up front; the Releasable returns it.
    final Releasable releasable = indexingPressure.markCoordinatingOperationStarted(indexingOps, indexingBytes, isOnlySystem);
    // Ensure the reservation is released before the caller's listener fires.
    final ActionListener<BulkResponse> releasingListener = ActionListener.runBefore(listener, releasable::close);
    // System-index-only bulks run on the dedicated system-write executor.
    final String executorName = isOnlySystem ? Names.SYSTEM_WRITE : Names.WRITE;
    try {
        doInternalExecute(task, bulkRequest, executorName, releasingListener);
    } catch (Exception e) {
        // Failing through the releasing listener guarantees the memory is returned.
        releasingListener.onFailure(e);
    }
}
/**
 * Core bulk orchestration: (1) resolves ingest pipelines and, if any item needs
 * one, routes the whole request through ingest processing (locally or forwarded
 * to an ingest node) — that path re-enters this method with pipelines marked as
 * resolved; (2) otherwise auto-creates any missing target indices; (3) finally
 * executes the bulk via {@code executeBulk}.
 */
protected void doInternalExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener<BulkResponse> listener) {
    final long startTime = relativeTime();
    final AtomicArray<BulkItemResponse> responses = new AtomicArray<>(bulkRequest.requests.size());
    boolean hasIndexRequestsWithPipelines = false;
    final Metadata metadata = clusterService.state().getMetadata();
    final Version minNodeVersion = clusterService.state().getNodes().getMinNodeVersion();
    for (DocWriteRequest<?> actionRequest : bulkRequest.requests) {
        IndexRequest indexRequest = getIndexWriteRequest(actionRequest);
        if (indexRequest != null) {
            // Each index request needs to be evaluated, because this method also modifies the IndexRequest
            boolean indexRequestHasPipeline = IngestService.resolvePipelines(actionRequest, indexRequest, metadata);
            hasIndexRequestsWithPipelines |= indexRequestHasPipeline;
        }
        if (actionRequest instanceof IndexRequest) {
            IndexRequest ir = (IndexRequest) actionRequest;
            // Reject auto-id + op_type=create combinations older nodes can't handle.
            ir.checkAutoIdWithOpTypeCreateSupportedByVersion(minNodeVersion);
            // The auto-generated timestamp is owned by this node; clients must not set it.
            if (ir.getAutoGeneratedTimestamp() != IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP) {
                throw new IllegalArgumentException("autoGeneratedTimestamp should not be set externally");
            }
        }
    }
    if (hasIndexRequestsWithPipelines) {
        // this method (doExecute) will be called again, but with the bulk requests updated from the ingest node processing but
        // also with IngestService.NOOP_PIPELINE_NAME on each request. This ensures that this on the second time through this method,
        // this path is never taken.
        try {
            if (Assertions.ENABLED) {
                // Sanity check (assertions builds only): every index request must have
                // had its pipeline resolved by the loop above.
                final boolean arePipelinesResolved = bulkRequest.requests()
                    .stream()
                    .map(TransportBulkAction::getIndexWriteRequest)
                    .filter(Objects::nonNull)
                    .allMatch(IndexRequest::isPipelineResolved);
                assert arePipelinesResolved : bulkRequest;
            }
            if (clusterService.localNode().isIngestNode()) {
                processBulkIndexIngestRequest(task, bulkRequest, executorName, listener);
            } else {
                // This node cannot run pipelines; hand the request to an ingest node.
                ingestForwarder.forwardIngestRequest(BulkAction.INSTANCE, bulkRequest, listener);
            }
        } catch (Exception e) {
            listener.onFailure(e);
        }
        return;
    }
    // Attempt to create all the indices that we're going to need during the bulk before we start.
    // Step 1: collect all the indices in the request
    final Map<String, Boolean> indices = bulkRequest.requests.stream()
        // delete requests should not attempt to create the index (if the index does not
        // exists), unless an external versioning is used
        .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE
            || request.versionType() == VersionType.EXTERNAL
            || request.versionType() == VersionType.EXTERNAL_GTE)
        // Value is true when at least one item requires the target to be an alias.
        .collect(Collectors.toMap(DocWriteRequest::index, DocWriteRequest::isRequireAlias, (v1, v2) -> v1 || v2));
    // Step 2: filter the list of indices to find those that don't currently exist.
    final Map<String, IndexNotFoundException> indicesThatCannotBeCreated = new HashMap<>();
    Set<String> autoCreateIndices = new HashSet<>();
    ClusterState state = clusterService.state();
    for (Map.Entry<String, Boolean> indexAndFlag : indices.entrySet()) {
        final String index = indexAndFlag.getKey();
        boolean shouldAutoCreate = indexNameExpressionResolver.hasIndexAbstraction(index, state) == false;
        // We should only auto create if we are not requiring it to be an alias
        if (shouldAutoCreate && (indexAndFlag.getValue() == false)) {
            autoCreateIndices.add(index);
        }
    }
    // Step 3: create all the indices that are missing, if there are any missing. start the bulk after all the creates come back.
    if (autoCreateIndices.isEmpty()) {
        executeBulk(task, bulkRequest, startTime, listener, responses, indicesThatCannotBeCreated);
    } else {
        // Countdown of outstanding create-index calls; the bulk runs once it hits zero.
        final AtomicInteger counter = new AtomicInteger(autoCreateIndices.size());
        for (String index : autoCreateIndices) {
            createIndex(index, bulkRequest.timeout(), minNodeVersion, new ActionListener<>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    if (counter.decrementAndGet() == 0) {
                        // Fork to the write executor: this callback may run on a transport thread.
                        threadPool.executor(executorName).execute(new ActionRunnable<>(listener) {
                            @Override
                            protected void doRun() {
                                executeBulk(task, bulkRequest, startTime, listener, responses, indicesThatCannotBeCreated);
                            }
                        });
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    final Throwable cause = ExceptionsHelper.unwrapCause(e);
                    if (cause instanceof IndexNotFoundException) {
                        // Remember the failure so items targeting this index fail later.
                        indicesThatCannotBeCreated.put(index, (IndexNotFoundException) e);
                    }
                    else if ((cause instanceof ResourceAlreadyExistsException) == false) {
                        // fail all requests involving this index, if create didn't work
                        for (int i = 0; i < bulkRequest.requests.size(); i++) {
                            DocWriteRequest<?> request = bulkRequest.requests.get(i);
                            if (request != null && setResponseFailureIfIndexMatches(responses, i, request, index, e)) {
                                // Null out the slot so the item is never processed again.
                                bulkRequest.requests.set(i, null);
                            }
                        }
                    }
                    if (counter.decrementAndGet() == 0) {
                        // Attach the create failure as suppressed context to any bulk failure.
                        final ActionListener<BulkResponse> wrappedListener = ActionListener.wrap(listener::onResponse, inner -> {
                            inner.addSuppressed(e);
                            listener.onFailure(inner);
                        });
                        threadPool.executor(executorName).execute(new ActionRunnable<>(wrappedListener) {
                            @Override
                            protected void doRun() {
                                executeBulk(task, bulkRequest, startTime, wrappedListener, responses, indicesThatCannotBeCreated);
                            }

                            @Override
                            public void onRejection(Exception rejectedException) {
                                rejectedException.addSuppressed(e);
                                super.onRejection(rejectedException);
                            }
                        });
                    }
                }
            });
        }
    }
}
/**
 * Rejects append-only writes (op_type=create, or op_type=index without compare-and-set
 * sequence numbers) that directly target the backing index of a data stream; such
 * writes must go through the data stream itself. Updates and deletes — and index ops
 * carrying if_seq_no / if_primary_term — are allowed to target a backing index.
 */
static void prohibitAppendWritesInBackingIndices(DocWriteRequest<?> writeRequest, Metadata metadata) {
    final IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(writeRequest.index());
    // Only applies to a concrete index that is a backing index of a data stream.
    if (indexAbstraction == null
        || indexAbstraction.getType() != IndexAbstraction.Type.CONCRETE_INDEX
        || indexAbstraction.getParentDataStream() == null) {
        return;
    }
    final DataStream dataStream = indexAbstraction.getParentDataStream().getDataStream();
    final DocWriteRequest.OpType opType = writeRequest.opType();
    // CREATE is always append-only.
    if (opType == DocWriteRequest.OpType.CREATE) {
        throw new IllegalArgumentException("index request with op_type=create targeting backing indices is disallowed, " +
            "target corresponding data stream [" + dataStream.getName() + "] instead");
    }
    // INDEX without if_primary_term / if_seq_no is treated as append-only too
    // (it may actually be an update, but that cannot be determined at this stage
    // without an engine-level change; this check is sufficient for now).
    final boolean plainIndexOp = opType == DocWriteRequest.OpType.INDEX
        && writeRequest.ifPrimaryTerm() == UNASSIGNED_PRIMARY_TERM
        && writeRequest.ifSeqNo() == UNASSIGNED_SEQ_NO;
    if (plainIndexOp) {
        throw new IllegalArgumentException("index request with op_type=index and no if_primary_term and if_seq_no set " +
            "targeting backing indices is disallowed, target corresponding data stream [" + dataStream.getName() + "] instead");
    }
}
/**
 * Rejects write requests that specify custom routing while targeting a data stream;
 * routing must be used against the backing indices directly, not the stream.
 */
static void prohibitCustomRoutingOnDataStream(DocWriteRequest<?> writeRequest, Metadata metadata) {
    final IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(writeRequest.index());
    final boolean targetsDataStream = indexAbstraction != null
        && indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM;
    if (targetsDataStream && writeRequest.routing() != null) {
        IndexAbstraction.DataStream dataStream = (IndexAbstraction.DataStream) indexAbstraction;
        throw new IllegalArgumentException("index request targeting data stream [" + dataStream.getName() + "] specifies a custom " +
            "routing. target the backing indices directly or remove the custom routing.");
    }
}
/**
 * Returns true when every index touched by the request is a system index
 * (vacuously true for a request that targets no indices).
 */
boolean isOnlySystem(BulkRequest request, SortedMap<String, IndexAbstraction> indicesLookup, SystemIndices systemIndices) {
    for (String indexName : request.getIndices()) {
        if (isSystemIndex(indicesLookup, systemIndices, indexName) == false) {
            return false;
        }
    }
    return true;
}
/**
 * Decides whether a single index name refers to a system index: prefer the
 * cluster-state abstraction when the index exists, otherwise fall back to the
 * name-based check for indices that are not (yet) in the cluster state.
 */
private boolean isSystemIndex(SortedMap<String, IndexAbstraction> indicesLookup, SystemIndices systemIndices, String indexName) {
    final IndexAbstraction abstraction = indicesLookup.get(indexName);
    return abstraction != null ? abstraction.isSystem() : systemIndices.isSystemIndex(indexName);
}
/**
 * Issues an auto-create request for a single missing index.
 * {@code minNodeVersion} is currently unused here but retained for signature
 * compatibility with callers.
 */
void createIndex(String index,
                 TimeValue timeout,
                 Version minNodeVersion,
                 ActionListener<CreateIndexResponse> listener) {
    final CreateIndexRequest request = new CreateIndexRequest();
    request.index(index);
    // Recorded as the reason for the index creation.
    request.cause("auto(bulk api)");
    request.masterNodeTimeout(timeout);
    client.execute(AutoCreateAction.INSTANCE, request, listener);
}
/**
 * Records a failure response for bulk item {@code idx} when its request targets
 * the given index.
 *
 * @return true when a failure was recorded for the item, false otherwise
 */
private boolean setResponseFailureIfIndexMatches(AtomicArray<BulkItemResponse> responses, int idx, DocWriteRequest<?> request,
                                                 String index, Exception e) {
    if (index.equals(request.index()) == false) {
        return false;
    }
    final BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.id(), e);
    responses.set(idx, new BulkItemResponse(idx, request.opType(), failure));
    return true;
}
/**
 * Computes the elapsed wall-clock time since {@code startTimeNanos}, in milliseconds,
 * using the injected relative-time source.
 */
private long buildTookInMillis(long startTimeNanos) {
    final long elapsedNanos = relativeTime() - startTimeNanos;
    return TimeUnit.NANOSECONDS.toMillis(elapsedNanos);
}
/**
* retries on retryable cluster blocks, resolves item requests,
* constructs shard bulk requests and delegates execution to shard bulk action
 */
private final class BulkOperation extends ActionRunnable<BulkResponse> {
private final Task task;
private BulkRequest bulkRequest; // set to null once all requests are sent out
// Per-item responses, filled in as shard-level results arrive.
private final AtomicArray<BulkItemResponse> responses;
private final long startTimeNanos;
// Watches cluster-state changes so the operation can be retried on retryable blocks,
// bounded by the bulk request's timeout.
private final ClusterStateObserver observer;
// Indices whose auto-creation failed; items targeting them are failed up front.
private final Map<String, IndexNotFoundException> indicesThatCannotBeCreated;

BulkOperation(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener, AtomicArray<BulkItemResponse> responses,
              long startTimeNanos, Map<String, IndexNotFoundException> indicesThatCannotBeCreated) {
    super(listener);
    this.task = task;
    this.bulkRequest = bulkRequest;
    this.responses = responses;
    this.startTimeNanos = startTimeNanos;
    this.indicesThatCannotBeCreated = indicesThatCannotBeCreated;
    this.observer = new ClusterStateObserver(clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext());
}
/**
 * Runs one attempt of the bulk: checks for cluster write blocks, resolves and
 * validates each item (failing invalid ones in place), groups the surviving items
 * by shard, and dispatches one {@link BulkShardRequest} per shard. The overall
 * response is sent once all shard requests have completed.
 */
@Override
protected void doRun() {
    assert bulkRequest != null;
    final ClusterState clusterState = observer.setAndGetObservedState();
    // A retryable block schedules a retry; either way, stop this attempt.
    if (handleBlockExceptions(clusterState)) {
        return;
    }
    final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver);
    Metadata metadata = clusterState.metadata();
    // Pass 1: resolve/validate each item; invalid items get a failure response
    // and are nulled out of the request list.
    for (int i = 0; i < bulkRequest.requests.size(); i++) {
        DocWriteRequest<?> docWriteRequest = bulkRequest.requests.get(i);
        //the request can only be null because we set it to null in the previous step, so it gets ignored
        if (docWriteRequest == null) {
            continue;
        }
        if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, i, metadata)) {
            continue;
        }
        if (addFailureIfIndexIsUnavailable(docWriteRequest, i, concreteIndices, metadata)) {
            continue;
        }
        Index concreteIndex = concreteIndices.resolveIfAbsent(docWriteRequest);
        try {
            // The ConcreteIndices#resolveIfAbsent(...) method validates via IndexNameExpressionResolver whether
            // an operation is allowed in index into a data stream, but this isn't done when resolve call is cached, so
            // the validation needs to be performed here too.
            IndexAbstraction indexAbstraction = clusterState.getMetadata().getIndicesLookup().get(concreteIndex.getName());
            if (indexAbstraction.getParentDataStream() != null &&
                // avoid valid cases when directly indexing into a backing index
                // (for example when directly indexing into .ds-logs-foobar-000001)
                concreteIndex.getName().equals(docWriteRequest.index()) == false &&
                docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) {
                throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams");
            }
            switch (docWriteRequest.opType()) {
                case CREATE:
                case INDEX:
                    prohibitAppendWritesInBackingIndices(docWriteRequest, metadata);
                    prohibitCustomRoutingOnDataStream(docWriteRequest, metadata);
                    IndexRequest indexRequest = (IndexRequest) docWriteRequest;
                    final IndexMetadata indexMetadata = metadata.index(concreteIndex);
                    MappingMetadata mappingMd = indexMetadata.mapping();
                    Version indexCreated = indexMetadata.getCreationVersion();
                    indexRequest.resolveRouting(metadata);
                    // Fills in id/routing/timestamp handling for the index request.
                    indexRequest.process(indexCreated, mappingMd, concreteIndex.getName());
                    break;
                case UPDATE:
                    TransportUpdateAction.resolveAndValidateRouting(metadata, concreteIndex.getName(),
                        (UpdateRequest) docWriteRequest);
                    break;
                case DELETE:
                    docWriteRequest.routing(metadata.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index()));
                    // check if routing is required, if so, throw error if routing wasn't specified
                    if (docWriteRequest.routing() == null && metadata.routingRequired(concreteIndex.getName())) {
                        throw new RoutingMissingException(concreteIndex.getName(), docWriteRequest.id());
                    }
                    break;
                default: throw new AssertionError("request type not supported: [" + docWriteRequest.opType() + "]");
            }
        } catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException e) {
            // Validation failures fail only this item, not the whole bulk.
            BulkItemResponse.Failure failure = new BulkItemResponse.Failure(concreteIndex.getName(),
                docWriteRequest.id(), e);
            BulkItemResponse bulkItemResponse = new BulkItemResponse(i, docWriteRequest.opType(), failure);
            responses.set(i, bulkItemResponse);
            // make sure the request gets never processed again
            bulkRequest.requests.set(i, null);
        }
    }
    // first, go over all the requests and create a ShardId -> Operations mapping
    Map<ShardId, List<BulkItemRequest>> requestsByShard = new HashMap<>();
    for (int i = 0; i < bulkRequest.requests.size(); i++) {
        DocWriteRequest<?> request = bulkRequest.requests.get(i);
        if (request == null) {
            continue;
        }
        String concreteIndex = concreteIndices.getConcreteIndex(request.index()).getName();
        ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, request.id(),
            request.routing()).shardId();
        List<BulkItemRequest> shardRequests = requestsByShard.computeIfAbsent(shardId, shard -> new ArrayList<>());
        shardRequests.add(new BulkItemRequest(i, request));
    }
    // Every item already failed during validation: respond immediately.
    if (requestsByShard.isEmpty()) {
        listener.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]),
            buildTookInMillis(startTimeNanos)));
        return;
    }
    // Countdown of outstanding shard requests; the final response is sent at zero.
    final AtomicInteger counter = new AtomicInteger(requestsByShard.size());
    String nodeId = clusterService.localNode().getId();
    for (Map.Entry<ShardId, List<BulkItemRequest>> entry : requestsByShard.entrySet()) {
        final ShardId shardId = entry.getKey();
        final List<BulkItemRequest> requests = entry.getValue();
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, bulkRequest.getRefreshPolicy(),
            requests.toArray(new BulkItemRequest[requests.size()]));
        bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards());
        bulkShardRequest.timeout(bulkRequest.timeout());
        bulkShardRequest.routedBasedOnClusterVersion(clusterState.version());
        if (task != null) {
            bulkShardRequest.setParentTask(nodeId, task.getId());
        }
        client.executeLocally(TransportShardBulkAction.TYPE, bulkShardRequest, new ActionListener<>() {
            @Override
            public void onResponse(BulkShardResponse bulkShardResponse) {
                for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) {
                    // we may have no response if item failed
                    if (bulkItemResponse.getResponse() != null) {
                        bulkItemResponse.getResponse().setShardInfo(bulkShardResponse.getShardInfo());
                    }
                    responses.set(bulkItemResponse.getItemId(), bulkItemResponse);
                }
                if (counter.decrementAndGet() == 0) {
                    finishHim();
                }
            }

            @Override
            public void onFailure(Exception e) {
                // create failures for all relevant requests
                for (BulkItemRequest request : requests) {
                    final String indexName = concreteIndices.getConcreteIndex(request.index()).getName();
                    DocWriteRequest<?> docWriteRequest = request.request();
                    responses.set(request.id(), new BulkItemResponse(request.id(), docWriteRequest.opType(),
                        new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e)));
                }
                if (counter.decrementAndGet() == 0) {
                    finishHim();
                }
            }

            // Sends the aggregated bulk response once the last shard completes.
            private void finishHim() {
                listener.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]),
                    buildTookInMillis(startTimeNanos)));
            }
        });
    }
    bulkRequest = null; // allow memory for bulk request items to be reclaimed before all items have been completed
}
/**
 * Inspects the cluster state for a global write block and reacts to it.
 *
 * @param state the cluster state to inspect
 * @return {@code true} if a write block was present and handled (retry scheduled or
 *         failure reported), {@code false} if execution may proceed
 */
private boolean handleBlockExceptions(ClusterState state) {
    ClusterBlockException blockException = state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
    if (blockException == null) {
        return false;
    }
    if (blockException.retryable()) {
        logger.trace("cluster is blocked, scheduling a retry", blockException);
        retry(blockException);
    } else {
        onFailure(blockException);
    }
    return true;
}
/**
 * Waits for the next cluster state change and then re-runs the whole bulk operation.
 * If the observer has already timed out, the given failure is reported immediately
 * instead of scheduling another retry.
 *
 * @param failure the exception that made the previous attempt impossible; must not be null
 */
void retry(Exception failure) {
assert failure != null;
if (observer.isTimedOut()) {
// we are running as a last attempt after a timeout has happened. don't retry
onFailure(failure);
return;
}
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
// a new state arrived; re-run the operation from the top against it
run();
}
@Override
public void onClusterServiceClose() {
// the node is shutting down; surface that to the caller instead of hanging
onFailure(new NodeClosedException(clusterService.localNode()));
}
@Override
public void onTimeout(TimeValue timeout) {
// Try one more time... (run() will observe the latest state and may still succeed)
run();
}
});
}
/**
 * Fails the bulk item at {@code idx} when the request demands an alias target but the
 * given name is not an alias in the cluster metadata.
 *
 * @return {@code true} if a failure was recorded (the item must not be executed), else {@code false}
 */
private boolean addFailureIfRequiresAliasAndAliasIsMissing(DocWriteRequest<?> request, int idx, final Metadata metadata) {
    if (request.isRequireAlias() == false || metadata.hasAlias(request.index())) {
        return false;
    }
    String message = "["
        + DocWriteRequest.REQUIRE_ALIAS
        + "] request flag is [true] and ["
        + request.index()
        + "] is not an alias";
    addFailure(request, idx, new IndexNotFoundException(message, request.index()));
    return true;
}
/**
 * Fails the bulk item at {@code idx} if its target index cannot be used: auto-creation
 * failed earlier, the name cannot be resolved to a concrete write index, or the
 * resolved index is closed.
 *
 * @return {@code true} if a failure response was recorded (the item must not be executed),
 *         {@code false} if the index is usable
 */
private boolean addFailureIfIndexIsUnavailable(DocWriteRequest<?> request, int idx, final ConcreteIndices concreteIndices,
final Metadata metadata) {
// index auto-creation was attempted earlier and failed for this name
IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index());
if (cannotCreate != null) {
addFailure(request, idx, cannotCreate);
return true;
}
Index concreteIndex = concreteIndices.getConcreteIndex(request.index());
if (concreteIndex == null) {
try {
// resolve (and cache) the concrete write index for this name/alias/data stream
concreteIndex = concreteIndices.resolveIfAbsent(request);
} catch (IndexClosedException | IndexNotFoundException | IllegalArgumentException ex) {
addFailure(request, idx, ex);
return true;
}
}
// the index resolved, but it may be closed for writes
IndexMetadata indexMetadata = metadata.getIndexSafe(concreteIndex);
if (indexMetadata.getState() == IndexMetadata.State.CLOSE) {
addFailure(request, idx, new IndexClosedException(concreteIndex));
return true;
}
return false;
}
/**
 * Records a failure response for the item at {@code idx} and removes the request from
 * the bulk so it is never executed.
 *
 * @param request the original document write request
 * @param idx the item's slot in the bulk request
 * @param unavailableException the reason the item cannot be executed
 */
private void addFailure(DocWriteRequest<?> request, int idx, Exception unavailableException) {
    BulkItemResponse.Failure failure =
        new BulkItemResponse.Failure(request.index(), request.id(), unavailableException);
    responses.set(idx, new BulkItemResponse(idx, request.opType(), failure));
    // null out the slot so the request gets never processed again
    bulkRequest.requests.set(idx, null);
}
}
/**
 * Kicks off shard-level execution of an already pre-processed bulk request by
 * constructing a {@code BulkOperation} and running it.
 *
 * @param responses pre-sized array collecting one response per original bulk slot
 * @param indicesThatCannotBeCreated indices whose auto-creation failed; their items fail fast
 */
void executeBulk(Task task, final BulkRequest bulkRequest, final long startTimeNanos, final ActionListener<BulkResponse> listener,
final AtomicArray<BulkItemResponse> responses, Map<String, IndexNotFoundException> indicesThatCannotBeCreated) {
new BulkOperation(task, bulkRequest, listener, responses, startTimeNanos, indicesThatCannotBeCreated).run();
}
/**
 * Per-bulk cache that maps index names/aliases to their resolved concrete write index,
 * so each distinct name is resolved against the cluster state at most once.
 */
private static class ConcreteIndices {
    private final ClusterState state;
    private final IndexNameExpressionResolver indexNameExpressionResolver;
    private final Map<String, Index> indices = new HashMap<>();

    ConcreteIndices(ClusterState state, IndexNameExpressionResolver indexNameExpressionResolver) {
        this.state = state;
        this.indexNameExpressionResolver = indexNameExpressionResolver;
    }

    /** Returns the cached concrete index for the given name, or null if not resolved yet. */
    Index getConcreteIndex(String indexOrAlias) {
        return indices.get(indexOrAlias);
    }

    /** Resolves the request's index name to a concrete write index, caching the result. */
    Index resolveIfAbsent(DocWriteRequest<?> request) {
        Index cached = indices.get(request.index());
        if (cached != null) {
            return cached;
        }
        // data streams only accept "create" ops; other op types must not resolve into them
        boolean includeDataStreams = request.opType() == DocWriteRequest.OpType.CREATE;
        Index resolved;
        try {
            resolved = indexNameExpressionResolver.concreteWriteIndex(state, request.indicesOptions(),
                request.indices()[0], false, includeDataStreams);
        } catch (IndexNotFoundException e) {
            if (includeDataStreams == false && e.getMetadataKeys().contains(EXCLUDED_DATA_STREAMS_KEY)) {
                throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams");
            }
            throw e;
        }
        indices.put(request.index(), resolved);
        return resolved;
    }
}
/**
 * @return the current relative time from the injected provider — presumably nanoseconds,
 *     matching the {@code startTimeNanos} values used for took-time calculations; TODO confirm
 */
private long relativeTime() {
return relativeTimeProvider.getAsLong();
}
/**
 * Runs the ingest pipelines for a bulk request, then hands the (possibly shrunk) request
 * to {@code doInternalExecute}. Items failed or dropped by ingest are recorded in the
 * {@code BulkRequestModifier} and merged back into the final response. If the ingest
 * callback fires on a thread other than the original, execution is forked back onto the
 * named executor before continuing.
 *
 * @param task the task driving this bulk
 * @param original the bulk request before ingest pre-processing
 * @param executorName name of the thread pool executor to continue on
 * @param listener notified with the final bulk response or failure
 */
private void processBulkIndexIngestRequest(Task task, BulkRequest original, String executorName,
ActionListener<BulkResponse> listener) {
final long ingestStartTimeInNanos = System.nanoTime();
final BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
ingestService.executeBulkRequest(
original.numberOfActions(),
() -> bulkRequestModifier,
bulkRequestModifier::markItemAsFailed,
// invoked once ingest finishes for all items; exception != null means the whole run failed
(originalThread, exception) -> {
if (exception != null) {
logger.debug("failed to execute pipeline for a bulk request", exception);
listener.onFailure(exception);
} else {
long ingestTookInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - ingestStartTimeInNanos);
BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest();
ActionListener<BulkResponse> actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(ingestTookInMillis,
listener);
if (bulkRequest.requests().isEmpty()) {
// at this stage, the transport bulk action can't deal with a bulk request with no requests,
// so we stop and send an empty response back to the client.
// (this will happen if pre-processing all items in the bulk failed)
actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0));
} else {
// If a processor went async and returned a response on a different thread then
// before we continue the bulk request we should fork back on a write thread:
if (originalThread == Thread.currentThread()) {
assert Thread.currentThread().getName().contains(executorName);
doInternalExecute(task, bulkRequest, executorName, actionListener);
} else {
threadPool.executor(executorName).execute(new ActionRunnable<>(actionListener) {
@Override
protected void doRun() {
doInternalExecute(task, bulkRequest, executorName, actionListener);
}
@Override
public boolean isForceExecution() {
// If we fork back to a write thread we should **not** fail just because the tp queue is full.
// (Otherwise the work done during ingest will be lost)
// It is okay to force execution here. Throttling of write requests happens prior to
// ingest when a node receives a bulk request.
return true;
}
});
}
}
}
},
bulkRequestModifier::markItemAsDropped,
executorName
);
}
/**
 * Wraps a {@code BulkRequest} during ingest pre-processing. The ingest service iterates
 * the items through this class; items that fail or are dropped are recorded per slot so
 * the surviving items can be re-packed into a smaller bulk request and the shard
 * responses later mapped back to the caller's original slot order.
 */
static final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {
// the original, unmodified bulk request
final BulkRequest bulkRequest;
// slots of items that failed or were dropped during ingest
final SparseFixedBitSet failedSlots;
// responses synthesized for failed/dropped items, in the order they were marked
final List<BulkItemResponse> itemResponses;
// maps slot in the re-packed request -> slot in the original request
final AtomicIntegerArray originalSlots;
// iterator cursor; volatile because ingest may advance it from a different thread
volatile int currentSlot = -1;
BulkRequestModifier(BulkRequest bulkRequest) {
this.bulkRequest = bulkRequest;
this.failedSlots = new SparseFixedBitSet(bulkRequest.requests().size());
this.itemResponses = new ArrayList<>(bulkRequest.requests().size());
this.originalSlots = new AtomicIntegerArray(bulkRequest.requests().size()); // oversize, but that's ok
}
@Override
public DocWriteRequest<?> next() {
return bulkRequest.requests().get(++currentSlot);
}
@Override
public boolean hasNext() {
return (currentSlot + 1) < bulkRequest.requests().size();
}
/**
 * Returns the bulk request to execute after ingest: the original request if nothing was
 * failed/dropped, otherwise a new request containing only the surviving items, with each
 * surviving item's original slot recorded in {@code originalSlots}.
 */
BulkRequest getBulkRequest() {
if (itemResponses.isEmpty()) {
return bulkRequest;
} else {
BulkRequest modifiedBulkRequest = new BulkRequest();
modifiedBulkRequest.setRefreshPolicy(bulkRequest.getRefreshPolicy());
modifiedBulkRequest.waitForActiveShards(bulkRequest.waitForActiveShards());
modifiedBulkRequest.timeout(bulkRequest.timeout());
int slot = 0;
List<DocWriteRequest<?>> requests = bulkRequest.requests();
for (int i = 0; i < requests.size(); i++) {
DocWriteRequest<?> request = requests.get(i);
if (failedSlots.get(i) == false) {
modifiedBulkRequest.add(request);
originalSlots.set(slot++, i);
}
}
return modifiedBulkRequest;
}
}
/**
 * Wraps the listener so the final {@code BulkResponse} carries the ingest took-time and,
 * when items were failed/dropped during ingest, merges their synthesized responses back
 * in at the original slot positions.
 */
ActionListener<BulkResponse> wrapActionListenerIfNeeded(long ingestTookInMillis, ActionListener<BulkResponse> actionListener) {
if (itemResponses.isEmpty()) {
return actionListener.map(
response -> new BulkResponse(response.getItems(), response.getTook().getMillis(), ingestTookInMillis));
} else {
return actionListener.map(response -> {
BulkItemResponse[] items = response.getItems();
for (int i = 0; i < items.length; i++) {
// insert each executed item's response at its original slot position
itemResponses.add(originalSlots.get(i), response.getItems()[i]);
}
return new BulkResponse(
itemResponses.toArray(new BulkItemResponse[0]), response.getTook().getMillis(), ingestTookInMillis);
});
}
}
/**
 * Marks the item at {@code slot} as dropped by an ingest processor: the slot is excluded
 * from the re-packed request and answered with a synthetic NOOP update response.
 * NOTE(review): assumes the slot holds an index/update request — getIndexWriteRequest may
 * return null for other request types; TODO confirm callers guarantee this.
 */
synchronized void markItemAsDropped(int slot) {
IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(slot));
failedSlots.set(slot);
final String id = indexRequest.id() == null ? DROPPED_ITEM_WITH_AUTO_GENERATED_ID : indexRequest.id();
itemResponses.add(
new BulkItemResponse(slot, indexRequest.opType(),
new UpdateResponse(
new ShardId(indexRequest.index(), IndexMetadata.INDEX_UUID_NA_VALUE, 0),
id, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM,
indexRequest.version(), DocWriteResponse.Result.NOOP
)
)
);
}
/**
 * Marks the item at {@code slot} as failed during ingest pre-processing.
 */
synchronized void markItemAsFailed(int slot, Exception e) {
IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(slot));
// We hit an error during preprocessing a request, so we:
// 1) Remember the request item slot from the bulk, so that when we're done processing all requests we know what failed
// 2) Add a bulk item failure for this request
// 3) Continue with the next request in the bulk.
failedSlots.set(slot);
BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.id(), e);
itemResponses.add(new BulkItemResponse(slot, indexRequest.opType(), failure));
}
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.grpc;
import io.grpc.CallOptions;
import io.grpc.MethodDescriptor;
import io.grpc.ServerMethodDefinition;
import io.grpc.ServerServiceDefinition;
import io.grpc.ServiceDescriptor;
import io.grpc.internal.CompositeReadableBuffer;
import io.grpc.internal.ReadableBuffer;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
/**
 * Utilities for gRPC message serialization.
 *
 * <p>The zero-copy send/receive paths rely on reflective access to gRPC/Netty internals.
 * All required fields and constructors are resolved once in the static initializer; if the
 * members of a path cannot be resolved, that path is disabled and its helpers degrade
 * gracefully (returning {@code null} / {@code false}).
 */
public class GrpcSerializationUtils {
  /** Call option under which an overridden method descriptor is carried. */
  public static final CallOptions.Key<MethodDescriptor> OVERRIDDEN_METHOD_DESCRIPTOR =
      CallOptions.Key.create("overridden method descriptor");

  private static final Logger LOG = LoggerFactory.getLogger(GrpcSerializationUtils.class);

  /** Number of low bits that hold the wire type in a protobuf field tag. */
  private static final int TAG_TYPE_BITS = 3;

  private static final String BUFFER_INPUT_STREAM_CLASS_NAME =
      "io.grpc.internal.ReadableBuffers$BufferInputStream";
  private static final String BUFFER_FIELD_NAME = "buffer";
  private static final String BUFFERS_FIELD_NAME = "buffers";
  private static final String NETTY_WRITABLE_BUFFER_CLASS_NAME =
      "io.grpc.netty.NettyWritableBuffer";
  private static final String NETTY_READABLE_BUFFER_CLASS_NAME =
      "io.grpc.netty.NettyReadableBuffer";
  private static final String BUFFER_CHAIN_OUTPUT_STREAM_CLASS_NAME =
      "io.grpc.internal.MessageFramer$BufferChainOutputStream";
  private static final String BUFFER_LIST_FIELD_NAME = "bufferList";
  private static final String CURRENT_FIELD_NAME = "current";

  // Members used by the zero-copy SEND path (addBuffersToStream).
  private static Constructor<?> sNettyWritableBufferConstructor;
  private static Field sBufferList;
  private static Field sCurrent;
  // Members used by the zero-copy RECEIVE path (getBufferFromStream / getByteBufFromReadableBuffer).
  private static Field sCompositeBuffers = null;
  private static Field sReadableBufferField = null;
  private static Field sReadableByteBuf = null;

  private static boolean sZeroCopySendSupported = true;
  private static boolean sZeroCopyReceiveSupported = true;

  static {
    // BUGFIX: the reflective members were previously grouped under the wrong flags —
    // failing to resolve the receive-path field disabled *send*, and failing to resolve
    // the send-path members disabled *receive*. That could leave a flag enabled while
    // the fields its code path dereferences were still null (NPE at first use). Each
    // flag now guards exactly the members its own path uses.
    try {
      sReadableBufferField = getPrivateField(BUFFER_INPUT_STREAM_CLASS_NAME, BUFFER_FIELD_NAME);
      sCompositeBuffers =
          getPrivateField(CompositeReadableBuffer.class.getName(), BUFFERS_FIELD_NAME);
      sReadableByteBuf = getPrivateField(NETTY_READABLE_BUFFER_CLASS_NAME, BUFFER_FIELD_NAME);
    } catch (Exception e) {
      LOG.warn("Cannot get gRPC input stream buffer, zero copy receive will be disabled.", e);
      sZeroCopyReceiveSupported = false;
    }
    try {
      sNettyWritableBufferConstructor =
          getPrivateConstructor(NETTY_WRITABLE_BUFFER_CLASS_NAME, ByteBuf.class);
      sBufferList = getPrivateField(BUFFER_CHAIN_OUTPUT_STREAM_CLASS_NAME, BUFFER_LIST_FIELD_NAME);
      sCurrent = getPrivateField(BUFFER_CHAIN_OUTPUT_STREAM_CLASS_NAME, CURRENT_FIELD_NAME);
    } catch (Exception e) {
      LOG.warn("Cannot get gRPC output stream buffer, zero copy send will be disabled.", e);
      sZeroCopySendSupported = false;
    }
  }

  /**
   * Looks up a declared field by name and makes it accessible.
   *
   * @param className fully qualified name of the declaring class
   * @param fieldName name of the field
   * @return the accessible field
   */
  private static Field getPrivateField(String className, String fieldName)
      throws NoSuchFieldException, ClassNotFoundException {
    Class<?> declaringClass = Class.forName(className);
    Field field = declaringClass.getDeclaredField(fieldName);
    field.setAccessible(true);
    return field;
  }

  /**
   * Looks up a declared constructor by parameter types and makes it accessible.
   *
   * @param className fully qualified name of the declaring class
   * @param parameterTypes constructor parameter types
   * @return the accessible constructor
   */
  private static Constructor<?> getPrivateConstructor(String className, Class<?> ...parameterTypes)
      throws ClassNotFoundException, NoSuchMethodException {
    Class<?> declaringClass = Class.forName(className);
    Constructor<?> constructor = declaringClass.getDeclaredConstructor(parameterTypes);
    constructor.setAccessible(true);
    return constructor;
  }

  /**
   * Makes a gRPC tag for a field.
   *
   * @param fieldNumber field number
   * @param wireType wire type of the field
   * @return the gRPC tag
   */
  public static int makeTag(final int fieldNumber, final int wireType) {
    // This is a public version of WireFormat.makeTag.
    return (fieldNumber << TAG_TYPE_BITS) | wireType;
  }

  /**
   * Gets a buffer directly from a gRPC input stream.
   *
   * @param stream the input stream
   * @return the raw data buffer, or null if zero-copy receive is unavailable or the
   *         stream is not gRPC's internal BufferInputStream
   */
  public static ReadableBuffer getBufferFromStream(InputStream stream) {
    if (!sZeroCopyReceiveSupported
        || !stream.getClass().equals(sReadableBufferField.getDeclaringClass())) {
      return null;
    }
    try {
      return (ReadableBuffer) sReadableBufferField.get(stream);
    } catch (Exception e) {
      LOG.warn("Failed to get data buffer from stream.", e);
      return null;
    }
  }

  /**
   * Gets a Netty buffer directly from a gRPC ReadableBuffer.
   *
   * @param buffer the input buffer
   * @return the raw ByteBuf, or null if the ByteBuf cannot be extracted
   */
  @SuppressWarnings("unchecked")
  public static ByteBuf getByteBufFromReadableBuffer(ReadableBuffer buffer) {
    if (!sZeroCopyReceiveSupported) {
      return null;
    }
    try {
      if (buffer instanceof CompositeReadableBuffer) {
        Queue<ReadableBuffer> buffers = (Queue<ReadableBuffer>) sCompositeBuffers.get(buffer);
        if (buffers.size() == 1) {
          return getByteBufFromReadableBuffer(buffers.peek());
        } else {
          // combine all component buffers; fail the whole extraction if any one of them
          // cannot be unwrapped, so callers fall back to the copying path consistently
          CompositeByteBuf buf = PooledByteBufAllocator.DEFAULT.compositeBuffer();
          for (ReadableBuffer readableBuffer : buffers) {
            ByteBuf subBuffer = getByteBufFromReadableBuffer(readableBuffer);
            if (subBuffer == null) {
              return null;
            }
            buf.addComponent(true, subBuffer);
          }
          return buf;
        }
      } else if (buffer.getClass().equals(sReadableByteBuf.getDeclaringClass())) {
        return (ByteBuf) sReadableByteBuf.get(buffer);
      }
    } catch (Exception e) {
      LOG.warn("Failed to get data buffer from stream: {}.", e.getMessage());
      return null;
    }
    return null;
  }

  /**
   * Add the given buffers directly to the gRPC output stream.
   *
   * @param buffers the buffers to be added
   * @param stream the output stream
   * @return whether the buffers are added successfully
   */
  @SuppressWarnings("unchecked")
  public static boolean addBuffersToStream(ByteBuf[] buffers, OutputStream stream) {
    if (!sZeroCopySendSupported || !stream.getClass().equals(sBufferList.getDeclaringClass())) {
      return false;
    }
    try {
      if (sCurrent.get(stream) != null) {
        // the framer already holds a partially written buffer; appending ours would reorder data
        return false;
      }
      // the field value is stable across iterations, so read it once outside the loop
      List<Object> bufferList = (List<Object>) sBufferList.get(stream);
      for (ByteBuf buffer : buffers) {
        Object nettyBuffer = sNettyWritableBufferConstructor.newInstance(buffer);
        bufferList.add(nettyBuffer);
        buffer.retain();
        sCurrent.set(stream, nettyBuffer);
      }
      return true;
    } catch (Exception e) {
      LOG.warn("Failed to add data buffer to stream: {}.", e.getMessage());
      return false;
    }
  }

  /**
   * Creates a service definition that uses custom marshallers.
   *
   * @param service the service to intercept
   * @param marshallers a map that specifies which marshaller to use for each method
   * @return the new service definition
   */
  public static ServerServiceDefinition overrideMethods(
      final ServerServiceDefinition service,
      final Map<MethodDescriptor, MethodDescriptor> marshallers) {
    List<ServerMethodDefinition<?, ?>> newMethods = new ArrayList<ServerMethodDefinition<?, ?>>();
    List<MethodDescriptor<?, ?>> newDescriptors = new ArrayList<MethodDescriptor<?, ?>>();
    // intercepts the descriptors
    for (final ServerMethodDefinition<?, ?> definition : service.getMethods()) {
      ServerMethodDefinition<?, ?> newMethod = interceptMethod(definition, marshallers);
      newDescriptors.add(newMethod.getMethodDescriptor());
      newMethods.add(newMethod);
    }
    // builds the new service descriptor
    final ServerServiceDefinition.Builder serviceBuilder = ServerServiceDefinition
        .builder(new ServiceDescriptor(service.getServiceDescriptor().getName(), newDescriptors));
    // creates the new service definition
    for (ServerMethodDefinition<?, ?> definition : newMethods) {
      serviceBuilder.addMethod(definition);
    }
    return serviceBuilder.build();
  }

  /**
   * Swaps in the overriding method descriptor for a single method, if one is configured.
   *
   * @param definition the original method definition
   * @param newMethods map from original to overriding descriptors
   * @return the overridden definition, or the original if no override is configured
   */
  private static <ReqT, RespT> ServerMethodDefinition<ReqT, RespT> interceptMethod(
      final ServerMethodDefinition<ReqT, RespT> definition,
      final Map<MethodDescriptor, MethodDescriptor> newMethods) {
    MethodDescriptor<ReqT, RespT> descriptor = definition.getMethodDescriptor();
    MethodDescriptor newMethod = newMethods.get(descriptor);
    if (newMethod != null) {
      return ServerMethodDefinition.create(newMethod, definition.getServerCallHandler());
    }
    return definition;
  }
}
| |
package at.ac.tuwien.dsg.hcu.monitor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;
import at.ac.tuwien.dsg.hcu.monitor.gridsim.GSMonitoringSimulation;
import at.ac.tuwien.dsg.hcu.monitor.impl.Broker;
import at.ac.tuwien.dsg.hcu.monitor.interfaces.AdapterInterface;
import at.ac.tuwien.dsg.hcu.monitor.interfaces.AgentInterface;
import at.ac.tuwien.dsg.hcu.monitor.interfaces.BrokerInterface;
import at.ac.tuwien.dsg.hcu.monitor.interfaces.ConsumerInterface;
import at.ac.tuwien.dsg.hcu.monitor.interfaces.ProducerInterface;
import at.ac.tuwien.dsg.hcu.monitor.model.Quality;
import at.ac.tuwien.dsg.hcu.monitor.model.Subscription;
/**
 * Builds the monitoring agents and brokers described by a scenario configuration map and
 * runs them on the GridSim-based monitoring simulation.
 */
public class Simulation {

    String title = "HCS Monitoring Simulation";
    // all created agents, keyed by agent name
    HashMap<String, AgentInterface> agents;
    // subset of agents that were configured with a producer
    List<AgentInterface> producerAgents;
    List<BrokerInterface> brokers;
    // fixed seed so simulation runs are reproducible
    RandomGenerator gen = new MersenneTwister(1000);

    /**
     * Initializes agents and brokers from the scenario configuration.
     *
     * @param config the scenario configuration; recognized keys include "title",
     *        "producer_based_quality_engine_enabled" and "monitoring_agents"
     * @return true on success, false if the configuration is null or invalid
     */
    @SuppressWarnings("unchecked")
    public boolean init(Map<String, Object> config) {
        if (config == null) {
            System.err.println("Null scenario configuration\n");
            return false;
        }
        title = (String) config.getOrDefault("title", title);
        boolean producerBasedQualityEngineEnabled =
                (boolean) config.getOrDefault("producer_based_quality_engine_enabled", false);
        Map<String, Object> globalConfig = new HashMap<String, Object>();
        globalConfig.put("producer_based_quality_engine_enabled", producerBasedQualityEngineEnabled);
        // create agents
        agents = new HashMap<String, AgentInterface>();
        producerAgents = new ArrayList<AgentInterface>();
        brokers = new ArrayList<BrokerInterface>();
        // TODO: make brokers configurable
        BrokerInterface broker = new Broker();
        broker.adjust(globalConfig);
        brokers.add(broker);
        List<HashMap<String, Object>> agentConfig =
                (List<HashMap<String, Object>>) config.get("monitoring_agents");
        for (HashMap<String, Object> cfg : agentConfig) {
            // skip explicitly disabled agents
            if (cfg.containsKey("enabled") && (Boolean) cfg.get("enabled") == false) {
                continue;
            }
            // get agent config
            String agentClassName = (String) cfg.get("class");
            String agentName = (String) cfg.getOrDefault("name", "AGENT_" + (agents.size() + 1));
            Map<String, Object> adapterCfg = (HashMap<String, Object>) cfg.get("adapter");
            Map<String, Object> consumerCfg = (HashMap<String, Object>) cfg.get("consumer");
            List<Map<String, Object>> subscriptions = null;
            if (consumerCfg != null) {
                subscriptions = (List<Map<String, Object>>) consumerCfg.get("subscriptions");
            }
            Map<String, Object> producerCfg = (HashMap<String, Object>) cfg.get("producer");
            List<Map<String, Object>> topics = (List<Map<String, Object>>) cfg.get("topics");
            if (agentClassName == null || (adapterCfg == null && consumerCfg == null)) {
                System.err.println("Unspecified agentClassName or (adapterCfg and consumerCfg)\n");
                return false;
            }
            // create the agent, possibly duplicated for load experiments
            Integer duplicate = (Integer) cfg.getOrDefault("duplicate", 1);
            for (int i = 1; i <= duplicate; i++) {
                // prepare duplication: give each copy a unique name and its own subscription rates
                String name = agentName;
                List<Map<String, Object>> subs = subscriptions;
                if (duplicate > 1) {
                    name += "-" + i;
                    subs = duplicateSubscriptions(subscriptions, i);
                }
                // create
                AgentInterface agent = createAgent(agentClassName, name, broker, adapterCfg,
                        consumerCfg, subs, producerCfg, topics, globalConfig);
                if (agent == null) {
                    // BUGFIX: previously fell through to agent.getName() and threw an NPE;
                    // report a clean initialization failure instead (createAgent already logged)
                    return false;
                }
                // add to agent list
                agents.put(agent.getName(), agent);
            }
        }
        return true;
    }

    /**
     * Makes a deep-enough copy of the subscription configs for duplicate number {@code i},
     * scaling the "rate" quality attribute by the duplicate index.
     *
     * @param subscriptions the original subscription configs, may be null
     * @param i 1-based duplicate index
     * @return copied subscriptions, or null if the input was null
     */
    @SuppressWarnings("unchecked")
    private List<Map<String, Object>> duplicateSubscriptions(List<Map<String, Object>> subscriptions, int i) {
        List<Map<String, Object>> subs = null;
        if (subscriptions != null) {
            subs = new ArrayList<Map<String, Object>>();
            for (Map<String, Object> subscription : subscriptions) {
                Map<String, Object> sub = new HashMap<String, Object>(subscription);
                Map<String, Object> quality =
                        new HashMap<String, Object>((Map<String, Object>) sub.get("quality"));
                Double rate = (Double) quality.get("rate");
                // TODO: should not be hardcoded!
                if (rate != null) {
                    // used in varying clients experiments; kept (even though currently unused)
                    // so the RNG draw sequence matches the original experiment setup
                    Double newRate = generateNormalRandomNumber(rate, rate / 10);
                    //quality.put("rate", newRate);
                    // used in varying rates experiments
                    quality.put("rate", rate * i);
                    sub.put("quality", quality);
                }
                subs.add(sub);
            }
        }
        return subs;
    }

    /**
     * Samples a normally distributed value from the shared (seeded) generator.
     *
     * @param mean distribution mean
     * @param stddev distribution standard deviation
     * @return one sample
     */
    private Double generateNormalRandomNumber(Double mean, Double stddev) {
        NormalDistribution dist = new NormalDistribution(gen, mean, stddev, 1.0E-9);
        return dist.sample();
    }

    /**
     * Creates and wires a single agent: its adapter or consumer, optional producer,
     * topics, and the global configuration.
     *
     * @return the configured agent, or null if the configuration is invalid or
     *         reflective instantiation fails (the error is printed)
     */
    @SuppressWarnings("unchecked")
    private AgentInterface createAgent(
            String agentClassName,
            String agentName,
            BrokerInterface broker,
            Map<String, Object> adapterCfg,
            Map<String, Object> consumerCfg,
            List<Map<String, Object>> subscriptions,
            Map<String, Object> producerCfg,
            List<Map<String, Object>> topics,
            Map<String, Object> globalConfig) {
        AgentInterface agent = null;
        try {
            // instantiate agent — getDeclaredConstructor().newInstance() replaces the
            // deprecated Class.newInstance(), which silently propagates checked
            // exceptions thrown by the constructor
            Class<?> agentClazz = Class.forName(agentClassName);
            agent = (AgentInterface) agentClazz.getDeclaredConstructor().newInstance();
            agent.setName(agentName);
            agent.setBroker(broker);
            // instantiate adapter
            if (adapterCfg != null) {
                String adapterClassName = (String) adapterCfg.get("class");
                Map<String, Object> _adapterCfg = (Map<String, Object>) adapterCfg.get("config");
                if (adapterClassName == null) {
                    System.err.println("Unspecified adapterClassName\n");
                    return null;
                }
                Class<?> adapterClazz = Class.forName(adapterClassName);
                AdapterInterface adapter =
                        (AdapterInterface) adapterClazz.getDeclaredConstructor().newInstance();
                adapter.adjust(_adapterCfg);
                agent.setAdapter(adapter);
            }
            // instantiate consumer
            else if (consumerCfg != null) {
                String consumerClassName = (String) consumerCfg.get("class");
                Map<String, Object> _consumerCfg = (Map<String, Object>) consumerCfg.get("config");
                if (consumerClassName == null) {
                    System.err.println("Unspecified consumerClassName\n");
                    return null;
                }
                Class<?> consumerClazz = Class.forName(consumerClassName);
                ConsumerInterface consumer =
                        (ConsumerInterface) consumerClazz.getDeclaredConstructor().newInstance();
                consumer.adjust(_consumerCfg);
                agent.setConsumer(consumer);
                // manage subscription
                // NOTE: config sequence matters, the producing agent must already be created before
                // TODO: support circular subscription
                if (subscriptions != null) {
                    for (Map<String, Object> subscriptionCfg : subscriptions) {
                        String to = (String) subscriptionCfg.get("to");
                        AgentInterface destAgent = agents.get(to);
                        String topic = (String) subscriptionCfg.get("topic");
                        Map<String, Object> _subscriptionCfg =
                                (Map<String, Object>) subscriptionCfg.get("config");
                        if (to == null || topic == null || destAgent == null) {
                            System.err.println(String.format(
                                    "Invalid subscription, to:%s, topic:%s, skipping...\n", to, topic));
                            continue;
                        }
                        Subscription subscription = new Subscription();
                        subscription.setTopic(topic);
                        subscription.setConfig(_subscriptionCfg);
                        Map<String, Object> subscriptionQuality =
                                (Map<String, Object>) subscriptionCfg.get("quality");
                        if (subscriptionQuality != null) {
                            subscription.setQuality(new Quality(subscriptionQuality));
                        }
                        consumer.subscribeTo(destAgent.getProducer(), subscription);
                    }
                }
            }
            // instantiate producer
            if (producerCfg != null) {
                String producerClassName = (String) producerCfg.get("class");
                Map<String, Object> _producerCfg = (Map<String, Object>) producerCfg.get("config");
                if (producerClassName == null) {
                    System.err.println("Unspecified producerClassName\n");
                    return null;
                }
                Class<?> producerClazz = Class.forName(producerClassName);
                ProducerInterface producer =
                        (ProducerInterface) producerClazz.getDeclaredConstructor().newInstance();
                producer.adjust(_producerCfg);
                agent.setProducer(producer);
                producerAgents.add(agent);
            }
            // add topics
            if (topics != null) {
                for (Map<String, Object> topic : topics) {
                    String topicName = (String) topic.get("name");
                    Map<String, Object> topicConfig = (Map<String, Object>) topic.get("config");
                    if (topicName != null) {
                        agent.addTopic(topicName, topicConfig);
                    }
                }
            }
            agent.adjust(globalConfig);
        } catch (ReflectiveOperationException e) {
            // covers ClassNotFoundException, NoSuchMethodException, InstantiationException,
            // IllegalAccessException and InvocationTargetException from the reflective calls
            e.printStackTrace();
            return null;
        }
        return agent;
    }

    /**
     * Starts the simulation with the configured agents and brokers.
     */
    public void start() {
        System.out.println("Running " + title + "...");
        // start simulation
        GSMonitoringSimulation.startSimulation(agents, brokers, false);
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpoint.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Provides information about the results of a request to create or update an endpoint that's associated with an event.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-2016-12-01/ItemResponse" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ItemResponse implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The response that was received after the endpoint data was accepted.
* </p>
*/
private EndpointItemResponse endpointItemResponse;
/**
* <p>
* A multipart response object that contains a key and a value for each event in the request. In each object, the
* event ID is the key and an EventItemResponse object is the value.
* </p>
*/
private java.util.Map<String, EventItemResponse> eventsItemResponse;
/**
 * <p>
 * The response that was received after the endpoint data was accepted.
 * </p>
 * The given object is stored by reference (no defensive copy).
 *
 * @param endpointItemResponse
 * The response that was received after the endpoint data was accepted.
 */
public void setEndpointItemResponse(EndpointItemResponse endpointItemResponse) {
this.endpointItemResponse = endpointItemResponse;
}
/**
 * <p>
 * The response that was received after the endpoint data was accepted.
 * </p>
 *
 * @return The response that was received after the endpoint data was accepted.
 */
public EndpointItemResponse getEndpointItemResponse() {
return this.endpointItemResponse;
}
/**
 * <p>
 * The response that was received after the endpoint data was accepted.
 * </p>
 * Fluent variant of {@link #setEndpointItemResponse(EndpointItemResponse)}.
 *
 * @param endpointItemResponse
 * The response that was received after the endpoint data was accepted.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ItemResponse withEndpointItemResponse(EndpointItemResponse endpointItemResponse) {
setEndpointItemResponse(endpointItemResponse);
return this;
}
/**
 * <p>
 * A multipart response object that contains a key and a value for each event in the request. In each object, the
 * event ID is the key and an EventItemResponse object is the value.
 * </p>
 * Note: returns the live internal map (may be null); mutations affect this object.
 *
 * @return A multipart response object that contains a key and a value for each event in the request. In each
 * object, the event ID is the key and an EventItemResponse object is the value.
 */
public java.util.Map<String, EventItemResponse> getEventsItemResponse() {
return eventsItemResponse;
}
/**
 * <p>
 * A multipart response object that contains a key and a value for each event in the request. In each object, the
 * event ID is the key and an EventItemResponse object is the value.
 * </p>
 * The given map is stored by reference (no defensive copy).
 *
 * @param eventsItemResponse
 * A multipart response object that contains a key and a value for each event in the request. In each object,
 * the event ID is the key and an EventItemResponse object is the value.
 */
public void setEventsItemResponse(java.util.Map<String, EventItemResponse> eventsItemResponse) {
this.eventsItemResponse = eventsItemResponse;
}
/**
 * <p>
 * A multipart response object that contains a key and a value for each event in the request. In each object, the
 * event ID is the key and an EventItemResponse object is the value.
 * </p>
 * Fluent variant of {@link #setEventsItemResponse(java.util.Map)}.
 *
 * @param eventsItemResponse
 * A multipart response object that contains a key and a value for each event in the request. In each object,
 * the event ID is the key and an EventItemResponse object is the value.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ItemResponse withEventsItemResponse(java.util.Map<String, EventItemResponse> eventsItemResponse) {
setEventsItemResponse(eventsItemResponse);
return this;
}
/**
 * Add a single EventsItemResponse entry, lazily creating the backing map on first use.
 *
 * @param key the event ID
 * @param value the response for that event
 * @see ItemResponse#withEventsItemResponse
 * @return a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry with the same key was already added
 */
public ItemResponse addEventsItemResponseEntry(String key, EventItemResponse value) {
    if (null == this.eventsItemResponse) {
        this.eventsItemResponse = new java.util.HashMap<String, EventItemResponse>();
    }
    if (this.eventsItemResponse.containsKey(key))
        // key is already a String; the previous key.toString() was redundant and would
        // NPE for a contained null key instead of reporting the duplicate
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    this.eventsItemResponse.put(key, value);
    return this;
}
/**
 * Removes all the entries added into EventsItemResponse.
 * Note: sets the backing map to null, so a subsequent getEventsItemResponse() returns
 * null (not an empty map).
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ItemResponse clearEventsItemResponseEntries() {
this.eventsItemResponse = null;
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEndpointItemResponse() != null)
sb.append("EndpointItemResponse: ").append(getEndpointItemResponse()).append(",");
if (getEventsItemResponse() != null)
sb.append("EventsItemResponse: ").append(getEventsItemResponse());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ItemResponse == false)
return false;
ItemResponse other = (ItemResponse) obj;
if (other.getEndpointItemResponse() == null ^ this.getEndpointItemResponse() == null)
return false;
if (other.getEndpointItemResponse() != null && other.getEndpointItemResponse().equals(this.getEndpointItemResponse()) == false)
return false;
if (other.getEventsItemResponse() == null ^ this.getEventsItemResponse() == null)
return false;
if (other.getEventsItemResponse() != null && other.getEventsItemResponse().equals(this.getEventsItemResponse()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEndpointItemResponse() == null) ? 0 : getEndpointItemResponse().hashCode());
hashCode = prime * hashCode + ((getEventsItemResponse() == null) ? 0 : getEventsItemResponse().hashCode());
return hashCode;
}
@Override
public ItemResponse clone() {
try {
return (ItemResponse) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
    /**
     * Serializes this instance by delegating to the generated
     * {@code ItemResponseMarshaller} singleton. Internal SDK use only.
     *
     * @param protocolMarshaller the marshaller this object's fields are written to
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.pinpoint.model.transform.ItemResponseMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pcelabelstore.util;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import org.onlab.util.KryoNamespace;
import org.onosproject.cluster.NodeId;
import org.onosproject.store.Timestamp;
import org.onosproject.store.service.EventuallyConsistentMap;
import org.onosproject.store.service.EventuallyConsistentMapBuilder;
import org.onosproject.store.service.EventuallyConsistentMapEvent;
import org.onosproject.store.service.EventuallyConsistentMapListener;
import static org.onosproject.store.service.EventuallyConsistentMapEvent.Type.PUT;
import static org.onosproject.store.service.EventuallyConsistentMapEvent.Type.REMOVE;
/**
 * Testing version of an Eventually Consistent Map.
 * <p>
 * Backed by a plain in-memory {@link HashMap}. {@code put} and {@code remove}
 * notify registered listeners synchronously; {@code put} additionally invokes
 * the optional peer-update function. Not synchronized — intended for unit tests.
 */
public final class TestEventuallyConsistentMap<K, V> extends EventuallyConsistentMapAdapter<K, V> {

    // Backing store for all map operations.
    private final HashMap<K, V> map;
    private final String mapName;
    private final List<EventuallyConsistentMapListener<K, V>> listeners;
    // Optional hook invoked on put(); may be null when no peer propagation is simulated.
    private final BiFunction<K, V, Collection<NodeId>> peerUpdateFunction;

    private TestEventuallyConsistentMap(String mapName,
                                        BiFunction<K, V, Collection<NodeId>> peerUpdateFunction) {
        map = new HashMap<>();
        listeners = new LinkedList<>();
        this.mapName = mapName;
        this.peerUpdateFunction = peerUpdateFunction;
    }

    /**
     * Notify all listeners of an event.
     */
    private void notifyListeners(EventuallyConsistentMapEvent<K, V> event) {
        listeners.forEach(
                listener -> listener.event(event)
        );
    }

    @Override
    public int size() {
        return map.size();
    }

    @Override
    public boolean isEmpty() {
        return map.isEmpty();
    }

    @Override
    public boolean containsKey(K key) {
        return map.containsKey(key);
    }

    @Override
    public boolean containsValue(V value) {
        return map.containsValue(value);
    }

    @Override
    public V get(K key) {
        return map.get(key);
    }

    @Override
    public void put(K key, V value) {
        map.put(key, value);
        EventuallyConsistentMapEvent<K, V> addEvent =
                new EventuallyConsistentMapEvent<>(mapName, PUT, key, value);
        notifyListeners(addEvent);
        if (peerUpdateFunction != null) {
            peerUpdateFunction.apply(key, value);
        }
    }

    @Override
    public V remove(K key) {
        V result = map.remove(key);
        if (result != null) {
            // Publish the removed value itself. The previous code called
            // map.get(key) here, which is always null after the removal, so
            // listeners received a REMOVE event with a null value.
            EventuallyConsistentMapEvent<K, V> removeEvent =
                    new EventuallyConsistentMapEvent<>(mapName, REMOVE, key, result);
            notifyListeners(removeEvent);
        }
        return result;
    }

    @Override
    public void remove(K key, V value) {
        boolean removed = map.remove(key, value);
        if (removed) {
            EventuallyConsistentMapEvent<K, V> removeEvent =
                    new EventuallyConsistentMapEvent<>(mapName, REMOVE, key, value);
            notifyListeners(removeEvent);
        }
    }

    @Override
    public V compute(K key, BiFunction<K, V, V> recomputeFunction) {
        // Note: unlike put/remove, compute does not notify listeners.
        return map.compute(key, recomputeFunction);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> m) {
        // Note: bulk insert bypasses listener notification and peer updates.
        map.putAll(m);
    }

    @Override
    public void clear() {
        map.clear();
    }

    @Override
    public Set<K> keySet() {
        return map.keySet();
    }

    @Override
    public Collection<V> values() {
        return map.values();
    }

    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        return map.entrySet();
    }

    public static <K, V> Builder<K, V> builder() {
        return new Builder<>();
    }

    @Override
    public void addListener(EventuallyConsistentMapListener<K, V> listener) {
        listeners.add(listener);
    }

    @Override
    public void removeListener(EventuallyConsistentMapListener<K, V> listener) {
        listeners.remove(listener);
    }

    /**
     * Builder for the test map. Only the name and the peer-update function are
     * honored; all other configuration methods are accepted and ignored so the
     * builder can stand in for the production implementation.
     */
    public static class Builder<K, V> implements EventuallyConsistentMapBuilder<K, V> {
        private String name;
        private BiFunction<K, V, Collection<NodeId>> peerUpdateFunction;

        @Override
        public EventuallyConsistentMapBuilder<K, V> withName(String name) {
            this.name = name;
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withSerializer(KryoNamespace.Builder serializerBuilder) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withSerializer(KryoNamespace serializer) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V>
        withTimestampProvider(BiFunction<K, V, Timestamp> timestampProvider) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withEventExecutor(ExecutorService executor) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withCommunicationExecutor(ExecutorService executor) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withBackgroundExecutor(ScheduledExecutorService executor) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V>
        withPeerUpdateFunction(BiFunction<K, V, Collection<NodeId>> peerUpdateFunction) {
            this.peerUpdateFunction = peerUpdateFunction;
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withTombstonesDisabled() {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withAntiEntropyPeriod(long period, TimeUnit unit) {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withFasterConvergence() {
            return this;
        }

        @Override
        public EventuallyConsistentMapBuilder<K, V> withPersistence() {
            return this;
        }

        @Override
        public EventuallyConsistentMap<K, V> build() {
            if (name == null) {
                name = "test";
            }
            return new TestEventuallyConsistentMap<>(name, peerUpdateFunction);
        }
    }
}
| |
package com.elnware.spotifystreamer.fragment;
import android.app.Activity;
import android.app.SearchManager;
import android.content.Context;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.provider.SearchRecentSuggestions;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.elnware.spotifystreamer.BuildConfig;
import com.elnware.spotifystreamer.fragment.base.MyFragment;
import com.elnware.spotifystreamer.model.ParcelableArtist;
import com.elnware.spotifystreamer.model.ParcelableImage;
import com.elnware.spotifystreamer.util.DataLoader;
import com.elnware.spotifystreamer.util.Response;
import com.elnware.spotifystreamer.provider.MySuggestionProvider;
import com.elnware.spotifystreamer.R;
import com.elnware.spotifystreamer.util.ImageUtils;
import com.elnware.spotifystreamer.util.RetrofitErrorHandler;
import com.elnware.spotifystreamer.view.MyRecyclerAdapter;
import com.elnware.spotifystreamer.view.MyRecyclerView;
import com.elnware.spotifystreamer.view.MyRecyclerViewHolder;
import com.elnware.spotifystreamer.view.decorator.DividerItemDecoration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import butterknife.Bind;
import butterknife.ButterKnife;
import kaaes.spotify.webapi.android.SpotifyApi;
import kaaes.spotify.webapi.android.SpotifyService;
import kaaes.spotify.webapi.android.models.Artist;
import kaaes.spotify.webapi.android.models.ArtistsPager;
import retrofit.RetrofitError;
/**
 * A search fragment containing a search box on the toolbar
 * to search for artists and display the search results.
 */
public class SearchFragment extends MyFragment
        implements LoaderManager.LoaderCallbacks<Response<List<ParcelableArtist>>> {

    public static final int DURATION = 700;
    public static final int LOADER_ID = 1;

    private static final String LOG_TAG = SearchFragment.class.getSimpleName();

    // Saved-instance-state keys.
    private static final String EXTRA_SEARCH_TEXT = "search_text";
    private static final String EXTRA_SEARCH_EXPANDED = "search_expanded";
    private static final String EXTRA_SEARCH_ARTIST_LIST = "parcelable_artist_list";

    // NOTE(review): assigned in onCreate() but never read afterwards —
    // SearchTaskLoader builds its own SpotifyService instance. Kept to avoid
    // breaking anything outside this view; candidate for removal.
    private SpotifyService mSpotify;
    private ArtistSearchResultAdapter mySearchResultAdapter;

    @Bind(R.id.progress_container)
    LinearLayout mProgressContainer;
    @Bind(R.id.recycler_view)
    MyRecyclerView myRecyclerView;
    @Bind(android.R.id.empty)
    View mEmptyView;
    @Bind(R.id.tv_empty)
    TextView mEmptyText;

    private String mQueryString;
    private boolean mIsSearchExpanded = false;
    private SearchArtistCallback mCallback;
    private SearchView mSearchView;
    private Bundle mSavedInstanceState;
    private List<ParcelableArtist> mArtistList;

    public static SearchFragment newInstance() {
        return new SearchFragment();
    }

    public SearchFragment() {
    }

    /** Contract the hosting activity must implement to receive artist selections. */
    public interface SearchArtistCallback {
        void onArtistSelected(Bundle bundle);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SpotifyApi api = new SpotifyApi();
        mSpotify = api.getService();
        setHasOptionsMenu(true);
        mySearchResultAdapter = new ArtistSearchResultAdapter(new ArrayList<ParcelableArtist>());
        if (savedInstanceState != null) {
            mArtistList = savedInstanceState.getParcelableArrayList(EXTRA_SEARCH_ARTIST_LIST);
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        if (mSearchView == null) {
            mQueryString = "";
        } else {
            mQueryString = mSearchView.getQuery().toString();
        }
        outState.putString(EXTRA_SEARCH_TEXT, mQueryString);
        outState.putBoolean(EXTRA_SEARCH_EXPANDED, mIsSearchExpanded);
        if (mArtistList != null) {
            outState.putParcelableArrayList(EXTRA_SEARCH_ARTIST_LIST, new ArrayList<ParcelableArtist>(mArtistList));
        }
        mySearchResultAdapter.onSaveInstanceState(outState);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Kept until onLoadFinished so the adapter can restore its selection state.
        mSavedInstanceState = savedInstanceState;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        View view = inflater.inflate(R.layout.fragment_spotify_search, container, false);
        ButterKnife.bind(this, view);
        mEmptyText.setText(getString(R.string.msg_search_click));
        if (savedInstanceState != null) {
            String searchString = savedInstanceState.getString(EXTRA_SEARCH_TEXT);
            mIsSearchExpanded = savedInstanceState.getBoolean(EXTRA_SEARCH_EXPANDED, false);
            if (searchString == null) {
                searchString = "";
            }
            mQueryString = searchString;
            getCompatActivity().supportInvalidateOptionsMenu();
        }
        LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        myRecyclerView.setLayoutManager(layoutManager);
        // Pre-Lollipop list items have no native divider in this layout.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
            myRecyclerView.addItemDecoration(new DividerItemDecoration(getActivity(), null));
        }
        myRecyclerView.setHasFixedSize(true);
        myRecyclerView.setEmptyView(mEmptyView);
        myRecyclerView.setAdapter(mySearchResultAdapter);
        if (savedInstanceState != null) {
            if (mArtistList != null) {
                mySearchResultAdapter.changeData(mArtistList);
            }
        }
        mySearchResultAdapter.setRecyclerCallbacks(new MySimpleRecyclerCallback());
        return view;
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        ButterKnife.unbind(this);
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        if (activity instanceof SearchArtistCallback) {
            mCallback = (SearchArtistCallback) activity;
        } else {
            throw new ClassCastException("Activity must implement SearchArtistCallbacks");
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        getLoaderManager().initLoader(LOADER_ID, null, this);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.search_fragment_menu, menu);
        final MenuItem searchItem = menu.findItem(R.id.actionSearch);
        SearchManager searchManager = (SearchManager) getActivity().getSystemService(Context.SEARCH_SERVICE);
        mSearchView = (SearchView) MenuItemCompat.getActionView(searchItem);
        mSearchView.setSearchableInfo(searchManager.getSearchableInfo(getActivity().getComponentName()));
        // Re-expand the search box after a configuration change.
        if (mIsSearchExpanded) {
            MenuItemCompat.expandActionView(searchItem);
            mSearchView.setIconified(false);
        }
        mSearchView.setQuery(mQueryString, false);
        mSearchView.setOnQueryTextListener(mQueryTextListener);
        mSearchView.setOnCloseListener(new SearchView.OnCloseListener() {
            @Override
            public boolean onClose() {
                mQueryString = "";
                mIsSearchExpanded = false;
                return false;
            }
        });
        mSearchView.setOnSearchClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                mIsSearchExpanded = true;
            }
        });
        mSearchView.setOnSuggestionListener(new SearchView.OnSuggestionListener() {
            @Override
            public boolean onSuggestionSelect(int position) {
                String suggestion = (String) mSearchView.getSuggestionsAdapter().getItem(position);
                mSearchView.setQuery(suggestion, true);
                return true;
            }

            @Override
            public boolean onSuggestionClick(int position) {
                Cursor cursor = (Cursor) mSearchView.getSuggestionsAdapter().getItem(position);
                mSearchView.setQuery(cursor.getString(SearchRecentSuggestions.QUERIES_PROJECTION_QUERY_INDEX), true);
                mSearchView.setIconified(true);
                return true;
            }
        });
    }

    private final SearchView.OnQueryTextListener mQueryTextListener = new SearchView.OnQueryTextListener() {
        @Override
        public boolean onQueryTextSubmit(String query) {
            mQueryString = query;
            SearchRecentSuggestions suggestions = new SearchRecentSuggestions(getActivity(),
                    MySuggestionProvider.AUTHORITY, MySuggestionProvider.MODE);
            suggestions.saveRecentQuery(query, null);
            showProgressIndeterminate(true);
            //Clear Selection to deselect the highlighted
            mySearchResultAdapter.clearSelection();
            getLoaderManager()
                    .restartLoader(LOADER_ID, null, SearchFragment.this)
                    .forceLoad();
            mSearchView.setIconified(true);
            return false;
        }

        @Override
        public boolean onQueryTextChange(String newText) {
            return false;
        }
    };

    // NOTE(review): currently unreferenced within this class; retained for debug use.
    private static void log(String tag) {
        if (BuildConfig.DEBUG) {
            Log.d(LOG_TAG, tag);
        }
    }

    /** Toggles between the indeterminate progress spinner and the result list. */
    private void showProgressIndeterminate(boolean isShowing) {
        if (isShowing) {
            mProgressContainer.setVisibility(View.VISIBLE);
            myRecyclerView.setVisibility(View.GONE);
            mEmptyView.setVisibility(View.GONE);
        } else {
            mProgressContainer.setVisibility(View.GONE);
            myRecyclerView.setVisibility(View.VISIBLE);
        }
    }

    @Override
    public Loader<Response<List<ParcelableArtist>>> onCreateLoader(int id, Bundle args) {
        return new SearchTaskLoader(getActivity(), mQueryString);
    }

    @Override
    public void onLoadFinished(Loader<Response<List<ParcelableArtist>>> loader, Response<List<ParcelableArtist>> response) {
        mArtistList = response.getData();
        mySearchResultAdapter.changeData(response.getData());
        if (mSavedInstanceState != null) {
            mySearchResultAdapter.onRestoreInstanceState(mSavedInstanceState);
            mySearchResultAdapter.notifyDataSetChanged();
        }
        if (!response.isError()) {
            mEmptyText.setText(getString(R.string.msg_no_artist_found));
        } else {
            String standardErrorMessage = RetrofitErrorHandler.getStandardErrorMessage(getActivity(),
                    response.getThrowable());
            mEmptyText.setText(standardErrorMessage);
        }
        showProgressIndeterminate(false);
    }

    @Override
    public void onLoaderReset(Loader<Response<List<ParcelableArtist>>> loader) {
    }

    private class MySimpleRecyclerCallback extends MyRecyclerView.SimpleRecyclerCallbacks {
        @Override
        public void OnItemClick(final View view, int position) {
            super.OnItemClick(view, position);
            final ParcelableArtist artist = mySearchResultAdapter.getItems().get(position);
            Bundle bundle = new Bundle();
            bundle.putString(TopTrackFragment.EXTRA_ARTIST_ID, artist.id);
            bundle.putString(TopTrackFragment.EXTRA_ARTIST_NAME, artist.name);
            ParcelableImage image = ImageUtils.getOptimumParcelableImage(artist.images, R.integer.default_cover_image_width);
            // Single null check replaces the previously duplicated "image != null" tests.
            if (image != null) {
                bundle.putString(TopTrackFragment.EXTRA_ARTIST_IMAGE, image.url);
            }
            if (hasTwoPanes()) {
                mySearchResultAdapter.setItemChecked(position, true);
                mCallback.onArtistSelected(bundle);
            } else {
                getSupportFragmentManager().beginTransaction()
                        .addToBackStack(null)
                        .replace(R.id.fragment_container,
                                TopTrackFragment.newInstance(bundle))
                        .commit();
            }
        }
    }

    /**
     * Artist Result adapter
     */
    public class ArtistSearchResultAdapter extends
            MyRecyclerAdapter<List<ParcelableArtist>, ArtistSearchResultAdapter.ViewHolder, ParcelableArtist> {

        public ArtistSearchResultAdapter(List<ParcelableArtist> data) {
            super(data);
        }

        @Override
        public ViewHolder onCreateViewHolder(ViewGroup parent, int position) {
            View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.list_item_artist, parent, false);
            return new ViewHolder(view);
        }

        @Override
        public void onBindViewHolder(ViewHolder holder, int position) {
            ParcelableArtist artist = getItem(position);
            holder.tvArtistTitle.setText(artist.name);
            /**
             * we will use resource to store the width setting so that we can modify
             * them on larger device if necessary
             */
            ParcelableImage image = ImageUtils.getOptimumParcelableImage(artist.images,
                    getResources().getInteger(R.integer.default_list_image_width));
            if (image != null) {
                Glide.with(SearchFragment.this)
                        .load(image.url)
                        .error(R.drawable.empty)
                        .into(holder.ivArtistIcon);
            } else {
                holder.ivArtistIcon.setImageResource(R.drawable.empty);
            }
            // Removed an unused "Drawable drawable = holder.view.getBackground()" local;
            // the selected flag alone drives the highlight.
            holder.view.setSelected(isSelected(position));
        }

        public class ViewHolder extends MyRecyclerViewHolder {
            final TextView tvArtistTitle;
            final ImageView ivArtistIcon;
            final View view;

            public ViewHolder(View view) {
                super(view);
                this.view = view;
                ivArtistIcon = (ImageView) view.findViewById(R.id.img_artist_icon);
                tvArtistTitle = (TextView) view.findViewById(R.id.tv_artist_name);
            }

            @Override
            public void onClick(View view) {
                if (getRecyclerCallbacks() != null) {
                    getRecyclerCallbacks().OnItemClick(view, getAdapterPosition());
                }
            }
        }
    }

    /**
     * Task Loader for Searching Artist
     */
    public static class SearchTaskLoader extends DataLoader<Response<List<ParcelableArtist>>> {
        private final SpotifyService mService;
        private final String mQueryString;

        public SearchTaskLoader(Context context, String queryString) {
            super(context);
            SpotifyApi spotifyApi = new SpotifyApi();
            mService = spotifyApi.getService();
            mQueryString = queryString;
        }

        @Override
        public Response<List<ParcelableArtist>> loadInBackground() {
            // Typed construction replaces the raw "new Response()" (unchecked warning).
            Response<List<ParcelableArtist>> response = new Response<>();
            response.setData(Collections.<ParcelableArtist>emptyList());
            ArtistsPager results;
            try {
                results = mService.searchArtists(mQueryString);
            } catch (RetrofitError e) {
                e.printStackTrace();
                response.setThrowable(RetrofitErrorHandler.handleError(e));
                return response;
            }
            if (results.artists != null && results.artists.items.size() > 0) {
                List<ParcelableArtist> parcelableArtistList = new ArrayList<>();
                for (Artist artist : results.artists.items) {
                    parcelableArtistList.add(ParcelableArtist.copy(artist));
                }
                response.setData(parcelableArtistList);
            }
            return response;
        }
    }
}
| |
/*
* Copyright 2014-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openhubframework.openhub.core.common.asynch;
import static org.openhubframework.openhub.api.asynch.AsynchConstants.*;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import org.apache.camel.*;
import org.apache.camel.component.spring.ws.SpringWebserviceConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.Assert;
import org.openhubframework.openhub.api.asynch.AsynchConstants;
import org.openhubframework.openhub.api.asynch.AsynchResponseProcessor;
import org.openhubframework.openhub.api.asynch.model.CallbackResponse;
import org.openhubframework.openhub.api.asynch.model.ConfirmationTypes;
import org.openhubframework.openhub.api.entity.Message;
import org.openhubframework.openhub.api.exception.IntegrationException;
import org.openhubframework.openhub.api.exception.InternalErrorEnum;
import org.openhubframework.openhub.api.exception.StoppingException;
import org.openhubframework.openhub.api.exception.ThrottlingExceededException;
import org.openhubframework.openhub.api.route.AbstractBasicRoute;
import org.openhubframework.openhub.api.route.CamelConfiguration;
import org.openhubframework.openhub.common.log.LogContextFilter;
import org.openhubframework.openhub.core.common.asynch.msg.MessageTransformer;
import org.openhubframework.openhub.core.common.event.AsynchEventHelper;
import org.openhubframework.openhub.core.common.exception.ExceptionTranslator;
import org.openhubframework.openhub.core.common.validator.TraceIdentifierValidator;
import org.openhubframework.openhub.spi.msg.MessageService;
import org.openhubframework.openhub.spi.throttling.ThrottleScope;
import org.openhubframework.openhub.spi.throttling.ThrottlingProcessor;
/**
* Route definition that processes incoming asynchronous message and make the following steps:
* <ol>
* <li>parse trace (SOAP) header from the request
* <li>creates {@link Message} entity
* <li>check throttling
* <li>saves Message into db
* <li>creates OK/FAIL response
* </ol>
*
* If everything works fine then the message is asynchronously redirected for next processing
* without need to take it from message queue.
*
* @author Petr Juza
* @see AsynchResponseProcessor
*/
@CamelConfiguration(value = AsynchInMessageRoute.ROUTE_BEAN)
public class AsynchInMessageRoute extends AbstractBasicRoute {
private static final Logger LOG = LoggerFactory.getLogger(AsynchInMessageRoute.class);
public static final String ROUTE_BEAN = "inMsgRouteBean";
/**
* The main route for processing incoming asynchronous messages.
*/
public static final String ROUTE_ID_ASYNC = "asyncProcessIn" + AbstractBasicRoute.ROUTE_SUFFIX;
static final int NEW_MSG_PRIORITY = 10;
static final String URI_GUARANTEED_ORDER_ROUTE = "direct:guaranteedOrderRoute";
static final String ROUTE_ID_GUARANTEED_ORDER = "guaranteedOrder" + AbstractBasicRoute.ROUTE_SUFFIX;
private static final long GUARANTEED_ORDER_MESSAGES_LIMIT = 2L;
@Autowired
private ThrottlingProcessor throttlingProcessor;
@Autowired
private MessageService messageService;
// list of validator for trace identifier is not mandatory
@Autowired(required = false)
private List<TraceIdentifierValidator> validatorList;
    /**
     * Route for incoming asynchronous message input operation.
     * <p>
     * Prerequisite: defined message headers {@link AsynchConstants#SERVICE_HEADER}, {@link AsynchConstants#OPERATION_HEADER}
     * and optional {@link AsynchConstants#OBJECT_ID_HEADER}
     * <p>
     * Output: {@link CallbackResponse} for OK message or fill {@link AsynchConstants#ERR_CALLBACK_RES_PROP} exchange property
     * if error occurred
     * <p>
     * Defines two routes: the main input route ({@link #ROUTE_ID_ASYNC}) and the
     * guaranteed-order check route ({@link #ROUTE_ID_GUARANTEED_ORDER}).
     *
     * @throws Exception if the route definitions cannot be built
     */
    @Override
    @SuppressWarnings("unchecked")
    public void doConfigure() throws Exception {
        from(URI_ASYNCH_IN_MSG)
            .routeId(ROUTE_ID_ASYNC)

            .doTry()
                // check headers existence
                .validate(header(SERVICE_HEADER).isNotNull())
                .validate(header(OPERATION_HEADER).isNotNull())

                // extract trace header, trace header is mandatory
                .process(new TraceHeaderProcessor(true, validatorList))

                // remove inbound Spring WS SOAP header, so it isn't added to outbound SOAP messages
                .removeHeader(SpringWebserviceConstants.SPRING_WS_SOAP_HEADER)

                // create Message (state = PROCESSING)
                .bean(MessageTransformer.getInstance(), "createMessage")

                // throttling: scope is per (source system, operation); throws
                // ThrottlingExceededException when the limit is exceeded
                .process(new Processor() {
                    @Override
                    public void process(Exchange exchange) throws Exception {
                        Message msg = exchange.getIn().getBody(Message.class);

                        Assert.notNull(msg, "the msg must not be null");

                        ThrottleScope throttleScope = new ThrottleScope(msg.getSourceSystem().getSystemName(),
                                msg.getOperationName());

                        throttlingProcessor.throttle(throttleScope);
                    }
                }).id("throttleProcess")

                // save it to DB
                // in big load a persisting via JPA camel component causes a blocking of processing asynchronous messages
                .bean(ROUTE_BEAN, "insertMessage")

                // check guaranteed order
                // .to(ExchangePattern.InOnly, URI_GUARANTEED_ORDER_ROUTE)
                //TODO (juza) finish in 1.1 version
                .to(URI_GUARANTEED_ORDER_ROUTE)

                // create OK response
                .bean(ROUTE_BEAN, "createOkResponse")

            .endDoTry()

            .doCatch(ThrottlingExceededException.class)
                // we want to throw exception, not return fail response
                .log(LoggingLevel.ERROR, "Incoming route - throttling rules were exceeded: ${property."
                        + Exchange.EXCEPTION_CAUGHT + ".message}.")

                // rethrow the caught exception so the caller receives it
                .process(new Processor() {
                    @Override
                    public void process(Exchange exchange) throws Exception {
                        throw (Exception) exchange.getProperty(Exchange.EXCEPTION_CAUGHT);
                    }
                })

            .doCatch(StoppingException.class)
                // we want to throw exception, not return fail response
                .log(LoggingLevel.INFO, "Incoming route - asynchronous message was rejected because ESB was stopping.")

                // rethrow the caught exception so the caller receives it
                .process(new Processor() {
                    @Override
                    public void process(Exchange exchange) throws Exception {
                        throw (Exception) exchange.getProperty(Exchange.EXCEPTION_CAUGHT);
                    }
                })

            .doCatch(SQLException.class, Exception.class)
                // any other failure is logged and converted to a FAIL response
                .process(new Processor() {
                    @Override
                    public void process(Exchange exchange) throws Exception {
                        Exception ex = (Exception) exchange.getProperty(Exchange.EXCEPTION_CAUGHT);
                        LOG.error("Incoming route - error during saving incoming message: ", ex);
                    }
                })

                // create FAIL response
                .bean(AsynchInMessageRoute.class, "createFailResponse")

            .end()

            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    // nothing to do - it's because correct running unit tests
                }
            });

        // check guaranteed order
        from(URI_GUARANTEED_ORDER_ROUTE)
            .routeId(ROUTE_ID_GUARANTEED_ORDER)

            .errorHandler(noErrorHandler())

            .validate(body().isInstanceOf(Message.class))

            // for case when exception is thrown - message has been already saved into DB
            // => mark it as PARTLY_FAILED and process it later in standard way
            // .setHeader(AsynchConstants.ASYNCH_MSG_HEADER, constant(true))
            //TODO (juza) finish in 1.1 version + delete errorHandler

            .choice()
                .when().method(ROUTE_BEAN, "isMsgInGuaranteedOrder")
                    // no guaranteed order or message in the right order => continue
                    .bean(ROUTE_BEAN, "saveLogContextParams")

                    .bean(ROUTE_BEAN, "setInsertQueueTimestamp")

                    .bean(ROUTE_BEAN, "setMsgPriority")

                    // redirect message asynchronously for next processing
                    .to(ExchangePattern.RobustInOnly, AsynchConstants.URI_ASYNC_MSG).id("toAsyncRoute")

                .otherwise()
                    // message isn't in right guaranteed order => postpone
                    .bean(ROUTE_BEAN, "postponeMessage")
            .end()

            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    // nothing to do - it's because correct running unit tests
                }
            });
    }
/**
* Insert new message into database.
*
* @param msg message that will be saved
* @return saved message
*/
@Handler
public Message insertMessage(@Body final Message msg) {
Assert.notNull(msg, "msg can not be null");
LOG.debug("Insert new asynch message '" + msg.toHumanString() + "'.");
messageService.insertMessage(msg);
return msg;
}
/**
* Checks if specified message should be processed in guaranteed order and if yes
* then checks if the message is in the right order.
*
* @param msg the asynchronous message
* @return {@code true} if message's order is ok otherwise {@code false}
*/
@Handler
public boolean isMsgInGuaranteedOrder(@Body Message msg) {
if (!msg.isGuaranteedOrder()) {
// no guaranteed order => continue
return true;
} else {
// guaranteed order => is the message in the right order?
List<Message> messages = getBean(MessageService.class)
.getMessagesForGuaranteedOrderForRoute(msg.getFunnelValue(), msg.isExcludeFailedState(), GUARANTEED_ORDER_MESSAGES_LIMIT);
if (messages.size() == 1) {
LOG.debug("There is only one processing message with funnel value: " + msg.getFunnelValue()
+ " => continue");
return true;
// is specified message first one for processing?
} else if (messages.get(0).equals(msg)) {
LOG.debug("Processing message (msg_id = {}, funnel value = '{}') is the first one"
+ " => continue", msg.getMsgId(), msg.getFunnelValue());
return true;
} else {
LOG.debug("There is at least one processing message with funnel value '{}'"
+ " before current message (msg_id = {}); message {} will be postponed.",
msg.getFunnelValue(), msg.getMsgId(), msg.toHumanString());
return false;
}
}
}
    /**
     * Postpones a message that is not (yet) first in its guaranteed order:
     * stores the message into the exchange header, switches its state to
     * POSTPONED and fires the "message postponed" notification event.
     *
     * @param exchange the Camel exchange
     * @param msg the message to postpone
     */
    @Handler
    public void postponeMessage(Exchange exchange, @Body Message msg) {
        // set Message to header because of event notification
        exchange.getIn().setHeader(AsynchConstants.MSG_HEADER, msg);

        // change state
        getBean(MessageService.class).setStatePostponed(msg);

        // generates event
        AsynchEventHelper.notifyMsgPostponed(exchange);
    }
/**
 * Saves log request ID into header {@link LogContextFilter#CTX_REQUEST_ID}.
 * It's because child threads don't inherit this information from the parent thread automatically.
 *
 * @param msg     the message
 * @param headers the incoming message headers
 */
@Handler
public void saveLogContextParams(@Body Message msg, @Headers Map<String, Object> headers) {
    // request ID should be set from LogContextFilter#initContext;
    // MDC.getCopyOfContextMap() returns Map<String, String> - use the typed map instead of a raw type
    Map<String, String> contextMap = MDC.getCopyOfContextMap();

    String requestId = null;
    if (contextMap != null && contextMap.get(LogContextFilter.CTX_REQUEST_ID) != null) {
        requestId = contextMap.get(LogContextFilter.CTX_REQUEST_ID);
        headers.put(LogContextFilter.CTX_REQUEST_ID, requestId);
    }

    LogContextHelper.setLogContextParams(msg, requestId);
}
/**
 * Stores the current time (in milliseconds) into the
 * {@link AsynchConstants#MSG_QUEUE_INSERT_HEADER} header.
 *
 * @param headers the incoming message headers
 */
@Handler
public void setInsertQueueTimestamp(@Headers Map<String, Object> headers) {
    long insertedAt = System.currentTimeMillis();
    headers.put(AsynchConstants.MSG_QUEUE_INSERT_HEADER, insertedAt);
}
/**
 * Sets the processing priority of a newly created message.
 *
 * @param msg the asynchronous message
 */
@Handler
public void setMsgPriority(@Body Message msg) {
    // new messages will be processed earlier than PARTLY_FAILED or POSTPONED messages
    msg.setProcessingPriority(NEW_MSG_PRIORITY);
}
/**
 * Creates OK response.
 *
 * @param exchange the exchange (not referenced in the body; kept for the handler signature)
 * @return CallbackResponse
 */
@Handler
public CallbackResponse createOkResponse(Exchange exchange) {
    final CallbackResponse response = new CallbackResponse();
    response.setStatus(ConfirmationTypes.OK);
    return response;
}
/**
 * Creates FAIL response {@link CallbackResponse}
 * and saves it into {@link AsynchConstants#ERR_CALLBACK_RES_PROP} exchange property.
 *
 * @param exchange the exchange
 */
@Handler
public void createFailResponse(Exchange exchange) {
    // can be more errors during processing => keep the first created FAIL response
    if (exchange.getProperty(ERR_CALLBACK_RES_PROP) != null) {
        return;
    }

    CallbackResponse callbackResponse = new CallbackResponse();
    callbackResponse.setStatus(ConfirmationTypes.FAIL);

    // creates error message; typed getProperty avoids the unchecked cast
    // (returns null when the property is absent or not convertible to Exception)
    Exception ex = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class);

    String additionalInfo;
    if (ex instanceof IntegrationException) {
        additionalInfo = ((IntegrationException) ex).getError() + ": " + ex.getMessage();
    } else {
        additionalInfo = ExceptionTranslator.composeErrorMessage(InternalErrorEnum.E106, ex);
    }
    callbackResponse.setAdditionalInfo(additionalInfo);

    exchange.setProperty(ERR_CALLBACK_RES_PROP, callbackResponse);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.dom.message;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Collections;
import javax.security.auth.callback.CallbackHandler;
import org.apache.wss4j.common.bsp.BSPRule;
import org.apache.wss4j.common.crypto.Crypto;
import org.apache.wss4j.common.crypto.CryptoFactory;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.token.SecurityTokenReference;
import org.apache.wss4j.common.util.UsernameTokenUtil;
import org.apache.wss4j.common.util.XMLUtils;
import org.apache.wss4j.dom.WSConstants;
import org.apache.wss4j.dom.common.EncodedPasswordCallbackHandler;
import org.apache.wss4j.dom.common.SOAPUtil;
import org.apache.wss4j.dom.common.SecurityTestUtil;
import org.apache.wss4j.dom.common.UsernamePasswordCallbackHandler;
import org.apache.wss4j.dom.engine.WSSConfig;
import org.apache.wss4j.dom.engine.WSSecurityEngine;
import org.apache.wss4j.dom.engine.WSSecurityEngineResult;
import org.apache.wss4j.dom.handler.RequestData;
import org.apache.wss4j.dom.handler.WSHandlerResult;
import org.apache.wss4j.dom.message.token.UsernameToken;
import org.apache.wss4j.dom.util.WSSecurityUtil;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Document;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * WS-Security Test Case for UsernameToken Key Derivation, as defined in the
 * UsernameTokenProfile 1.1 specification. The derived keys are used to encrypt
 * and sign, as per wsc:DerivedKeyToken.
 */
public class UTDerivedKeyTest {
    private static final org.slf4j.Logger LOG =
        org.slf4j.LoggerFactory.getLogger(UTDerivedKeyTest.class);

    // answers "security" for user "bob"; shared by most tests below
    private CallbackHandler callbackHandler = new UsernamePasswordCallbackHandler();
    private Crypto crypto;

    /**
     * Cleans up test state shared across the test class.
     */
    @AfterAll
    public static void cleanup() throws Exception {
        SecurityTestUtil.cleanup();
    }

    /**
     * Loads the default Crypto instance used for decryption/signature verification.
     */
    public UTDerivedKeyTest() throws Exception {
        crypto = CryptoFactory.getInstance();
    }

    /**
     * Unit test for the UsernameToken derived key functionality
     */
    @Test
    public void testUsernameTokenUnit() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        UsernameToken usernameToken = new UsernameToken(true, doc, null);
        usernameToken.setName("bob");
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        usernameToken.addSalt(doc, salt, false);
        assertTrue(salt.length == 16);
        // first salt octet encodes key usage; 0x02 is expected for generateSalt(false)
        // (presumably the "not used for signature" flag from UTP 1.1 - see spec)
        assertTrue(salt[0] == 0x02);
        byte[] utSalt = usernameToken.getSalt();
        assertTrue(salt.length == utSalt.length);
        for (int i = 0; i < salt.length; i++) {
            assertTrue(salt[i] == utSalt[i]);
        }
        usernameToken.addIteration(doc, 500);
        assertTrue(usernameToken.getIteration() == 500);
        WSSecurityUtil.prependChildElement(
            secHeader.getSecurityHeaderElement(), usernameToken.getElement()
        );
        String outputString =
            XMLUtils.prettyDocumentToString(doc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        byte[] derivedKey = UsernameTokenUtil.generateDerivedKey("security", salt, 500);
        assertTrue(derivedKey.length == 20);
        // "c2VjdXJpdHk=" is the Base64 encoding of "security"
        derivedKey = UsernameTokenUtil.generateDerivedKey(org.apache.xml.security.utils.XMLUtils.decode("c2VjdXJpdHk="),
                                                          salt, 500);
        assertTrue(derivedKey.length == 20);
    }

    /**
     * Test for encoded passwords.
     */
    @Test
    public void testDerivedKeyWithEncodedPasswordBaseline() throws Exception {
        String password = "password";
        // The SHA-1 of the password is known as a password equivalent in the UsernameToken specification.
        byte[] passwordHash = MessageDigest.getInstance("SHA-1").digest(password.getBytes(StandardCharsets.UTF_8));
        byte[] salt = org.apache.xml.security.utils.XMLUtils.decode("LKpycbfgRzwDnBz6kkhAAQ==");
        int iteration = 1049;
        byte[] expectedDerivedKey = org.apache.xml.security.utils.XMLUtils.decode("C7Ll/OY4TECb6hZuMMiX/5hzszo=");
        byte[] derivedKey = UsernameTokenUtil.generateDerivedKey(passwordHash, salt, iteration);
        assertArrayEquals(expectedDerivedKey, derivedKey, "the derived key is not as expected");
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body
     */
    @Test
    public void testDerivedKeyEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(tokenIdentifier);
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        // "testMethod" is plaintext from the sample body - must be hidden after encryption
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        verify(encryptedDoc);

        // processing must fail when UsernameToken-derived keys are disallowed
        try {
            verify(encryptedDoc, false);
            fail("Failure expected on deriving keys from a UsernameToken not allowed");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
        }
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body
     */
    @Test
    public void testDerivedKeyEncryptionWithEncodedPassword() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setPasswordsAreEncoded(true);
        // password equivalent: Base64-encoded SHA-1 of "security"
        byte[] bytes = MessageDigest.getInstance("SHA-1").digest("security".getBytes(StandardCharsets.UTF_8));
        builder.setUserInfo("bob", org.apache.xml.security.utils.XMLUtils.encodeToString(bytes));
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(tokenIdentifier);
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        // verification needs a callback handler that supplies the encoded password
        RequestData requestData = new RequestData();
        requestData.setEncodePasswords(true);
        requestData.setAllowUsernameTokenNoPassword(true);
        requestData.setCallbackHandler(new EncodedPasswordCallbackHandler());
        WSSecurityEngine newEngine = new WSSecurityEngine();
        newEngine.processSecurityHeader(encryptedDoc, requestData);
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body. In this test the
     * derived key is modified before encryption, and so decryption should fail.
     */
    @Test
    public void testDerivedKeyChangedEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        // corrupt the derived key so the receiver derives a different one
        derivedKey[5] = 'z';
        derivedKey[6] = 'a';
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(tokenIdentifier);
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on a bad derived encryption");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILED_CHECK);
        }
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body. In this test the
     * user is "colm" rather than "bob", and so decryption should fail.
     */
    @Test
    public void testDerivedKeyBadUserEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("colm", "security");
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(tokenIdentifier);
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on a bad derived encryption");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
        }
    }

    /**
     * Test using a UsernameToken derived key for signing a SOAP body
     */
    @Test
    public void testDerivedKeySignature() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(true, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(true);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key signature
        //
        WSSecDKSign sigBuilder = new WSSecDKSign(secHeader);
        sigBuilder.setTokenIdentifier(tokenIdentifier);
        sigBuilder.setSignatureAlgorithm(WSConstants.HMAC_SHA1);
        sigBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document signedDoc = sigBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        WSHandlerResult results = verify(signedDoc);
        WSSecurityEngineResult actionResult =
            results.getActionResults().get(WSConstants.SIGN).get(0);
        java.security.Principal principal =
            (java.security.Principal) actionResult.get(WSSecurityEngineResult.TAG_PRINCIPAL);
        // System.out.println(principal.getName());
        assertTrue(principal.getName().contains("DK"));
    }

    /**
     * Test using a UsernameToken derived key for signing a SOAP body
     */
    @Test
    public void testDerivedKeySignatureWithEncodedPassword() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setPasswordsAreEncoded(true);
        // password equivalent: Base64-encoded SHA-1 of "security"
        byte[] bytes = MessageDigest.getInstance("SHA-1").digest("security".getBytes(StandardCharsets.UTF_8));
        builder.setUserInfo("bob", org.apache.xml.security.utils.XMLUtils.encodeToString(bytes));
        builder.addDerivedKey(true, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(true);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key signature
        //
        WSSecDKSign sigBuilder = new WSSecDKSign(secHeader);
        sigBuilder.setTokenIdentifier(tokenIdentifier);
        sigBuilder.setSignatureAlgorithm(WSConstants.HMAC_SHA1);
        sigBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document signedDoc = sigBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        RequestData requestData = new RequestData();
        requestData.setEncodePasswords(true);
        requestData.setAllowUsernameTokenNoPassword(true);
        requestData.setCallbackHandler(new EncodedPasswordCallbackHandler());
        WSSecurityEngine newEngine = new WSSecurityEngine();
        WSHandlerResult results = newEngine.processSecurityHeader(signedDoc, requestData);
        WSSecurityEngineResult actionResult =
            results.getActionResults().get(WSConstants.SIGN).get(0);
        java.security.Principal principal =
            (java.security.Principal) actionResult.get(WSSecurityEngineResult.TAG_PRINCIPAL);
        // System.out.println(principal.getName());
        assertTrue(principal.getName().contains("DK"));
    }

    /**
     * Test using a UsernameToken derived key for signing a SOAP body. In this test the
     * derived key is modified before signature, and so signature verification should
     * fail.
     */
    @Test
    public void testDerivedKeyChangedSignature() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(true, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(true);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        // flip one byte of the derived key (always to a different value)
        if (derivedKey[5] != 12) {
            derivedKey[5] = 12;
        } else {
            derivedKey[5] = 13;
        }
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key signature
        //
        WSSecDKSign sigBuilder = new WSSecDKSign(secHeader);
        sigBuilder.setTokenIdentifier(tokenIdentifier);
        sigBuilder.setSignatureAlgorithm(WSConstants.HMAC_SHA1);
        sigBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document signedDoc = sigBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(signedDoc);
            fail("Failure expected on a bad derived signature");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILED_CHECK);
        }
    }

    /**
     * Test using a UsernameToken derived key for signing a SOAP body. In this test the
     * user is "colm" rather than "bob", and so signature verification should fail.
     */
    @Test
    public void testDerivedKeyBadUserSignature() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("colm", "security");
        builder.addDerivedKey(true, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(true);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key signature
        //
        WSSecDKSign sigBuilder = new WSSecDKSign(secHeader);
        sigBuilder.setTokenIdentifier(tokenIdentifier);
        sigBuilder.setSignatureAlgorithm(WSConstants.HMAC_SHA1);
        sigBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document signedDoc = sigBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(signedDoc);
            fail("Failure expected on a bad derived signature");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILED_AUTHENTICATION);
        }
    }

    /**
     * Unit test for creating a Username Token with no salt element that is used for
     * deriving a key for encryption.
     */
    @Test
    public void testNoSaltEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        UsernameToken usernameToken = new UsernameToken(true, doc, null);
        usernameToken.setName("bob");
        WSSConfig config = WSSConfig.getNewInstance();
        usernameToken.setID(config.getIdAllocator().createId("UsernameToken-", usernameToken));
        // NOTE: the salt is used for key derivation but deliberately NOT added to the token
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        usernameToken.addIteration(doc, 1000);
        byte[] derivedKey = UsernameTokenUtil.generateDerivedKey("security", salt, 1000);
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(usernameToken.getID());
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        WSSecurityUtil.prependChildElement(
            secHeader.getSecurityHeaderElement(), usernameToken.getElement()
        );
        String outputString =
            XMLUtils.prettyDocumentToString(doc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertFalse(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on no salt element");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.FAILURE);
        }
    }

    /**
     * Unit test for creating a Username Token with no iteration element that is used for
     * deriving a key for encryption.
     */
    @Test
    public void testNoIterationEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        UsernameToken usernameToken = new UsernameToken(true, doc, null);
        usernameToken.setName("bob");
        WSSConfig config = WSSConfig.getNewInstance();
        usernameToken.setID(config.getIdAllocator().createId("UsernameToken-", usernameToken));
        // salt is present but the iteration element is deliberately omitted
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        usernameToken.addSalt(doc, salt, false);
        byte[] derivedKey = UsernameTokenUtil.generateDerivedKey("security", salt, 1000);
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(usernameToken.getID());
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        WSSecurityUtil.prependChildElement(
            secHeader.getSecurityHeaderElement(), usernameToken.getElement()
        );
        String outputString =
            XMLUtils.prettyDocumentToString(doc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertFalse(outputString.contains("wsse11:Iteration"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on no iteration element");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY_TOKEN);
        }
    }

    /**
     * Unit test for creating a Username Token with an iteration value < 1000 that is used for
     * deriving a key for encryption.
     */
    @Test
    public void testLowIterationEncryption() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        UsernameToken usernameToken = new UsernameToken(true, doc, null);
        usernameToken.setName("bob");
        WSSConfig config = WSSConfig.getNewInstance();
        usernameToken.setID(config.getIdAllocator().createId("UsernameToken-", usernameToken));
        usernameToken.addIteration(doc, 500);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        usernameToken.addSalt(doc, salt, false);
        byte[] derivedKey = UsernameTokenUtil.generateDerivedKey("security", salt, 500);
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(usernameToken.getID());
        encrBuilder.setCustomValueType(WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        WSSecurityUtil.prependChildElement(
            secHeader.getSecurityHeaderElement(), usernameToken.getElement()
        );
        String outputString =
            XMLUtils.prettyDocumentToString(doc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on a low iteration value");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY);
        }
        // turn off the relevant BSP rule (R4218) and processing should succeed
        RequestData data = new RequestData();
        data.setCallbackHandler(callbackHandler);
        data.setDecCrypto(crypto);
        data.setIgnoredBSPRules(Collections.singletonList(BSPRule.R4218));
        data.setAllowUsernameTokenNoPassword(true);
        WSSecurityEngine engine = new WSSecurityEngine();
        engine.setWssConfig(config);
        engine.processSecurityHeader(doc, data);
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body. The Reference to the
     * UsernameToken contains a non-standard value type, which is rejected when the corresponding
     * BSP rule is turned on.
     */
    @Test
    public void testBadValueType() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        encrBuilder.setTokenIdentifier(tokenIdentifier);
        // deliberately wrong value type (SAML instead of UsernameToken)
        encrBuilder.setCustomValueType(WSConstants.WSS_SAML_TOKEN_TYPE);
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on a bad value type");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY);
        }
        // Turn off BSP compliance and it should work
        RequestData data = new RequestData();
        data.setCallbackHandler(callbackHandler);
        data.setDecCrypto(crypto);
        data.setAllowUsernameTokenNoPassword(true);
        WSSConfig config = WSSConfig.getNewInstance();
        WSSecurityEngine newEngine = new WSSecurityEngine();
        newEngine.setWssConfig(config);
        data.setIgnoredBSPRules(Collections.singletonList(BSPRule.R4214));
        newEngine.processSecurityHeader(encryptedDoc, data);
    }

    /**
     * Test using a UsernameToken derived key for encrypting a SOAP body. A KeyIdentifier is
     * used to refer to the UsernameToken, which is forbidden by the BSP.
     */
    @Test
    public void testKeyIdentifier() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader(doc);
        secHeader.insertSecurityHeader();
        WSSecUsernameToken builder = new WSSecUsernameToken(secHeader);
        builder.setUserInfo("bob", "security");
        builder.addDerivedKey(false, 1000);
        byte[] salt = UsernameTokenUtil.generateSalt(false);
        builder.prepare(salt);
        byte[] derivedKey = builder.getDerivedKey(salt);
        assertTrue(derivedKey.length == 20);
        String tokenIdentifier = builder.getId();
        //
        // Derived key encryption
        //
        WSSecDKEncrypt encrBuilder = new WSSecDKEncrypt(secHeader);
        encrBuilder.setSymmetricEncAlgorithm(WSConstants.AES_128);
        // reference the UsernameToken via a KeyIdentifier (BSP-forbidden) instead of a Reference
        SecurityTokenReference strEncKey = new SecurityTokenReference(doc);
        strEncKey.setKeyIdentifier(
            WSConstants.WSS_USERNAME_TOKEN_VALUE_TYPE, tokenIdentifier, true
        );
        encrBuilder.setStrElem(strEncKey.getElement());
        Document encryptedDoc = encrBuilder.build(derivedKey);
        builder.prependToHeader();
        String outputString =
            XMLUtils.prettyDocumentToString(encryptedDoc);
        assertTrue(outputString.contains("wsse:Username"));
        assertFalse(outputString.contains("wsse:Password"));
        assertTrue(outputString.contains("wsse11:Salt"));
        assertTrue(outputString.contains("wsse11:Iteration"));
        assertFalse(outputString.contains("testMethod"));
        if (LOG.isDebugEnabled()) {
            LOG.debug(outputString);
        }
        try {
            verify(encryptedDoc);
            fail("Failure expected on a key identifier");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY);
        }
        // turn off the relevant BSP rule (R4215) and processing should succeed
        WSSecurityEngine newEngine = new WSSecurityEngine();
        RequestData data = new RequestData();
        data.setCallbackHandler(callbackHandler);
        data.setDecCrypto(crypto);
        data.setIgnoredBSPRules(Collections.singletonList(BSPRule.R4215));
        data.setAllowUsernameTokenNoPassword(true);
        WSSConfig config = WSSConfig.getNewInstance();
        newEngine.setWssConfig(config);
        newEngine.processSecurityHeader(encryptedDoc, data);
    }

    /**
     * Verifies the soap envelope, allowing UsernameToken-derived keys.
     *
     * @param doc the document holding the soap envelope
     * @return the handler result of security processing
     * @throws java.lang.Exception Thrown when there is a problem in verification
     */
    private WSHandlerResult verify(Document doc) throws Exception {
        return verify(doc, true);
    }

    /**
     * Verifies the soap envelope.
     *
     * @param doc the document holding the soap envelope
     * @param allowUsernameTokenDerivedKeys whether UsernameTokens without a password are allowed
     * @return the handler result of security processing
     * @throws java.lang.Exception Thrown when there is a problem in verification
     */
    private WSHandlerResult verify(
        Document doc,
        boolean allowUsernameTokenDerivedKeys
    ) throws Exception {
        WSSecurityEngine secEngine = new WSSecurityEngine();
        RequestData requestData = new RequestData();
        requestData.setAllowUsernameTokenNoPassword(allowUsernameTokenDerivedKeys);
        requestData.setCallbackHandler(callbackHandler);
        requestData.setDecCrypto(crypto);
        requestData.setSigVerCrypto(crypto);
        return secEngine.processSecurityHeader(doc, requestData);
    }
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.crate.repository.support;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.annotation.Id;
import org.springframework.data.crate.core.CrateOperations;
import org.springframework.data.crate.repository.CrateRepository;
/**
* @author Hasnain Javed
* @since 1.0.0
*/
@RunWith(MockitoJUnitRunner.class)
public class SimpleCrateRepositoryTest {

  @Mock
  private CrateOperations crateOperations;

  private CrateEntityInformation<EntityWithId, String> entityWithIdInformation = new EntityWithIdInformation();
  private CrateEntityInformation<EntityWithOutId, String> entityWithoutIdInformation = new EntityWithoutIdInformation();

  private CrateRepository<EntityWithId, String> entityWithIdRepository;
  private CrateRepository<EntityWithOutId, String> entityWithoutIdRepository;

  @Before
  public void setup() {
    entityWithIdRepository = new SimpleCrateRepository<>(entityWithIdInformation, crateOperations);
    entityWithoutIdRepository = new SimpleCrateRepository<>(entityWithoutIdInformation, crateOperations);
  }

  @Test
  public void shouldSaveEntityWithId() {
    EntityWithId newUser = new EntityWithId("hasnain@test.com", "Hasnain");
    // no row exists for this id, so save() must go down the insert path
    when(crateOperations.findById(newUser.getEmail(), EntityWithId.class, "entitywithid")).thenReturn(null);
    entityWithIdRepository.save(newUser);
    verify(crateOperations).findById(newUser.getEmail(), EntityWithId.class, "entitywithid");
    verify(crateOperations).insert(any(EntityWithId.class), eq("entitywithid"));
    verify(crateOperations, never()).update(any(EntityWithId.class), anyString());
  }

  @Test
  public void shouldUpdateEntityWithId() {
    EntityWithId existingUser = new EntityWithId("hasnain@test.com", "Hasnain");
    // a row already exists for this id, so save() must go down the update path
    when(crateOperations.findById(existingUser.getEmail(), EntityWithId.class, "entitywithid")).thenReturn(existingUser);
    entityWithIdRepository.save(existingUser);
    verify(crateOperations).findById(existingUser.getEmail(), EntityWithId.class, "entitywithid");
    verify(crateOperations).update(any(EntityWithId.class), eq("entitywithid"));
    verify(crateOperations, never()).insert(any(EntityWithId.class), anyString());
  }

  @Test
  public void shouldSaveEntityWithNoId() {
    // an entity without an id can never be looked up, so save() always inserts
    EntityWithOutId anonymous = new EntityWithOutId("hasnain@test.com", "Hasnain");
    entityWithoutIdRepository.save(anonymous);
    verify(crateOperations).insert(any(EntityWithOutId.class), eq("entitywithoutid"));
    verify(crateOperations, never()).findById(anyObject(), eq(EntityWithOutId.class), anyString());
    verify(crateOperations, never()).update(any(EntityWithOutId.class), anyString());
  }

  /** Test fixture: entity whose {@code email} field acts as the id. */
  static class EntityWithId {
    @Id
    private String email;
    private String name;

    public EntityWithId(String email, String name) {
      this.email = email;
      this.name = name;
    }

    public String getEmail() {
      return email;
    }

    public void setEmail(String email) {
      this.email = email;
    }

    public String getName() {
      return name;
    }

    public void setName(String name) {
      this.name = name;
    }
  }

  /** Test fixture: entity that deliberately has no id attribute. */
  static class EntityWithOutId {
    private String email;
    private String name;

    public EntityWithOutId(String email, String name) {
      this.email = email;
      this.name = name;
    }
  }

  /** Entity information describing {@link EntityWithId}, keyed by email. */
  private static class EntityWithIdInformation implements CrateEntityInformation<EntityWithId, String> {

    @Override
    public boolean isNew(EntityWithId entity) {
      return entity.getEmail() == null;
    }

    @Override
    public String getId(EntityWithId entity) {
      return entity.getEmail();
    }

    @Override
    public Class<String> getIdType() {
      return String.class;
    }

    @Override
    public Class<EntityWithId> getJavaType() {
      return EntityWithId.class;
    }

    @Override
    public String getTableName() {
      return EntityWithId.class.getSimpleName().toLowerCase();
    }

    @Override
    public String getIdAttribute() {
      return "email";
    }

    @Override
    public Long getVersion(EntityWithId entity) {
      return null;
    }
  }

  /** Entity information describing {@link EntityWithOutId}; every instance is "new". */
  private static class EntityWithoutIdInformation implements CrateEntityInformation<EntityWithOutId, String> {

    @Override
    public boolean isNew(EntityWithOutId entity) {
      return true;
    }

    @Override
    public String getId(EntityWithOutId entity) {
      return null;
    }

    @Override
    public Class<String> getIdType() {
      return String.class;
    }

    @Override
    public Class<EntityWithOutId> getJavaType() {
      return EntityWithOutId.class;
    }

    @Override
    public String getTableName() {
      return EntityWithOutId.class.getSimpleName().toLowerCase();
    }

    @Override
    public String getIdAttribute() {
      return null;
    }

    @Override
    public Long getVersion(EntityWithOutId entity) {
      return null;
    }
  }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.bbg.referencedata;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.PublicSPI;
/**
* Reference data specific to a single identifier.
* <p>
* This class is mutable and not thread-safe.
*/
@PublicSPI
@BeanDefinition
public class ReferenceData extends DirectBean {
  /**
   * The identifier that this reference data is for.
   */
  @PropertyDefinition(validate = "notNull")
  private String _identifier;
  /**
   * The reference data that was obtained. The key is the data field, the value is the reference data value.
   */
  @PropertyDefinition(validate = "notNull")
  private FudgeMsg _fieldValues = FudgeContext.EMPTY_MESSAGE;
  /**
   * The errors. This includes errors related to the identifier and to a single field.
   */
  @PropertyDefinition
  private final List<ReferenceDataError> _errors = Lists.newArrayList();
  /**
   * The unique list of bloomberg EIDs.
   */
  @PropertyDefinition
  private final Set<Integer> _eidValues = new TreeSet<>();
  /**
   * Creates an instance.
   */
  protected ReferenceData() {
  }
  /**
   * Creates an instance.
   *
   * @param identifier
   *          the identifier, not null
   */
  public ReferenceData(final String identifier) {
    setIdentifier(identifier);
  }
  /**
   * Creates an instance.
   *
   * @param identifier
   *          the identifier, not null
   * @param fieldValues
   *          the field-value map, not null
   */
  public ReferenceData(final String identifier, final FudgeMsg fieldValues) {
    setIdentifier(identifier);
    setFieldValues(fieldValues);
  }
  // -------------------------------------------------------------------------
  /**
   * Adds an error to the list contained.
   *
   * @param error
   *          the reference data error to add, not null
   */
  public void addError(final ReferenceDataError error) {
    ArgumentChecker.notNull(error, "error");
    // getErrors() returns the live internal list, so this mutates this bean directly
    getErrors().add(error);
  }
  /**
   * Removes all errors for the specified field.
   *
   * @param field
   *          the field to remove, null means the whole-identifier errors
   */
  public void removeErrors(final String field) {
    for (final Iterator<ReferenceDataError> it = getErrors().iterator(); it.hasNext();) {
      final ReferenceDataError error = it.next();
      // Objects.equal is null-safe: a null field matches errors whose field is null
      if (Objects.equal(field, error.getField())) {
        it.remove();
      }
    }
  }
  /**
   * Checks if the whole identifier was in error.
   *
   * @return true if the whole identifier was in error
   */
  public boolean isIdentifierError() {
    return isError(null);
  }
  /**
   * Checks if a field was in error.
   *
   * @param field
   *          the field to check, null for the whole identifier
   * @return true if the given field (or the whole identifier when null) was in error
   */
  public boolean isError(final String field) {
    for (final ReferenceDataError error : getErrors()) {
      if (Objects.equal(field, error.getField())) {
        return true;
      }
    }
    return false;
  }
  // NOTE(review): the section below is generated by Joda-Beans — regenerate rather than hand-edit.
  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code ReferenceData}.
   * @return the meta-bean, not null
   */
  public static ReferenceData.Meta meta() {
    return ReferenceData.Meta.INSTANCE;
  }
  static {
    JodaBeanUtils.registerMetaBean(ReferenceData.Meta.INSTANCE);
  }
  @Override
  public ReferenceData.Meta metaBean() {
    return ReferenceData.Meta.INSTANCE;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the identifier that this reference data is for.
   * @return the value of the property, not null
   */
  public String getIdentifier() {
    return _identifier;
  }
  /**
   * Sets the identifier that this reference data is for.
   * @param identifier  the new value of the property, not null
   */
  public void setIdentifier(String identifier) {
    JodaBeanUtils.notNull(identifier, "identifier");
    this._identifier = identifier;
  }
  /**
   * Gets the the {@code identifier} property.
   * @return the property, not null
   */
  public final Property<String> identifier() {
    return metaBean().identifier().createProperty(this);
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the reference data that was obtained. The key is the data field, the value is the reference data value.
   * @return the value of the property, not null
   */
  public FudgeMsg getFieldValues() {
    return _fieldValues;
  }
  /**
   * Sets the reference data that was obtained. The key is the data field, the value is the reference data value.
   * @param fieldValues  the new value of the property, not null
   */
  public void setFieldValues(FudgeMsg fieldValues) {
    JodaBeanUtils.notNull(fieldValues, "fieldValues");
    this._fieldValues = fieldValues;
  }
  /**
   * Gets the the {@code fieldValues} property.
   * @return the property, not null
   */
  public final Property<FudgeMsg> fieldValues() {
    return metaBean().fieldValues().createProperty(this);
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the errors. This includes errors related to the identifier and to a single field.
   * Note: this is the live internal list, not a copy.
   * @return the value of the property, not null
   */
  public List<ReferenceDataError> getErrors() {
    return _errors;
  }
  /**
   * Sets the errors. This includes errors related to the identifier and to a single field.
   * @param errors  the new value of the property, not null
   */
  public void setErrors(List<ReferenceDataError> errors) {
    JodaBeanUtils.notNull(errors, "errors");
    this._errors.clear();
    this._errors.addAll(errors);
  }
  /**
   * Gets the the {@code errors} property.
   * @return the property, not null
   */
  public final Property<List<ReferenceDataError>> errors() {
    return metaBean().errors().createProperty(this);
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the unique list of bloomberg EIDs.
   * @return the value of the property, not null
   */
  public Set<Integer> getEidValues() {
    return _eidValues;
  }
  /**
   * Sets the unique list of bloomberg EIDs.
   * @param eidValues  the new value of the property, not null
   */
  public void setEidValues(Set<Integer> eidValues) {
    JodaBeanUtils.notNull(eidValues, "eidValues");
    this._eidValues.clear();
    this._eidValues.addAll(eidValues);
  }
  /**
   * Gets the the {@code eidValues} property.
   * @return the property, not null
   */
  public final Property<Set<Integer>> eidValues() {
    return metaBean().eidValues().createProperty(this);
  }
  //-----------------------------------------------------------------------
  @Override
  public ReferenceData clone() {
    return JodaBeanUtils.cloneAlways(this);
  }
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      ReferenceData other = (ReferenceData) obj;
      return JodaBeanUtils.equal(getIdentifier(), other.getIdentifier()) &&
          JodaBeanUtils.equal(getFieldValues(), other.getFieldValues()) &&
          JodaBeanUtils.equal(getErrors(), other.getErrors()) &&
          JodaBeanUtils.equal(getEidValues(), other.getEidValues());
    }
    return false;
  }
  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(getIdentifier());
    hash = hash * 31 + JodaBeanUtils.hashCode(getFieldValues());
    hash = hash * 31 + JodaBeanUtils.hashCode(getErrors());
    hash = hash * 31 + JodaBeanUtils.hashCode(getEidValues());
    return hash;
  }
  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(160);
    buf.append("ReferenceData{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }
  protected void toString(StringBuilder buf) {
    buf.append("identifier").append('=').append(JodaBeanUtils.toString(getIdentifier())).append(',').append(' ');
    buf.append("fieldValues").append('=').append(JodaBeanUtils.toString(getFieldValues())).append(',').append(' ');
    buf.append("errors").append('=').append(JodaBeanUtils.toString(getErrors())).append(',').append(' ');
    buf.append("eidValues").append('=').append(JodaBeanUtils.toString(getEidValues())).append(',').append(' ');
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code ReferenceData}.
   */
  public static class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code identifier} property.
     */
    private final MetaProperty<String> _identifier = DirectMetaProperty.ofReadWrite(
        this, "identifier", ReferenceData.class, String.class);
    /**
     * The meta-property for the {@code fieldValues} property.
     */
    private final MetaProperty<FudgeMsg> _fieldValues = DirectMetaProperty.ofReadWrite(
        this, "fieldValues", ReferenceData.class, FudgeMsg.class);
    /**
     * The meta-property for the {@code errors} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<List<ReferenceDataError>> _errors = DirectMetaProperty.ofReadWrite(
        this, "errors", ReferenceData.class, (Class) List.class);
    /**
     * The meta-property for the {@code eidValues} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<Set<Integer>> _eidValues = DirectMetaProperty.ofReadWrite(
        this, "eidValues", ReferenceData.class, (Class) Set.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "identifier",
        "fieldValues",
        "errors",
        "eidValues");
    /**
     * Restricted constructor.
     */
    protected Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case -1618432855:  // identifier
          return _identifier;
        case 427230908:  // fieldValues
          return _fieldValues;
        case -1294635157:  // errors
          return _errors;
        case 1553260930:  // eidValues
          return _eidValues;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public BeanBuilder<? extends ReferenceData> builder() {
      return new DirectBeanBuilder<ReferenceData>(new ReferenceData());
    }
    @Override
    public Class<? extends ReferenceData> beanType() {
      return ReferenceData.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code identifier} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> identifier() {
      return _identifier;
    }
    /**
     * The meta-property for the {@code fieldValues} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<FudgeMsg> fieldValues() {
      return _fieldValues;
    }
    /**
     * The meta-property for the {@code errors} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<List<ReferenceDataError>> errors() {
      return _errors;
    }
    /**
     * The meta-property for the {@code eidValues} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Set<Integer>> eidValues() {
      return _eidValues;
    }
    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -1618432855:  // identifier
          return ((ReferenceData) bean).getIdentifier();
        case 427230908:  // fieldValues
          return ((ReferenceData) bean).getFieldValues();
        case -1294635157:  // errors
          return ((ReferenceData) bean).getErrors();
        case 1553260930:  // eidValues
          return ((ReferenceData) bean).getEidValues();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @SuppressWarnings("unchecked")
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -1618432855:  // identifier
          ((ReferenceData) bean).setIdentifier((String) newValue);
          return;
        case 427230908:  // fieldValues
          ((ReferenceData) bean).setFieldValues((FudgeMsg) newValue);
          return;
        case -1294635157:  // errors
          ((ReferenceData) bean).setErrors((List<ReferenceDataError>) newValue);
          return;
        case 1553260930:  // eidValues
          ((ReferenceData) bean).setEidValues((Set<Integer>) newValue);
          return;
      }
      super.propertySet(bean, propertyName, newValue, quiet);
    }
    @Override
    protected void validate(Bean bean) {
      JodaBeanUtils.notNull(((ReferenceData) bean)._identifier, "identifier");
      JodaBeanUtils.notNull(((ReferenceData) bean)._fieldValues, "fieldValues");
      JodaBeanUtils.notNull(((ReferenceData) bean)._errors, "errors");
      JodaBeanUtils.notNull(((ReferenceData) bean)._eidValues, "eidValues");
    }
  }
  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
package klaue.furrycrossposter.sites;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import klaue.furrycrossposter.ImageInfo;
import klaue.furrycrossposter.ImageInfo.Gender;
import klaue.furrycrossposter.ImageInfo.RatingSexual;
import klaue.furrycrossposter.ImageInfo.RatingViolence;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.Select;
import org.openqa.selenium.support.ui.WebDriverWait;
public class Weasyl extends Site {
    /**
     * Maximum accepted file size in bytes (10 MB).
     * NOTE(review): the thumbnail check previously compared against 36 MB while both
     * its comment and its user-facing message said 10 MB — unified at 10 MB here.
     */
    private static final long MAX_FILE_BYTES = 10L * 1024 * 1024;

    private WebDriver driver;

    @Override
    public boolean doUpload(ImageInfo imageInfo) {
        if (!canUpload(imageInfo)) return false;
        Path imagePath = imageInfo.getImagePath();
        Path thumbPath = imageInfo.getThumbPath();
        driver = getDriver();
        driver.get("https://www.weasyl.com/signin");
        // wait for the user to log in manually (up to 60 seconds)
        WebDriverWait wait = new WebDriverWait(driver, 60);
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("username")));
        driver.findElement(By.xpath("//a[@href='/submit']")).click();
        driver.findElement(By.xpath("//a[@href='/submit/visual']")).click();
        driver.findElement(By.id("submitfile")).sendKeys(imagePath.toString());
        if (thumbPath != null) {
            driver.findElement(By.name("thumbfile")).sendKeys(thumbPath.toString());
        }
        driver.findElement(By.id("submissiontitle")).sendKeys(imageInfo.getTitle());
        if (!imageInfo.getDescription().isEmpty()) {
            driver.findElement(By.id("submissiondesc")).sendKeys(imageInfo.getDescription());
        }
        // category
        Select category = new Select(driver.findElement(By.id("submissioncat")));
        switch (imageInfo.getType()) {
            case DIGITAL:
                category.selectByValue("1030");
                break;
            case SKETCH:
                category.selectByValue("1010");
                break;
            case TRADITIONAL:
                category.selectByValue("1020");
                break;
        }
        // rating
        Select rating = new Select(driver.findElement(By.id("submissionrating")));
        if (imageInfo.getSexualRating() == RatingSexual.NONE && imageInfo.getViolenceRating() == RatingViolence.NONE) {
            rating.selectByValue("10"); // general
        } else if (imageInfo.getSexualRating() == RatingSexual.NONE && imageInfo.getViolenceRating() != RatingViolence.VIOLENCE_EX) {
            // sex not, violence moderate. weasyl counts any nudity as mature, so better play it safe and make every
            // sexual thing at least mature
            rating.selectByValue("20"); // Moderate (13+)
        } else if (imageInfo.getSexualRating() != RatingSexual.NUDITY_EX) {
            // explicit violence and/or moderate nudity
            rating.selectByValue("30"); // Mature (18+ non-sexual)
        } else {
            // nudity explicit, violence maybe too
            rating.selectByValue("40"); // Explicit (18+ sexual)
        }
        // folders
        Select folderSelect = new Select(driver.findElement(By.id("submissionfolder")));
        // weasyl has both folders and subfolders, both of which can be used.
        // in the select, they're separated by a /, like "parentfolder / subfolder". The problem is that foldernames
        // themselves can contain /, so a folder named "sub / folder" inside a folder named "parent / folder" would
        // show up as "parent / folder / sub / folder" in the list. To check if our folder is in it, we have to search
        // for all names that either start with our folder, or have " / " before it.
        // not a foolproof way, if he has one parent folder called "folder" and a sub-folder called "sub / folder", it
        // would generate two entries in the list, "folder" and "folder / sub / folder". If we searched for "folder" in
        // that way, both would be valid
        //
        // while weasyl can only contain / set one folder, but FurryCrossposter may contain many (for other sites), we can't
        // just select the shortest one and be sure, but.. we still do. Screw the user anyway if he made such folders!
        ArrayList<WebElement> foundFolders = new ArrayList<>();
        for (WebElement entry : folderSelect.getOptions()) {
            String currentFolderFromSelect = entry.getText().toLowerCase().replace(" ", "_");
            for (String currentFolderFromImageInfo : imageInfo.getFolders()) {
                if (currentFolderFromSelect.matches("^(.+_[\\/]_)*\\Q" + currentFolderFromImageInfo + "\\E$")) {
                    foundFolders.add(entry);
                    //break; NO! multiple may match
                }
            }
        }
        // pick the match with the shortest display text (heuristically the least nested folder)
        WebElement folderToSelect = null;
        for (WebElement foundFolder : foundFolders) {
            if (folderToSelect == null || folderToSelect.getText().length() > foundFolder.getText().length()) {
                folderToSelect = foundFolder;
            }
        }
        if (folderToSelect != null) {
            folderSelect.selectByVisibleText(folderToSelect.getText());
        }
        if (imageInfo.isFriendsOnly()) driver.findElement(By.id("submit-friends")).click();
        if (imageInfo.hasNoNotification()) driver.findElement(By.id("nonotifcation")).click();
        driver.findElement(By.cssSelector("ul.listbuilder")).click();
        driver.switchTo().activeElement().sendKeys(getTags(imageInfo) + " ");
        driver.findElement(By.cssSelector("button.button.positive")).click();
        showFinishMessage(driver);
        //driver.quit();
        return true;
    }

    @Override
    public ArrayList<String> getErrorReasons(ImageInfo imageInfo) {
        ArrayList<String> reasons = new ArrayList<>();
        /*
         * main image
         */
        // does file exist?
        if (imageInfo.getImagePath() == null || !Files.exists(imageInfo.getImagePath())) {
            reasons.add("no image");
        } else {
            // is file of correct type?
            String extension = getExtension(imageInfo.getImagePath());
            if (!isSupportedImageType(extension)) {
                reasons.add("unsupported image type ." + extension);
            } else {
                // how about filesize
                try {
                    long bytes = Files.size(imageInfo.getImagePath());
                    if (bytes > MAX_FILE_BYTES) { // 10 MB
                        reasons.add("image file too large (>10MB)");
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    // shouldn't happen, but let's assume it's correct
                }
            }
        }
        /*
         * thumb
         */
        // only if set
        if (imageInfo.getThumbPath() != null && Files.exists(imageInfo.getThumbPath())) {
            // is file of correct type?
            String extension = getExtension(imageInfo.getThumbPath());
            if (!isSupportedImageType(extension)) {
                reasons.add("unsupported thumb image type ." + extension);
            } else {
                // how about filesize
                try {
                    long bytes = Files.size(imageInfo.getThumbPath());
                    if (bytes > MAX_FILE_BYTES) { // 10 MB
                        reasons.add("thumb file too large (>10MB)");
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    // shouldn't happen, but let's assume it's correct
                }
            }
        }
        if (imageInfo.getTitle().isEmpty()) reasons.add("no title");
        int tagAmount = imageInfo.getGenders().size() + imageInfo.getSpeciesTags().size() + imageInfo.getKinkTags().size() + imageInfo.getOtherTags().size();
        if (tagAmount < 2) reasons.add("needs at least 2 tags (incl. gender)");
        if (imageInfo.getSexualRating() != ImageInfo.RatingSexual.NONE && (imageInfo.getOtherTags().contains("cub")
                || imageInfo.getKinkTags().contains("cub"))) {
            reasons.add("cubs in sexual context");
        }
        if (imageInfo.isToScraps()) reasons.add("no support for scraps - use scraps folder, see weasyl help");
        if (imageInfo.isUnlisted()) reasons.add("no support for 'unlisted'");
        return reasons;
    }

    @Override
    public ArrayList<String> getWarningReasons(ImageInfo imageInfo) {
        ArrayList<String> reasons = new ArrayList<>();
        // none
        return reasons;
    }

    @Override
    public String getName() {
        return "Weasyl";
    }

    /** Returns the lower-case file extension of the given path, or "" if there is none. */
    private static String getExtension(Path path) {
        String fileName = path.getFileName().toString();
        int dot = fileName.lastIndexOf('.');
        return dot > 0 ? fileName.substring(dot + 1).toLowerCase() : "";
    }

    /** True if the (lower-case) extension is an image type weasyl accepts. */
    private static boolean isSupportedImageType(String extension) {
        return extension.equals("jpeg") || extension.equals("jpg")
                || extension.equals("png") || extension.equals("gif");
    }

    /**
     * Builds the space-separated tag string for the submission: gender tags first
     * (adding "intersex" if any non-binary gender is present), then species, kink
     * and other tags.
     */
    private static String getTags(ImageInfo imageInfo) {
        StringBuilder tags = new StringBuilder();
        // add genders first
        boolean containsIntersex = false;
        for (Gender gender : imageInfo.getGenders()) {
            tags.append(gender.getTag()).append(" ");
            if (!containsIntersex) {
                containsIntersex = !(gender.equals(Gender.M2F) || gender.equals(Gender.F2M)
                        || gender.equals(Gender.MALE) || gender.equals(Gender.FEMALE) || gender.equals(Gender.AMBIGUOUS));
            }
        }
        if (containsIntersex) tags.append("intersex ");
        // add other tags
        for (String species : imageInfo.getSpeciesTags()) {
            tags.append(species).append(" ");
        }
        for (String kink : imageInfo.getKinkTags()) {
            tags.append(kink).append(" ");
        }
        for (String other : imageInfo.getOtherTags()) {
            tags.append(other).append(" ");
        }
        // drop the trailing space; guard against a (theoretically) empty tag list
        if (tags.length() > 0) {
            tags.deleteCharAt(tags.length() - 1);
        }
        return tags.toString();
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.startup.StartupActionScriptManager;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.updateSettings.impl.PluginDownloader;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.GuiUtils;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* @author stathik
*/
public class PluginInstaller {
private static final Logger LOG = Logger.getInstance(PluginInstaller.class);
public static final String UNKNOWN_HOST_MARKER = "__unknown_repository__";
private static final Object ourLock = new Object();
private PluginInstaller() { }
/**
 * Prepares the given plugins (and any dependencies that get pulled in) for installation.
 * Dependencies discovered during preparation are appended to {@code pluginsToInstall}.
 *
 * @return true if at least one plugin was prepared for installation
 */
public static boolean prepareToInstall(List<PluginNode> pluginsToInstall,
                                       List<? extends IdeaPluginDescriptor> allPlugins,
                                       PluginManagerMain.PluginEnabler pluginEnabler,
                                       @NotNull ProgressIndicator indicator) {
  updateUrls(pluginsToInstall, indicator);
  Set<PluginNode> pulledInDependencies = new THashSet<>();
  boolean prepared = prepareToInstall(pluginsToInstall, allPlugins, pulledInDependencies, pluginEnabler, indicator);
  // surface every dependency that was installed along the way to the caller's list
  for (PluginNode dependency : pulledInDependencies) {
    if (!pluginsToInstall.contains(dependency)) {
      pluginsToInstall.add(dependency);
    }
  }
  return prepared;
}
/**
 * Resolves repository name and download URL for nodes that were tagged with
 * {@link #UNKNOWN_HOST_MARKER} by looking them up in all configured plugin hosts.
 * Nodes that cannot be resolved get their repository name cleared.
 */
private static void updateUrls(List<? extends PluginNode> pluginsToInstall, @NotNull ProgressIndicator indicator) {
  // use value equality, not ==: the marker is an ordinary String, and a node whose
  // repository name merely equals the marker must be treated the same way
  boolean unknownNodes = false;
  for (PluginNode node : pluginsToInstall) {
    if (UNKNOWN_HOST_MARKER.equals(node.getRepositoryName())) {
      unknownNodes = true;
      break;
    }
  }
  if (!unknownNodes) return;
  List<String> hosts = ContainerUtil.newSmartList();
  ContainerUtil.addIfNotNull(hosts, ApplicationInfoEx.getInstanceEx().getBuiltinPluginsUrl());
  hosts.addAll(UpdateSettings.getInstance().getPluginHosts());
  Map<PluginId, IdeaPluginDescriptor> allPlugins = ContainerUtil.newHashMap();
  for (String host : hosts) {
    try {
      List<IdeaPluginDescriptor> descriptors = RepositoryHelper.loadPlugins(host, indicator);
      for (IdeaPluginDescriptor descriptor : descriptors) {
        allPlugins.put(descriptor.getPluginId(), descriptor);
      }
    }
    catch (IOException ignored) { }  // a host being unreachable is best-effort; try the rest
  }
  for (PluginNode node : pluginsToInstall) {
    if (UNKNOWN_HOST_MARKER.equals(node.getRepositoryName())) {
      IdeaPluginDescriptor descriptor = allPlugins.get(node.getPluginId());
      if (descriptor != null) {
        node.setRepositoryName(((PluginNode)descriptor).getRepositoryName());
        node.setDownloadUrl(((PluginNode)descriptor).getDownloadUrl());
      }
      else {
        node.setRepositoryName(null);
      }
    }
  }
}
/**
 * Prepares each plugin in the list for installation, collecting pulled-in
 * dependencies into {@code installedDependant}.
 *
 * @return true if at least one plugin was prepared; false on the first I/O failure
 */
private static boolean prepareToInstall(List<? extends PluginNode> pluginsToInstall,
                                        List<? extends IdeaPluginDescriptor> allPlugins,
                                        Set<PluginNode> installedDependant,
                                        PluginManagerMain.PluginEnabler pluginEnabler,
                                        @NotNull ProgressIndicator indicator) {
  // ids of everything already queued, so dependency resolution can skip them
  List<PluginId> pluginIds = new SmartList<>();
  for (PluginNode queued : pluginsToInstall) {
    pluginIds.add(queued.getPluginId());
  }
  boolean anyPrepared = false;
  for (PluginNode pluginNode : pluginsToInstall) {
    indicator.setText(pluginNode.getName());
    try {
      anyPrepared |= prepareToInstall(pluginNode, pluginIds, allPlugins, installedDependant, pluginEnabler, indicator);
    }
    catch (IOException e) {
      // report the failure to the user and abort the whole batch
      String title = IdeBundle.message("title.plugin.error");
      Notifications.Bus.notify(new Notification(title, title, pluginNode.getName() + ": " + e.getMessage(), NotificationType.ERROR));
      return false;
    }
  }
  return anyPrepared;
}
/**
 * Downloads a single plugin after interactively resolving its required and optional
 * dependencies and any older plugin that this one replaces.
 *
 * @param pluginNode         the plugin to download
 * @param pluginIds          ids of every plugin queued in the current batch (may be null);
 *                           dependencies already in this list are not downloaded again
 * @param allPlugins         repository descriptors used to resolve dependency ids
 * @param installedDependant accumulates every node this call decides to install
 * @param pluginEnabler      used to query and disable a plugin replaced by this one
 * @param indicator          progress indicator updated during the download
 * @return true if the plugin was downloaded and scheduled for installation; false if the
 *         user declined a dependency/confirmation dialog or the download failed
 * @throws IOException on download errors
 */
private static boolean prepareToInstall(PluginNode pluginNode,
                                        List<? extends PluginId> pluginIds,
                                        List<? extends IdeaPluginDescriptor> allPlugins,
                                        Set<PluginNode> installedDependant,
                                        PluginManagerMain.PluginEnabler pluginEnabler,
                                        @NotNull ProgressIndicator indicator) throws IOException {
  installedDependant.add(pluginNode);
  // check for dependent plugins at first.
  if (pluginNode.getDepends() != null && pluginNode.getDepends().size() > 0) {
    // prepare plugins list for install
    final PluginId[] optionalDependentPluginIds = pluginNode.getOptionalDependentPluginIds();
    final List<PluginNode> depends = new ArrayList<>();
    final List<PluginNode> optionalDeps = new ArrayList<>();
    for (int i = 0; i < pluginNode.getDepends().size(); i++) {
      PluginId depPluginId = pluginNode.getDepends().get(i);
      if (PluginManager.isPluginInstalled(depPluginId) || PluginManagerCore.isModuleDependency(depPluginId) ||
          InstalledPluginsState.getInstance().wasInstalled(depPluginId) ||
          (pluginIds != null && pluginIds.contains(depPluginId))) {
        // ignore installed or installing plugins
        continue;
      }
      IdeaPluginDescriptor depPluginDescriptor = findPluginInRepo(depPluginId, allPlugins);
      PluginNode depPlugin;
      if (depPluginDescriptor instanceof PluginNode) {
        depPlugin = (PluginNode) depPluginDescriptor;
      } else {
        depPlugin = new PluginNode(depPluginId, depPluginId.getIdString(), "-1");
      }
      // NOTE(review): when the repository lookup returns null, the "-1"-versioned fallback
      // node built above is discarded by this guard — looks like dead code or a dropped
      // install attempt; confirm the intended behavior.
      if (depPluginDescriptor != null) {
        // Split dependencies into required and optional, prompted for separately below.
        if (ArrayUtil.indexOf(optionalDependentPluginIds, depPluginId) != -1) {
          optionalDeps.add(depPlugin);
        }
        else {
          depends.add(depPlugin);
        }
      }
    }
    if (depends.size() > 0) { // has something to install prior installing the plugin
      final boolean[] proceed = new boolean[1];
      try {
        // Dialogs must run on the EDT; runOrInvokeAndWait blocks until the user answers.
        GuiUtils.runOrInvokeAndWait(() -> {
          String title = IdeBundle.message("plugin.manager.dependencies.detected.title");
          String deps = StringUtil.join(depends, node -> node.getName(), ", ");
          String message = IdeBundle.message("plugin.manager.dependencies.detected.message", depends.size(), deps);
          proceed[0] = Messages.showYesNoDialog(message, title, Messages.getWarningIcon()) == Messages.YES;
        });
      }
      catch (Exception e) {
        return false;
      }
      // Required dependencies: declining, or failing to prepare them, aborts this install.
      if (!proceed[0] || !prepareToInstall(depends, allPlugins, installedDependant, pluginEnabler, indicator)) {
        return false;
      }
    }
    if (optionalDeps.size() > 0) {
      final boolean[] proceed = new boolean[1];
      try {
        GuiUtils.runOrInvokeAndWait(() -> {
          String title = IdeBundle.message("plugin.manager.dependencies.detected.title");
          String deps = StringUtil.join(optionalDeps, node -> node.getName(), ", ");
          String message = IdeBundle.message("plugin.manager.optional.dependencies.detected.message", optionalDeps.size(), deps);
          proceed[0] = Messages.showYesNoDialog(message, title, Messages.getWarningIcon()) == Messages.YES;
        });
      }
      catch (Exception e) {
        return false;
      }
      // Optional dependencies: declining them is fine, but failing to prepare accepted ones is not.
      if (proceed[0] && !prepareToInstall(optionalDeps, allPlugins, installedDependant, pluginEnabler, indicator)) {
        return false;
      }
    }
  }
  // Ask whether the plugin superseded by this one should be disabled after install.
  Ref<IdeaPluginDescriptor> toDisable = Ref.create(null);
  Optional<PluginReplacement> replacement = StreamEx.of(PluginReplacement.EP_NAME.getExtensions())
    .findFirst(r -> r.getNewPluginId().equals(pluginNode.getPluginId().getIdString()));
  if (replacement.isPresent()) {
    PluginReplacement pluginReplacement = replacement.get();
    IdeaPluginDescriptor oldPlugin = PluginManager.getPlugin(pluginReplacement.getOldPluginDescriptor().getPluginId());
    if (oldPlugin == null) {
      LOG.warn("Plugin with id '" + pluginReplacement.getOldPluginDescriptor().getPluginId() + "' not found");
    }
    else if (!pluginEnabler.isDisabled(oldPlugin.getPluginId())) {
      ApplicationManager.getApplication().invokeAndWait(() -> {
        String title = IdeBundle.message("plugin.manager.obsolete.plugins.detected.title");
        String message = pluginReplacement.getReplacementMessage(oldPlugin, pluginNode);
        if (Messages.showYesNoDialog(message, title, Messages.getWarningIcon()) == Messages.YES) {
          toDisable.set(oldPlugin);
        }
      });
    }
  }
  PluginDownloader downloader = PluginDownloader.createDownloader(pluginNode, pluginNode.getRepositoryName(), null);
  if (downloader.prepareToInstall(indicator)) {
    // ourLock serializes installs; other code paths (e.g. prepareToUninstall) take the same lock.
    synchronized (ourLock) {
      downloader.install();
    }
    pluginNode.setStatus(PluginNode.STATUS_DOWNLOADED);
    if (!toDisable.isNull()) {
      pluginEnabler.disablePlugins(Collections.singleton(toDisable.get()));
    }
  }
  else {
    return false;
  }
  return true;
}
/**
 * Looks up a plugin descriptor by id in the repository plugin list.
 *
 * @param depPluginId id of the dependency to resolve
 * @param allPlugins  descriptors fetched from the plugin repository
 * @return the matching descriptor, or {@code null} if the repository does not contain it
 */
@Nullable
private static IdeaPluginDescriptor findPluginInRepo(PluginId depPluginId, List<? extends IdeaPluginDescriptor> allPlugins) {
  // A sequential scan is enough: the list is small and in-memory, so the fork-join
  // overhead of .parallel() only costs time without buying anything.
  return allPlugins.stream().filter(p -> p.getPluginId().equals(depPluginId)).findAny().orElse(null);
}
/**
 * Schedules an installed, non-bundled plugin for deletion on the next startup
 * and notifies state listeners about the pending uninstall.
 */
public static void prepareToUninstall(PluginId pluginId) throws IOException {
  synchronized (ourLock) {
    if (!PluginManager.isPluginInstalled(pluginId)) {
      return;
    }
    IdeaPluginDescriptor descriptor = PluginManager.getPlugin(pluginId);
    if (descriptor == null) {
      PluginManagerMain.LOG.error("Plugin not found: " + pluginId);
      return;
    }
    if (descriptor.isBundled()) {
      PluginManagerMain.LOG.error("Plugin is bundled: " + pluginId);
      return;
    }
    // Actual removal happens on restart via the startup action script.
    StartupActionScriptManager.addActionCommand(new StartupActionScriptManager.DeleteCommand(descriptor.getPath()));
    fireState(descriptor, false);
  }
}
/**
 * Queues startup-script commands that install the given plugin artifact:
 * a .jar is copied into the plugins directory as-is, anything else is treated
 * as a zip whose stale unpacked directory is dropped before unzipping.
 */
public static void install(@NotNull File sourceFile,
                           boolean deleteSourceFile,
                           @Nullable File existingPlugin,
                           @NotNull IdeaPluginDescriptor descriptor) throws IOException {
  List<StartupActionScriptManager.ActionCommand> commands = new ArrayList<>();
  if (existingPlugin != null) {
    commands.add(new StartupActionScriptManager.DeleteCommand(existingPlugin));
  }
  String pluginsPath = PathManager.getPluginsPath();
  boolean isJar = sourceFile.getName().endsWith(".jar");
  if (isJar) {
    commands.add(new StartupActionScriptManager.CopyCommand(sourceFile, new File(pluginsPath, sourceFile.getName())));
  }
  else {
    // drop the stale directory left by a previous version, then unpack the new one
    commands.add(new StartupActionScriptManager.DeleteCommand(new File(pluginsPath, rootEntryName(sourceFile))));
    commands.add(new StartupActionScriptManager.UnzipCommand(sourceFile, new File(pluginsPath)));
  }
  if (deleteSourceFile) {
    commands.add(new StartupActionScriptManager.DeleteCommand(sourceFile));
  }
  StartupActionScriptManager.addActionCommands(commands);
  fireState(descriptor, true);
}
/**
 * Returns the name of the top-level directory inside the given zip archive.
 * We do not necessarily get a separate entry for a subdirectory when a file in
 * the archive is placed inside one, so the first path separator anywhere in an
 * entry name identifies the root directory.
 *
 * @throws IOException if no entry contains a path separator (corrupted archive)
 */
private static String rootEntryName(File zip) throws IOException {
  try (ZipFile zipFile = new ZipFile(zip)) {
    for (Enumeration<? extends ZipEntry> en = zipFile.entries(); en.hasMoreElements(); ) {
      String entryName = en.nextElement().getName();
      int slash = entryName.indexOf('/');
      if (slash > 0) {
        return entryName.substring(0, slash);
      }
    }
  }
  throw new IOException("Corrupted archive (no file entries): " + zip);
}
// Copy-on-write list: listeners can be added/removed concurrently with fireState iteration.
private static final List<PluginStateListener> myStateListeners = ContainerUtil.createLockFreeCopyOnWriteList();
/**
 * Subscribes to plugin install/uninstall events; callbacks are delivered via fireState.
 */
public static void addStateListener(@NotNull PluginStateListener listener) {
  myStateListeners.add(listener);
}
/**
 * Removes a previously registered install/uninstall listener.
 */
public static void removeStateListener(@NotNull PluginStateListener listener) {
  myStateListeners.remove(listener);
}
/**
 * Notifies every registered listener about a plugin state change,
 * dispatching on the UI thread via invokeLaterIfNeeded.
 *
 * @param install true for an install event, false for an uninstall event
 */
private static void fireState(@NotNull IdeaPluginDescriptor descriptor, boolean install) {
  UIUtil.invokeLaterIfNeeded(() -> myStateListeners.forEach(listener -> {
    if (install) {
      listener.install(descriptor);
    }
    else {
      listener.uninstall(descriptor);
    }
  }));
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
/* ====================================================================
This product contains an ASLv2 licensed version of the OOXML signer
package from the eID Applet project
http://code.google.com/p/eid-applet/source/browse/trunk/README.txt
Copyright (C) 2008-2014 FedICT.
================================================================= */
package org.apache.poi.poifs.crypt.dsig.services;
import static org.apache.poi.POIXMLTypeLoader.DEFAULT_XML_OPTIONS;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.Provider;
import java.security.Security;
import java.security.spec.AlgorithmParameterSpec;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import javax.xml.crypto.Data;
import javax.xml.crypto.MarshalException;
import javax.xml.crypto.OctetStreamData;
import javax.xml.crypto.XMLCryptoContext;
import javax.xml.crypto.XMLStructure;
import javax.xml.crypto.dom.DOMStructure;
import javax.xml.crypto.dsig.TransformException;
import javax.xml.crypto.dsig.TransformService;
import javax.xml.crypto.dsig.spec.TransformParameterSpec;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
import org.apache.poi.util.XmlSort;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.openxmlformats.schemas.xpackage.x2006.digitalSignature.CTRelationshipReference;
import org.openxmlformats.schemas.xpackage.x2006.digitalSignature.RelationshipReferenceDocument;
import org.openxmlformats.schemas.xpackage.x2006.relationships.CTRelationship;
import org.openxmlformats.schemas.xpackage.x2006.relationships.CTRelationships;
import org.openxmlformats.schemas.xpackage.x2006.relationships.RelationshipsDocument;
import org.openxmlformats.schemas.xpackage.x2006.relationships.STTargetMode;
import org.w3.x2000.x09.xmldsig.TransformDocument;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* JSR105 implementation of the RelationshipTransform transformation.
*
* <p>
* Specs: http://openiso.org/Ecma/376/Part2/12.2.4#26
* </p>
*/
public class RelationshipTransformService extends TransformService {
    public static final String TRANSFORM_URI = "http://schemas.openxmlformats.org/package/2006/RelationshipTransform";

    /** Relationship ids (Relationship/@Id) that survive the transform; all others are stripped. */
    private final List<String> sourceIds;

    private static final POILogger LOG = POILogFactory.getLogger(RelationshipTransformService.class);

    /**
     * Relationship Transform parameter specification class.
     */
    public static class RelationshipTransformParameterSpec implements TransformParameterSpec {
        List<String> sourceIds = new ArrayList<String>();
        /** Adds a relationship id to be preserved by the transform. */
        public void addRelationshipReference(String relationshipId) {
            sourceIds.add(relationshipId);
        }
        /** @return true if at least one relationship id has been added */
        public boolean hasSourceIds() {
            return !sourceIds.isEmpty();
        }
    }

    public RelationshipTransformService() {
        super();
        LOG.log(POILogger.DEBUG, "constructor");
        this.sourceIds = new ArrayList<String>();
    }

    /**
     * Register the provider for this TransformService
     *
     * @see javax.xml.crypto.dsig.TransformService
     */
    public static synchronized void registerDsigProvider() {
        // the xml signature classes will try to find a special TransformerService,
        // which is ofcourse unknown to JCE before ...
        final String dsigProvider = "POIXmlDsigProvider";
        // Security.getProvider (not getProperty) is the correct "already registered" check:
        // getProperty consults the security *properties* map, which never contains provider
        // names, so the previous check always re-ran registration and only worked because
        // addProvider() silently ignores a duplicate provider name.
        if (Security.getProvider(dsigProvider) == null) {
            Provider p = new Provider(dsigProvider, 1.0, dsigProvider){
                static final long serialVersionUID = 1L;
            };
            p.put("TransformService." + TRANSFORM_URI, RelationshipTransformService.class.getName());
            p.put("TransformService." + TRANSFORM_URI + " MechanismType", "DOM");
            Security.addProvider(p);
        }
    }

    /**
     * Initializes this transform from programmatic parameters (signing side).
     *
     * @throws InvalidAlgorithmParameterException if params is not a RelationshipTransformParameterSpec
     */
    @Override
    public void init(TransformParameterSpec params) throws InvalidAlgorithmParameterException {
        LOG.log(POILogger.DEBUG, "init(params)");
        if (!(params instanceof RelationshipTransformParameterSpec)) {
            throw new InvalidAlgorithmParameterException();
        }
        RelationshipTransformParameterSpec relParams = (RelationshipTransformParameterSpec) params;
        for (String sourceId : relParams.sourceIds) {
            this.sourceIds.add(sourceId);
        }
    }

    /**
     * Initializes this transform from the ds:Transform DOM element (verification side),
     * reading the RelationshipReference/@SourceId parameters.
     */
    @Override
    public void init(XMLStructure parent, XMLCryptoContext context) throws InvalidAlgorithmParameterException {
        LOG.log(POILogger.DEBUG, "init(parent,context)");
        LOG.log(POILogger.DEBUG, "parent java type: " + parent.getClass().getName());
        DOMStructure domParent = (DOMStructure) parent;
        Node parentNode = domParent.getNode();
        try {
            TransformDocument transDoc = TransformDocument.Factory.parse(parentNode, DEFAULT_XML_OPTIONS);
            XmlObject[] xoList = transDoc.getTransform().selectChildren(RelationshipReferenceDocument.type.getDocumentElementName());
            if (xoList.length == 0) {
                LOG.log(POILogger.WARN, "no RelationshipReference/@SourceId parameters present");
            }
            for (XmlObject xo : xoList) {
                String sourceId = ((CTRelationshipReference)xo).getSourceId();
                LOG.log(POILogger.DEBUG, "sourceId: ", sourceId);
                this.sourceIds.add(sourceId);
            }
        } catch (XmlException e) {
            throw new InvalidAlgorithmParameterException(e);
        }
    }

    /**
     * Writes the configured source ids back into the ds:Transform element as
     * mdssi:RelationshipReference children.
     */
    @Override
    public void marshalParams(XMLStructure parent, XMLCryptoContext context) throws MarshalException {
        LOG.log(POILogger.DEBUG, "marshallParams(parent,context)");
        DOMStructure domParent = (DOMStructure) parent;
        Element parentNode = (Element)domParent.getNode();
        // parentNode.setAttributeNS(XML_NS, "xmlns:mdssi", XML_DIGSIG_NS);
        Document doc = parentNode.getOwnerDocument();
        for (String sourceId : this.sourceIds) {
            RelationshipReferenceDocument relRef = RelationshipReferenceDocument.Factory.newInstance();
            relRef.addNewRelationshipReference().setSourceId(sourceId);
            Node n = relRef.getRelationshipReference().getDomNode();
            n = doc.importNode(n, true);
            parentNode.appendChild(n);
        }
    }

    @Override
    public AlgorithmParameterSpec getParameterSpec() {
        LOG.log(POILogger.DEBUG, "getParameterSpec");
        return null;
    }

    /**
     * Applies the Relationships Transform (ISO/IEC 29500-2:2008, 13.2.4.24):
     * parses the relationships part, drops every Relationship not listed in
     * sourceIds, defaults TargetMode to Internal, sorts the remaining elements
     * by Id and serializes the result without an XML declaration.
     */
    @Override
    public Data transform(Data data, XMLCryptoContext context) throws TransformException {
        LOG.log(POILogger.DEBUG, "transform(data,context)");
        LOG.log(POILogger.DEBUG, "data java type: " + data.getClass().getName());
        OctetStreamData octetStreamData = (OctetStreamData) data;
        LOG.log(POILogger.DEBUG, "URI: " + octetStreamData.getURI());
        InputStream octetStream = octetStreamData.getOctetStream();
        RelationshipsDocument relDoc;
        try {
            relDoc = RelationshipsDocument.Factory.parse(octetStream, DEFAULT_XML_OPTIONS);
        } catch (Exception e) {
            throw new TransformException(e.getMessage(), e);
        }
        LOG.log(POILogger.DEBUG, "relationships document", relDoc);
        CTRelationships rels = relDoc.getRelationships();
        List<CTRelationship> relList = rels.getRelationshipList();
        Iterator<CTRelationship> relIter = rels.getRelationshipList().iterator();
        while (relIter.hasNext()) {
            CTRelationship rel = relIter.next();
            /*
             * See: ISO/IEC 29500-2:2008(E) - 13.2.4.24 Relationships Transform
             * Algorithm.
             */
            if (!this.sourceIds.contains(rel.getId())) {
                LOG.log(POILogger.DEBUG, "removing element: " + rel.getId());
                relIter.remove();
            } else {
                if (!rel.isSetTargetMode()) {
                    rel.setTargetMode(STTargetMode.INTERNAL);
                }
            }
        }
        // TODO: remove non element nodes ???
        LOG.log(POILogger.DEBUG, "# Relationship elements", relList.size());
        // Canonical order: Relationship elements sorted lexicographically by Id.
        XmlSort.sort(rels, new Comparator<XmlCursor>(){
            public int compare(XmlCursor c1, XmlCursor c2) {
                String id1 = ((CTRelationship)c1.getObject()).getId();
                String id2 = ((CTRelationship)c2.getObject()).getId();
                return id1.compareTo(id2);
            }
        });
        try {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            XmlOptions xo = new XmlOptions();
            xo.setSaveNoXmlDecl();
            relDoc.save(bos, xo);
            return new OctetStreamData(new ByteArrayInputStream(bos.toByteArray()));
        } catch (IOException e) {
            throw new TransformException(e.getMessage(), e);
        }
    }

    @Override
    public Data transform(Data data, XMLCryptoContext context, OutputStream os) throws TransformException {
        LOG.log(POILogger.DEBUG, "transform(data,context,os)");
        return null;
    }

    @Override
    public boolean isFeatureSupported(String feature) {
        LOG.log(POILogger.DEBUG, "isFeatureSupported(feature)");
        return false;
    }
}
| |
package com.dexode.adapter;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Created by Dawid Drozd aka Gelldur on 15.04.16.
*/
@RunWith(MockitoJUnitRunner.class)
public class RecyclerAdapterCommandManagerTest {
@Mock
RecyclerAdapterCommandManager.AdapterWrapper _adapterWrapper;
@Test
public void simpleTest() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
verify(_adapterWrapper, atLeastOnce()).notifyDataSetChanged();
}
@Test
public void checkTransaction() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
verify(_adapterWrapper, atLeastOnce()).notifyDataSetChanged();
manager.remove(0);
manager.insert(0, 1);
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
manager.remove(1);
manager.insert(1, 2);
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
manager.remove(2);
manager.insert(2, 3);
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
manager.remove(3);
manager.insert(3, 4);
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
manager.remove(4);
manager.insert(4, 5);
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
}
@Test
public void checkClean() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
manager.reset();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
}
@Test
public void checkChanges() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
manager.reset();
manager.pushBackOne(1);
manager.pushBackOne(3);
manager.pushBackOne(5);
manager.commit();
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
verify(_adapterWrapper, times(1)).notifyItemRemoved(1);
verify(_adapterWrapper, times(1)).notifyItemRemoved(2);
}
@Test
public void checkChangesAddRemove() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
manager.reset();
manager.pushBackOne(1);
manager.pushBackOne(3);
manager.pushBackOne(6);
manager.pushBackOne(5);
manager.commit();
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, never()).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
verify(_adapterWrapper, times(1)).notifyItemRemoved(1);
verify(_adapterWrapper, times(1)).notifyItemRemoved(2);
verify(_adapterWrapper, times(1)).notifyItemInserted(2);
}
@Test
public void checkChangesMove() {
RecyclerAdapterCommandManager manager = new RecyclerAdapterCommandManager(_adapterWrapper);
manager.onAttachedToRecyclerView();
manager.pushBackOne(1);
manager.pushBackOne(2);
manager.pushBackOne(3);
manager.pushBackOne(4);
manager.pushBackOne(5);
manager.commit();
manager.reset();
manager.pushBackOne(5);
manager.pushBackOne(4);
manager.pushBackOne(3);
manager.pushBackOne(2);
manager.pushBackOne(1);
manager.commit();
verify(_adapterWrapper, times(2)).notifyDataSetChanged();
verify(_adapterWrapper, times(2)).notifyItemMoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeInserted(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemRangeRemoved(anyInt(), anyInt());
verify(_adapterWrapper, never()).notifyItemInserted(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemRemoved(anyInt());
verify(_adapterWrapper, never()).notifyItemChanged(anyInt());
}
}
| |
/*
* Copyright (C) 2009-2012 The MITRE Corporation. See the toplevel
* file LICENSE for license terms.
*/
package org.mitre.mat.core;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.mitre.mat.core.AnnotationIDCache;
/**
* An Abstract class defining the common behavior of all Attribute Types.
* @author robyn
*/
public abstract class AttributeType {
/**
 * The Aggregation type for an attribute whose value is a List of values
 */
public static final int LIST_AGGREGATION = 1;
/**
 * The Aggregation type for an attribute whose value is a Set of values
 */
public static final int SET_AGGREGATION = 2;
/**
 * The Aggregation type for an attribute whose value is a single value (default)
 */
public static final int NONE_AGGREGATION = 0;
/**
 * A list of the names of the different aggregation types, in array order corresponding
 * to the aggregation constants above (index 0 = none, 1 = list, 2 = set).
 * NOTE(review): public mutable array — callers can overwrite entries; consider whether
 * exposure was intended before tightening.
 */
public static String[] aggregationString = new String[]{"none", "list", "set"};
/**
 * Maps an aggregation name ("list"/"set") to its integer constant.
 * Any other (invalid) value is converted to NONE_AGGREGATION.
 */
static int getAggregationFromString(String textValue) {
    if (textValue.equals("set")) {
        return SET_AGGREGATION;
    }
    else if (textValue.equals("list")) {
        return LIST_AGGREGATION;
    }
    return NONE_AGGREGATION;
}
/**
 * The parent Atype to which this Attribute belongs
 */
protected Atype atype;
/**
 * The name of the attribute
 */
protected String name;
/**
 * Specifies whether or not the attribute's value is optional
 */
protected boolean optional;
/**
 * Specifies whether or not this Attribute is a distinguishing
 * attribute for equality. Used for scoring.
 */
protected boolean distinguishingAttrForEquality;
/**
 * Specifies the aggregation type of this Attribute
 * @see #LIST_AGGREGATION
 * @see #SET_AGGREGATION
 * @see #NONE_AGGREGATION
 */
protected int aggregationType;
/**
 * The type of Attribute this is. Must be set by the implementing class.
 * Expected values are the type constants defined on Atype
 * (e.g. "string", "int", "float", "boolean", "annotation").
 */
protected String type; /* must be set by the implementing class */
/**
 * This Attribute's default value, if any.
 */
protected Object defaultValue;
/**
 * Specifies whether this attribute's value should default to the text
 * span of the annotation
 */
protected boolean defaultIsTextSpan;
// true once manageDefaults() has validated either defaultValue or defaultIsTextSpan
private boolean hasDefault = false;
/**
 * Specifies whether or not this attribute is considered a "choice attribute".
 * A choice attribute is a "string" or "int" type Attribute whose value is
 * restricted to a provided list of choices.
 */
protected boolean isChoiceAttribute = false;
/**
 * Basic Constructor. Creates a non-optional, non-aggregated attribute with
 * no default value.
 * @param atype the Atype to which this AttributeType will belong
 * @param name the name of the attribute
 * @throws AnnotationException if construction fails in the delegated full constructor
 */
public AttributeType(Atype atype, String name) throws AnnotationException {
    this(atype, name, false, NONE_AGGREGATION, false, null, false);
}
/**
 * Old constructor with defaults for defaultValue and defaultIsTextSpan for
 * backward compatibility.
 * @param atype the Atype to which this AttributeType will belong
 * @param name the name of the attribute
 * @param optional whether the attribute's value is optional
 * @param aggregation the aggregation type (one of the *_AGGREGATION constants)
 * @param distinguishing whether this is a distinguishing attribute for equality
 * @throws AnnotationException if the aggregation type is unknown
 */
public AttributeType(Atype atype, String name, boolean optional, int aggregation,
        boolean distinguishing) throws AnnotationException {
    this(atype, name, optional, aggregation, distinguishing, null, false);
}
/**
 * Full Constructor.
 * @param atype the Atype to which this AttributeType will belong
 * @param name the name of the attribute
 * @param optional specifies whether or not this attribute's value is optional
 * @param aggregation specifies the aggregation type of this attribute
 * @param distinguishing specifies whether or not this attribute is a distinguishing
 *                       attribute for equality
 * @param defaultValue provides a default value (or null if there is no default)
 * @param defaultIsTextSpan specifies whether or not this attribute's default value
 *                          should be the annotation's text span (meaningless for
 *                          spanless annotations). If this is true, you should not
 *                          also provide a default value, and vice versa.
 * @throws AnnotationException if the aggregation type is not one of the
 *         *_AGGREGATION constants
 */
public AttributeType(Atype atype, String name, boolean optional, int aggregation,
        boolean distinguishing, Object defaultValue, boolean defaultIsTextSpan)
        throws AnnotationException {
    // Validate before touching any state, and include a separator in the message
    // (previously the aggregation value was glued onto the text: "...type3").
    if (aggregation != NONE_AGGREGATION && aggregation != LIST_AGGREGATION
            && aggregation != SET_AGGREGATION) {
        throw new AnnotationException("unknown attribute aggregation type: "
                + aggregation);
    }
    this.atype = atype;
    this.aggregationType = aggregation;
    this.distinguishingAttrForEquality = distinguishing;
    this.name = name;
    this.optional = optional;
    this.defaultValue = defaultValue;
    this.defaultIsTextSpan = defaultIsTextSpan;
}
/**
 * Copies this AttributeType onto the same parent Atype by delegating to the
 * subclass-specific {@link #copy(Atype)}.
 * @return a copy of this AttributeType for the same Atype
 * @throws AnnotationException if the subclass copy fails validation
 */
public AttributeType copy() throws AnnotationException {
    return copy(this.atype);
}
/**
 * Checks whether a default value has been specified, and if so whether it is
 * acceptable, recording the result in {@code hasDefault}.
 * <p>
 * Rules enforced: defaultValue and defaultIsTextSpan are mutually exclusive;
 * aggregated (list/set) attributes cannot have defaults; an explicit default
 * must satisfy {@link #checkValue}; a text-span default requires the Atype to
 * have a span.
 * @throws AnnotationException if any of the above rules is violated
 */
protected void manageDefaults() throws AnnotationException {
    if (defaultValue != null || defaultIsTextSpan) {
        if (defaultValue != null && defaultIsTextSpan) {
            throw new AnnotationException("can't declare both default value and defaultIsTextSpan for attribute " + name);
        }
        if (aggregationType > 0) {
            throw new AnnotationException("can't declare default for aggregated "
                    + type + " attribute: " + name);
        }
        if (defaultValue != null && !checkValue(defaultValue)) {
            throw new AnnotationException("default: " + defaultValue + " for "
                    + type + " attribute " + name + " does not meet the attribute requirements");
        }
        // Bug fix: the span check used to sit in an unconditional else branch, so a
        // *valid* explicit default on a spanless Atype wrongly raised the text-span
        // error. The check only applies when the default IS the text span.
        if (defaultIsTextSpan && !atype.hasSpan()) {
            throw new AnnotationException("can't use text span as default for spanless annotation");
        }
        this.hasDefault = true;
    }
}
/**
 * Resolves the default value of this attribute for the given annotation.
 * @param annot the annotation whose text span may supply the default
 * @return the default value (possibly extracted from the annotation's text span),
 *         or null if no default was declared
 * @throws AnnotationException if text-span extraction fails
 */
public Object getAttributeDefault(AnnotationCore annot) throws AnnotationException {
    if (!hasDefault) {
        return null;
    }
    return defaultIsTextSpan
            ? extractAndCoerceTextExtent((Annotation) annot)
            : defaultValue;
}
// Upon request of John Aberdeen, null and non-parsing values are set to null
// rather than throwing an exception.
private Object extractAndCoerceTextExtent(Annotation annot) throws AnnotationException {
    String span = annot.getDoc().getSignal().substring(annot.getStartIndex(), annot.getEndIndex());
    Object coerced = digestSingleValueFromString(span);
    if (coerced == null || checkValue(coerced)) {
        return coerced;
    }
    return null;
}
// implementations will return appropriate subtypes
/**
 * Copy this AttributeType. (Implementations should return the appropriate subtypes)
 * @param atype The Atype this attribute is being copied to
 * @return a copy of this AttributeType for the given Atype
 * @throws AnnotationException if the copy fails validation
 */
public abstract AttributeType copy(Atype atype) throws AnnotationException;
/**
 * Copy this AttributeType without necessarily doing all of the validations.
 * Defaults to calling the <code>copy(Atype)</code> method but can be overridden
 * in subclasses to provide a quicker (and possibly safer) copy when certain
 * validations can/must be skipped. Most users should call <code>copy(Atype)</code>
 * instead. This is called by <code>Atype.copy</code> when copying all of an
 * already-validated Atype.
 *
 * @param atype The Atype this attribute is being copied to
 * @return A copy of this instance of AttributeType for and pointing to the Atype passed in
 * @throws AnnotationException if the underlying copy fails
 * @see #copy(Atype)
 */
public AttributeType quickCopy(Atype atype) throws AnnotationException {
    return this.copy(atype);
}
/**
 * Check the validity of a proposed value for this attribute.
 * Will use polymorphism to figure out if checking a list, set or single value
 * and then check the aggregationType to make sure it's appropriate.
 * @param v the candidate value
 * @return true if the value is acceptable for this attribute
 */
public abstract boolean checkValue(Object v);
/**
 * Check the validity of and import a value for this attribute. For Annotation
 * attributes, importing involves registering the reference. For other types
 * importing doesn't do anything.
 * @param doc the document the value belongs to
 * @param value the candidate value
 * @return true if the value is valid and was imported
 */
public abstract boolean checkAndImportSingleValue(MATDocument doc, Object value);
// called when values are deleted or removed from the collection
// used for Annotation attributes, to clear reference hash
/**
 * Clear the value of this attribute. Called when values are deleted or
 * removed from the collection, and used for Annotation attributes, to
 * clear the reference hash.
 * @param doc the document the value belongs to
 */
public abstract void clearValue(MATDocument doc);
/**
 * Attempts to coerce a String value into the type appropriate for this Annotation
 * @param val the raw string value
 * @return the coerced value
 * @throws AnnotationException if the string cannot be coerced
 */
public abstract Object digestSingleValueFromString(String val) throws AnnotationException;
/**
 * Returns the type of the attribute. Possible values are "string", "int",
 * "float", "boolean" and "annotation" -- the constants defined in
 * <code>Atype</code>.
 * @return the type of the attribute
 * @see Atype#ANNOTATION_ATTR_TYPE
 * @see Atype#BOOLEAN_ATTR_TYPE
 * @see Atype#FLOAT_ATTR_TYPE
 * @see Atype#INT_ATTR_TYPE
 * @see Atype#STRING_ATTR_TYPE
 */
public String getType() {
return type;
}
/**
 * Returns this attribute's aggregation type -- it will be one of the
 * aggregation constants defined in this class.
 * @return the aggregation type
 * @see #NONE_AGGREGATION
 * @see #LIST_AGGREGATION
 * @see #SET_AGGREGATION
 */
public int getAggregationType() {
return aggregationType;
}
/**
 * Returns the {@link Atype} that this attribute definition belongs to.
 *
 * @return this attribute's parent Atype
 */
public Atype getAtype() {
  return this.atype;
}
/**
 * Reports whether this attribute participates in equality comparisons.
 *
 * @return whether this attribute is a distinguishing attribute for equality
 */
public boolean isDistinguishingAttrForEquality() {
  return this.distinguishingAttrForEquality;
}
/**
 * Returns the name of this attribute.
 *
 * @return this attribute's name
 */
public String getName() {
  return this.name;
}
/**
 * Reports whether a value for this attribute may be omitted.
 *
 * @return whether this attribute's value is optional
 */
public boolean isOptional() {
  return this.optional;
}
/**
 * Sets whether this attribute is a distinguishing attribute for equality.
 *
 * @param distinguishing the new value of the flag
 */
public void setDistinguishingAttrForEquality(boolean distinguishing) {
  this.distinguishingAttrForEquality = distinguishing;
}
/**
 * Adds restrictions on the attribute values.
 * @param restrictions a map of restriction names to restriction values; the
 * accepted keys depend on the concrete attribute type -- TODO confirm per subclass
 * @throws AnnotationException if a restriction is not legal for this attribute
 */
public abstract void addRestrictions(Map restrictions) throws AnnotationException;
/**
 * Reports whether a default value was specified for this attribute.
 *
 * @return true if this attribute has a default value
 */
public boolean hasDefault() {
  return this.hasDefault;
}
/*** use getAttributedefault instead
Object getDefaultValue() {
return defaultValue;
}
*/
/**
 * Determine whether a candidate value is acceptable for this choice
 * attribute within the given annotation.
 *
 * <p>This is general functionality for all singleton choice attributes. If
 * you're about to change one of these values, you need to know if it CAN
 * be changed - and it can be changed if the annotation isn't attached to
 * anything, or if the resulting set of choice attributes satisfy SOME
 * restriction on EACH of the places it's attached to.
 *
 * @param annot the annotation whose attribute value would change
 * @param candidateVal the proposed new value
 * @return true if the value is acceptable, or if this isn't really a
 * choiceAttribute, and false otherwise
 */
public boolean choiceAttributeOK(AnnotationCore annot, Object candidateVal) {
  if (!isChoiceAttribute) {
    // Shouldn't be called in this case, but accept anything.
    return true;
  }
  String annotId = annot.getID(false);
  if (annotId == null) {
    // Without an ID nothing can point at this annotation, so any value is fine.
    return true;
  }
  AnnotationIDCache idCache = annot.getDoc().getIDCache();
  Set<AnnotationIDCache.AnnotationReference> referrers = idCache.getReferringAnnots(annotId);
  if ((referrers == null) || referrers.isEmpty()) {
    return true;
  }
  // Collect the annotation's current choice values, overlay the candidate,
  // and require the result to satisfy at least one set of restrictions for
  // each reference. Only choice values matter here, because only choice
  // values can be part of the label restrictions.
  Map<String, Object> choiceVals = new HashMap<String, Object>();
  List<AttributeType> attrTypes = annot.getAttributes();
  List attrValues = annot.getAttributeValues();
  for (int i = 0; i < attrTypes.size(); i++) {
    AttributeType attrType = attrTypes.get(i);
    if (attrType.isChoiceAttribute) {
      choiceVals.put(attrType.getName(), attrValues.get(i));
    }
  }
  choiceVals.put(this.name, candidateVal);
  for (AnnotationIDCache.AnnotationReference ref : referrers) {
    AnnotationCore referrer = ref.getAnnot();
    AnnotationAttributeType referrerAttr = (AnnotationAttributeType)
        referrer.getParentAtype().getAttributeType(ref.getAttrName());
    if (!referrerAttr.choicesSatisfyRestrictions(annot.getParentAtype().getLabel(), choiceVals)) {
      return false;
    }
  }
  return true;
}
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionGraph;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactFactory;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.PackageRootResolver;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.analysis.DependencyResolver.Dependency;
import com.google.devtools.build.lib.analysis.ExtraActionArtifactsProvider.ExtraArtifactSet;
import com.google.devtools.build.lib.analysis.config.BinTools;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.packages.AspectParameters;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.NoSuchThingException;
import com.google.devtools.build.lib.packages.PackageSpecification;
import com.google.devtools.build.lib.packages.RawAttributeMapper;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.TargetUtils;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.pkgcache.LoadingPhaseRunner.LoadingResult;
import com.google.devtools.build.lib.pkgcache.PackageManager;
import com.google.devtools.build.lib.rules.test.CoverageReportActionFactory;
import com.google.devtools.build.lib.rules.test.CoverageReportActionFactory.CoverageReportActionsWrapper;
import com.google.devtools.build.lib.rules.test.InstrumentedFilesProvider;
import com.google.devtools.build.lib.skyframe.ActionLookupValue;
import com.google.devtools.build.lib.skyframe.AspectValue;
import com.google.devtools.build.lib.skyframe.AspectValue.AspectKey;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.skyframe.CoverageReportValue;
import com.google.devtools.build.lib.skyframe.SkyframeAnalysisResult;
import com.google.devtools.build.lib.skyframe.SkyframeBuildView;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.Label;
import com.google.devtools.build.lib.util.RegexFilter;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.WalkableGraph;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
* <p>The BuildView presents a semantically-consistent and transitively-closed
* dependency graph for some set of packages.
*
* <h2>Package design</h2>
*
* <p>This package contains the Blaze dependency analysis framework (aka
* "analysis phase"). The goal of this code is to perform semantic analysis of
* all of the build targets required for a given build, to report
* errors/warnings for any problems in the input, and to construct an "action
* graph" (see {@code lib.actions} package) correctly representing the work to
* be done during the execution phase of the build.
*
* <p><b>Configurations</b> the inputs to a build come from two sources: the
* intrinsic inputs, specified in the BUILD file, are called <em>targets</em>.
* The environmental inputs, coming from the build tool, the command-line, or
* configuration files, are called the <em>configuration</em>. Only when a
* target and a configuration are combined is there sufficient information to
* perform a build. </p>
*
* <p>Targets are implemented by the {@link Target} hierarchy in the {@code
* lib.packages} code. Configurations are implemented by {@link
* BuildConfiguration}. The pair of these together is represented by an
* instance of class {@link ConfiguredTarget}; this is the root of a hierarchy
* with different implementations for each kind of target: source file, derived
* file, rules, etc.
*
* <p>The framework code in this package (as opposed to its subpackages) is
* responsible for constructing the {@code ConfiguredTarget} graph for a given
* target and configuration, taking care of such issues as:
* <ul>
* <li>caching common subgraphs.
* <li>detecting and reporting cycles.
* <li>correct propagation of errors through the graph.
* <li>reporting universal errors, such as dependencies from production code
* to tests, or to experimental branches.
* <li>capturing and replaying errors.
* <li>maintaining the graph from one build to the next to
* avoid unnecessary recomputation.
* <li>checking software licenses.
* </ul>
*
* <p>See also {@link ConfiguredTarget} which documents some important
* invariants.
*/
public class BuildView {
/**
 * Options that affect the <i>mechanism</i> of analysis. These are distinct from {@link
 * com.google.devtools.build.lib.analysis.config.BuildOptions}, which affect the <i>value</i>
 * of a BuildConfiguration.
 */
public static class Options extends OptionsBase {
// Analogous to make's "-k": analysis continues past individual target failures.
@Option(name = "keep_going",
abbrev = 'k',
defaultValue = "false",
category = "strategy",
help = "Continue as much as possible after an error. While the "
+ "target that failed, and those that depend on it, cannot be "
+ "analyzed (or built), the other prerequisites of these "
+ "targets can be analyzed (or built) all the same.")
public boolean keepGoing;
// Deprecated no-op; kept only so existing command lines don't break.
@Option(name = "analysis_warnings_as_errors",
deprecationWarning = "analysis_warnings_as_errors is now a no-op and will be removed in"
+ " an upcoming Blaze release",
defaultValue = "false",
category = "strategy",
help = "Treat visible analysis warnings as errors.")
public boolean analysisWarningsAsErrors;
// When set, the analysis cache is dropped right after analysis (see update()).
@Option(name = "discard_analysis_cache",
defaultValue = "false",
category = "strategy",
help = "Discard the analysis cache immediately after the analysis phase completes. "
+ "Reduces memory usage by ~10%, but makes further incremental builds slower.")
public boolean discardAnalysisCache;
// Regex filter limiting which targets get extra_actions scheduled.
@Option(name = "experimental_extra_action_filter",
defaultValue = "",
category = "experimental",
converter = RegexFilter.RegexFilterConverter.class,
help = "Filters set of targets to schedule extra_actions for.")
public RegexFilter extraActionFilter;
@Option(name = "experimental_extra_action_top_level_only",
defaultValue = "false",
category = "experimental",
help = "Only schedules extra_actions for top level targets.")
public boolean extraActionTopLevelOnly;
@Option(name = "version_window_for_dirty_node_gc",
defaultValue = "0",
category = "undocumented",
help = "Nodes that have been dirty for more than this many versions will be deleted"
+ " from the graph upon the next update. Values must be non-negative long integers,"
+ " or -1 indicating the maximum possible window.")
public long versionWindowForDirtyNodeGc;
}
// Made final: the logger is assigned once here and never reassigned; a mutable
// static is both a findbugs/ErrorProne warning and a thread-safety hazard.
private static final Logger LOG = Logger.getLogger(BuildView.class.getName());

private final BlazeDirectories directories;
private final SkyframeExecutor skyframeExecutor;
private final SkyframeBuildView skyframeBuildView;
private final PackageManager packageManager;
private final BinTools binTools;

// Deliberately non-final: replaced on every call to update() and by
// setConfigurationsForTesting(); null until the first analysis.
private BuildConfigurationCollection configurations;

private final ConfiguredRuleClassProvider ruleClassProvider;
private final ArtifactFactory artifactFactory;

/**
 * A factory class to create the coverage report action. May be null.
 */
@Nullable private final CoverageReportActionFactory coverageReportActionFactory;

/**
 * Used only for testing that we clear Skyframe caches correctly.
 * TODO(bazel-team): Remove this once we get rid of legacy Skyframe synchronization.
 */
private boolean skyframeCacheWasInvalidated = false;

/**
 * If the last build was executed with {@code Options#discard_analysis_cache} and we are not
 * running Skyframe full, we should clear the legacy data since it is out-of-sync.
 */
private boolean skyframeAnalysisWasDiscarded = false;
/** Exposes the target keys Skyframe evaluated during the last run; tests only. */
@VisibleForTesting
public Set<SkyKey> getSkyframeEvaluatedTargetKeysForTesting() {
  Set<SkyKey> evaluatedKeys = skyframeBuildView.getEvaluatedTargetKeys();
  return evaluatedKeys;
}
/** Returns the number of targets freshly evaluated in the last analysis run. */
public int getTargetsVisited() {
  Set<SkyKey> evaluatedKeys = skyframeBuildView.getEvaluatedTargetKeys();
  return evaluatedKeys.size();
}
/**
 * Returns true iff Skyframe was invalidated during the analysis phase.
 * TODO(bazel-team): Remove this once we do not need to keep legacy in sync with Skyframe.
 */
@VisibleForTesting
boolean wasSkyframeCacheInvalidatedDuringAnalysis() {
  return this.skyframeCacheWasInvalidated;
}
/**
 * Creates a BuildView and wires it to the given Skyframe executor.
 *
 * @param directories the Blaze directory layout (used here for the exec root)
 * @param packageManager used to load targets by label
 * @param ruleClassProvider supplies rule classes and aspect factories
 * @param skyframeExecutor must be non-null; receives the SkyframeBuildView created here
 * @param binTools embedded binaries passed through to the SkyframeBuildView
 * @param coverageReportActionFactory may be null (no coverage report actions)
 */
public BuildView(BlazeDirectories directories, PackageManager packageManager,
ConfiguredRuleClassProvider ruleClassProvider,
SkyframeExecutor skyframeExecutor,
BinTools binTools, CoverageReportActionFactory coverageReportActionFactory) {
this.directories = directories;
this.packageManager = packageManager;
this.binTools = binTools;
this.coverageReportActionFactory = coverageReportActionFactory;
this.artifactFactory = new ArtifactFactory(directories.getExecRoot());
this.ruleClassProvider = ruleClassProvider;
this.skyframeExecutor = Preconditions.checkNotNull(skyframeExecutor);
this.skyframeBuildView =
new SkyframeBuildView(
new ConfiguredTargetFactory(ruleClassProvider),
artifactFactory,
skyframeExecutor,
// Callback that clears this view's legacy state (see clear());
// NOTE(review): invoked by SkyframeBuildView -- confirm when.
new Runnable() {
@Override
public void run() {
clear();
}
},
binTools,
ruleClassProvider);
skyframeExecutor.setSkyframeBuildView(skyframeBuildView);
}
/**
 * Returns a view of the action graph; every lookup of a generating action is
 * delegated to the Skyframe executor.
 */
public ActionGraph getActionGraph() {
return new ActionGraph() {
@Override
public Action getGeneratingAction(Artifact artifact) {
return skyframeExecutor.getGeneratingAction(artifact);
}
};
}
/**
 * Returns whether the given configured target has errors; a null value is
 * treated as erroneous.
 */
@VisibleForTesting
public boolean hasErrors(ConfiguredTarget configuredTarget) {
  return (configuredTarget == null);
}
/**
 * Sets the configurations. Not thread-safe. DO NOT CALL except from tests!
 */
@VisibleForTesting
public void setConfigurationsForTesting(BuildConfigurationCollection configurations) {
  this.configurations = configurations;
  BuildConfiguration hostConfiguration = configurations.getHostConfiguration();
  skyframeBuildView.setTopLevelHostConfiguration(hostConfiguration);
}
/** Returns the configuration collection installed by the most recent update. */
public BuildConfigurationCollection getConfigurationCollection() {
  return this.configurations;
}
/**
 * Clear the graphs of ConfiguredTargets and Artifacts.
 * Currently this drops the artifact factory's contents; also registered as the
 * reset callback handed to SkyframeBuildView in the constructor.
 */
@VisibleForTesting
public void clear() {
artifactFactory.clear();
}
/** Returns the artifact factory owned by this view. */
public ArtifactFactory getArtifactFactory() {
  return this.artifactFactory;
}
/** Exposes the last workspace status action; tests only. */
@VisibleForTesting
WorkspaceStatusAction getLastWorkspaceBuildInfoActionForTesting() {
  WorkspaceStatusAction action = skyframeExecutor.getLastWorkspaceStatusActionForTesting();
  return action;
}
/**
 * Returns the corresponding ConfiguredTarget, if one exists; otherwise throws a {@link
 * NoSuchConfiguredTargetException}.
 */
@ThreadSafe
private ConfiguredTarget getConfiguredTarget(Target target, BuildConfiguration config)
    throws NoSuchConfiguredTargetException {
  ConfiguredTarget existing = getExistingConfiguredTarget(target.getLabel(), config);
  if (existing != null) {
    return existing;
  }
  throw new NoSuchConfiguredTargetException(target.getLabel(), config);
}
/**
 * Obtains a {@link ConfiguredTarget} given a {@code label}: the label is first
 * resolved to a {@link Target} through the package cache, then looked up via
 * {@link #getConfiguredTarget(Target, BuildConfiguration)}.
 */
public ConfiguredTarget getConfiguredTarget(Label label, BuildConfiguration config)
    throws NoSuchPackageException, NoSuchTargetException, NoSuchConfiguredTargetException {
  Target target = packageManager.getLoadedTarget(label);
  return getConfiguredTarget(target, config);
}
/** Returns the direct prerequisites of {@code ct}, without a target cache. */
public Iterable<ConfiguredTarget> getDirectPrerequisites(ConfiguredTarget ct)
    throws InterruptedException {
  // A null cache makes the resolver fall back to the package manager.
  return getDirectPrerequisites(ct, null);
}
/** Returns the direct prerequisites of {@code ct}, resolving labels via {@code targetCache}. */
public Iterable<ConfiguredTarget> getDirectPrerequisites(
    ConfiguredTarget ct, @Nullable final LoadingCache<Label, Target> targetCache)
    throws InterruptedException {
  Iterable<Dependency> deps = getDirectPrerequisiteDependencies(ct, targetCache);
  return skyframeExecutor.getConfiguredTargets(ct.getConfiguration(), deps, false);
}
/**
 * Computes the direct dependency edges of {@code ct}. Non-rule targets have no
 * dependencies here; for rules, a resolver that suppresses visibility/package-group
 * error hooks is used, since those errors were already reported during analysis.
 *
 * @param ct the configured target whose dependencies are wanted
 * @param targetCache optional label-to-target cache; if null, targets are loaded
 * through the package manager
 */
public Iterable<Dependency> getDirectPrerequisiteDependencies(
ConfiguredTarget ct, @Nullable final LoadingCache<Label, Target> targetCache)
throws InterruptedException {
if (!(ct.getTarget() instanceof Rule)) {
return ImmutableList.of();
}
// Local subclass: ignores invalid-reference hooks and resolves targets from
// the cache when one was supplied.
class SilentDependencyResolver extends DependencyResolver {
@Override
protected void invalidVisibilityReferenceHook(TargetAndConfiguration node, Label label) {
// The error must have been reported already during analysis.
}
@Override
protected void invalidPackageGroupReferenceHook(TargetAndConfiguration node, Label label) {
// The error must have been reported already during analysis.
}
@Override
protected Target getTarget(Label label) throws NoSuchThingException {
if (targetCache == null) {
return packageManager.getLoadedTarget(label);
}
try {
return targetCache.get(label);
} catch (ExecutionException e) {
// All lookups should succeed because we should not be looking up any targets in error.
throw new IllegalStateException(e);
}
}
}
DependencyResolver dependencyResolver = new SilentDependencyResolver();
TargetAndConfiguration ctgNode =
new TargetAndConfiguration(ct.getTarget(), ct.getConfiguration());
return dependencyResolver.dependentNodes(ctgNode, configurations.getHostConfiguration(),
getConfigurableAttributeKeys(ctgNode));
}
/**
 * Returns ConfigMatchingProvider instances corresponding to the configurable attribute keys
 * present in this rule's attributes.
 */
private Set<ConfigMatchingProvider> getConfigurableAttributeKeys(TargetAndConfiguration ctg) {
  if (!(ctg.getTarget() instanceof Rule)) {
    return ImmutableSet.of();
  }
  Rule rule = (Rule) ctg.getTarget();
  RawAttributeMapper attributeMap = RawAttributeMapper.of(rule);
  ImmutableSet.Builder<ConfigMatchingProvider> providers = ImmutableSet.builder();
  for (Attribute attribute : rule.getAttributes()) {
    for (Label label : attributeMap.getConfigurabilityKeys(attribute.getName(), attribute.getType())) {
      // Reserved labels (e.g. the default branch of a select) carry no provider.
      if (!Type.Selector.isReservedLabel(label)) {
        try {
          ConfiguredTarget ct = getConfiguredTarget(label, ctg.getConfiguration());
          providers.add(Preconditions.checkNotNull(ct.getProvider(ConfigMatchingProvider.class)));
        } catch (
            NoSuchPackageException | NoSuchTargetException | NoSuchConfiguredTargetException e) {
          // All lookups should succeed because we should not be looking up any targets in error.
          throw new IllegalStateException(e);
        }
      }
    }
  }
  return providers.build();
}
/** Returns the rule that generated the given output file target. */
public TransitiveInfoCollection getGeneratingRule(OutputFileConfiguredTarget target) {
  TransitiveInfoCollection generatingRule = target.getGeneratingRule();
  return generatingRule;
}
// Deliberately unsupported: BuildView must never be used as a hash key, because
// an identity-based hash would introduce nondeterminism across runs.
@Override
public int hashCode() {
throw new UnsupportedOperationException(); // avoid nondeterminism
}
/**
 * Return value for {@link BuildView#update} and {@code BuildTool.prepareToBuild}.
 */
public static final class AnalysisResult {
// Canonical empty result: no targets, no aspects, no tests, no error.
public static final AnalysisResult EMPTY =
new AnalysisResult(
ImmutableList.<ConfiguredTarget>of(),
ImmutableList.<AspectValue>of(),
null,
null,
null,
ImmutableList.<Artifact>of(),
ImmutableList.<ConfiguredTarget>of(),
ImmutableList.<ConfiguredTarget>of(),
null);
private final ImmutableList<ConfiguredTarget> targetsToBuild;
// Null (not empty) when testing was not requested at all.
@Nullable private final ImmutableList<ConfiguredTarget> targetsToTest;
@Nullable private final String error;
private final ActionGraph actionGraph;
private final ImmutableSet<Artifact> artifactsToBuild;
private final ImmutableSet<ConfiguredTarget> parallelTests;
private final ImmutableSet<ConfiguredTarget> exclusiveTests;
@Nullable private final TopLevelArtifactContext topLevelContext;
private final ImmutableList<AspectValue> aspects;
// All collections are defensively copied into immutable forms.
private AnalysisResult(
Collection<ConfiguredTarget> targetsToBuild,
Collection<AspectValue> aspects,
Collection<ConfiguredTarget> targetsToTest,
@Nullable String error,
ActionGraph actionGraph,
Collection<Artifact> artifactsToBuild,
Collection<ConfiguredTarget> parallelTests,
Collection<ConfiguredTarget> exclusiveTests,
TopLevelArtifactContext topLevelContext) {
this.targetsToBuild = ImmutableList.copyOf(targetsToBuild);
this.aspects = ImmutableList.copyOf(aspects);
this.targetsToTest = targetsToTest == null ? null : ImmutableList.copyOf(targetsToTest);
this.error = error;
this.actionGraph = actionGraph;
this.artifactsToBuild = ImmutableSet.copyOf(artifactsToBuild);
this.parallelTests = ImmutableSet.copyOf(parallelTests);
this.exclusiveTests = ImmutableSet.copyOf(exclusiveTests);
this.topLevelContext = topLevelContext;
}
/**
 * Returns configured targets to build.
 */
public Collection<ConfiguredTarget> getTargetsToBuild() {
return targetsToBuild;
}
/**
 * Returns aspects of configured targets to build.
 *
 * <p>If this list is empty, build the targets returned by {@code getTargetsToBuild()}.
 * Otherwise, only build these aspects of the targets returned by {@code getTargetsToBuild()}.
 */
public Collection<AspectValue> getAspects() {
return aspects;
}
/**
 * Returns the configured targets to run as tests, or {@code null} if testing was not
 * requested (e.g. "build" command rather than "test" command).
 */
@Nullable
public Collection<ConfiguredTarget> getTargetsToTest() {
return targetsToTest;
}
/** Returns extra artifacts (build info, extra actions, coverage) to build. */
public ImmutableSet<Artifact> getAdditionalArtifactsToBuild() {
return artifactsToBuild;
}
/** Returns tests that must not run concurrently with other tests. */
public ImmutableSet<ConfiguredTarget> getExclusiveTests() {
return exclusiveTests;
}
/** Returns tests that may run in parallel. */
public ImmutableSet<ConfiguredTarget> getParallelTests() {
return parallelTests;
}
/**
 * Returns an error description (if any).
 */
@Nullable public String getError() {
return error;
}
/** Returns true iff an error description was recorded. */
public boolean hasError() {
return error != null;
}
/**
 * Returns the action graph.
 */
public ActionGraph getActionGraph() {
return actionGraph;
}
/** Returns the top-level artifact context, or null for {@link #EMPTY}. */
public TopLevelArtifactContext getTopLevelContext() {
return topLevelContext;
}
}
/**
 * Returns the collection of configured targets corresponding to any of the provided targets.
 */
@VisibleForTesting
static Iterable<? extends ConfiguredTarget> filterTestsByTargets(
    Collection<? extends ConfiguredTarget> targets,
    final Set<? extends Target> allowedTargets) {
  Predicate<ConfiguredTarget> isAllowed =
      new Predicate<ConfiguredTarget>() {
        @Override
        public boolean apply(ConfiguredTarget candidate) {
          return allowedTargets.contains(candidate.getTarget());
        }
      };
  return Iterables.filter(targets, isAllowed);
}
/** Prepares every known configuration for the upcoming build. */
private void prepareToBuild(PackageRootResolver resolver) throws ViewCreationFailedException {
  for (BuildConfiguration configuration : configurations.getAllConfigurations()) {
    configuration.prepareToBuild(directories.getExecRoot(), getArtifactFactory(), resolver);
  }
}
/**
 * Runs the analysis phase over the loaded targets and requested aspects.
 *
 * <p>Drops stale caches when the configurations changed (or the previous build
 * discarded the analysis cache), configures all top-level targets and aspects
 * via Skyframe, and packages the outcome into an {@link AnalysisResult}.
 *
 * @throws ViewCreationFailedException if an unknown aspect is requested or a
 * configuration cannot be prepared
 * @throws InterruptedException if the current thread was interrupted
 */
@ThreadCompatible
public AnalysisResult update(
LoadingResult loadingResult,
BuildConfigurationCollection configurations,
List<String> aspects,
Options viewOptions,
TopLevelArtifactContext topLevelOptions,
EventHandler eventHandler,
EventBus eventBus)
throws ViewCreationFailedException, InterruptedException {
LOG.info("Starting analysis");
pollInterruptedStatus();
skyframeBuildView.resetEvaluatedConfiguredTargetKeysSet();
Collection<Target> targets = loadingResult.getTargets();
eventBus.post(new AnalysisPhaseStartedEvent(targets));
skyframeCacheWasInvalidated = false;
// Clear all cached ConfiguredTargets on configuration change. We need to do this explicitly
// because we need to make sure that the legacy action graph does not contain multiple actions
// with different versions of the same (target/host/etc.) configuration.
// In the future the action graph will be probably be keyed by configurations, which should
// obviate the need for this workaround.
//
// Also if --discard_analysis_cache was used in the last build we want to clear the legacy
// data.
if ((this.configurations != null && !configurations.equals(this.configurations))
|| skyframeAnalysisWasDiscarded) {
LOG.info("Discarding analysis cache: configurations have changed.");
skyframeExecutor.dropConfiguredTargets();
skyframeCacheWasInvalidated = true;
clear();
}
skyframeAnalysisWasDiscarded = false;
ImmutableMap<PackageIdentifier, Path> packageRoots = loadingResult.getPackageRoots();
this.configurations = configurations;
skyframeBuildView.setTopLevelHostConfiguration(this.configurations.getHostConfiguration());
setArtifactRoots(packageRoots);
// Determine the configurations.
List<TargetAndConfiguration> nodes = nodesForTargets(targets);
// One ConfiguredTargetKey per (target, configuration) pair.
List<ConfiguredTargetKey> targetSpecs =
Lists.transform(nodes, new Function<TargetAndConfiguration, ConfiguredTargetKey>() {
@Override
public ConfiguredTargetKey apply(TargetAndConfiguration node) {
return new ConfiguredTargetKey(node.getLabel(), node.getConfiguration());
}
});
// Expand each requested aspect name across all target specs; unknown names fail hard.
List<AspectKey> aspectKeys = new ArrayList<>();
for (String aspect : aspects) {
@SuppressWarnings("unchecked")
final Class<? extends ConfiguredAspectFactory> aspectFactoryClass =
(Class<? extends ConfiguredAspectFactory>)
ruleClassProvider.getAspectFactoryMap().get(aspect);
if (aspectFactoryClass != null) {
for (ConfiguredTargetKey targetSpec : targetSpecs) {
aspectKeys.add(
AspectValue.createAspectKey(
targetSpec.getLabel(), targetSpec.getConfiguration(), aspectFactoryClass));
}
} else {
throw new ViewCreationFailedException("Aspect '" + aspect + "' is unknown");
}
}
prepareToBuild(new SkyframePackageRootResolver(skyframeExecutor));
skyframeExecutor.injectWorkspaceStatusData();
SkyframeAnalysisResult skyframeAnalysisResult;
try {
skyframeAnalysisResult =
skyframeBuildView.configureTargets(
targetSpecs, aspectKeys, eventBus, viewOptions.keepGoing);
} finally {
// Always reset invalidation bookkeeping, even if configureTargets threw.
skyframeBuildView.clearInvalidatedConfiguredTargets();
}
int numTargetsToAnalyze = nodes.size();
int numSuccessful = skyframeAnalysisResult.getConfiguredTargets().size();
boolean analysisSuccessful = (numSuccessful == numTargetsToAnalyze);
// Partial success is only possible with --keep_going; surface it to the user.
if (0 < numSuccessful && numSuccessful < numTargetsToAnalyze) {
String msg = String.format("Analysis succeeded for only %d of %d top-level targets",
numSuccessful, numTargetsToAnalyze);
eventHandler.handle(Event.info(msg));
LOG.info(msg);
}
AnalysisResult result =
createResult(
loadingResult,
topLevelOptions,
viewOptions,
skyframeAnalysisResult.getConfiguredTargets(),
skyframeAnalysisResult.getAspects(),
skyframeAnalysisResult.getWalkableGraph(),
analysisSuccessful);
LOG.info("Finished analysis");
return result;
}
/**
 * Assembles the {@link AnalysisResult} from the configured targets produced by
 * Skyframe: selects the test subset, gathers extra artifacts (build info,
 * extra actions, coverage), schedules tests, and wraps the walkable graph in
 * an {@link ActionGraph} view.
 */
private AnalysisResult createResult(
LoadingResult loadingResult,
TopLevelArtifactContext topLevelOptions,
BuildView.Options viewOptions,
Collection<ConfiguredTarget> configuredTargets,
Collection<AspectValue> aspects,
final WalkableGraph graph,
boolean analysisSuccessful)
throws InterruptedException {
Collection<Target> testsToRun = loadingResult.getTestsToRun();
// Stays null (not empty) when testing was not requested; AnalysisResult
// preserves that distinction.
Collection<ConfiguredTarget> allTargetsToTest = null;
if (testsToRun != null) {
// Determine the subset of configured targets that are meant to be run as tests.
allTargetsToTest = Lists.newArrayList(
filterTestsByTargets(configuredTargets, Sets.newHashSet(testsToRun)));
}
Set<Artifact> artifactsToBuild = new HashSet<>();
Set<ConfiguredTarget> parallelTests = new HashSet<>();
Set<ConfiguredTarget> exclusiveTests = new HashSet<>();
// build-info and build-changelist.
Collection<Artifact> buildInfoArtifacts = skyframeExecutor.getWorkspaceStatusArtifacts();
Preconditions.checkState(buildInfoArtifacts.size() == 2, buildInfoArtifacts);
artifactsToBuild.addAll(buildInfoArtifacts);
// Extra actions
addExtraActionsIfRequested(viewOptions, artifactsToBuild, configuredTargets);
// Coverage
NestedSet<Artifact> baselineCoverageArtifacts = getBaselineCoverageArtifacts(configuredTargets);
Iterables.addAll(artifactsToBuild, baselineCoverageArtifacts);
if (coverageReportActionFactory != null) {
CoverageReportActionsWrapper actionsWrapper;
actionsWrapper = coverageReportActionFactory.createCoverageReportActionsWrapper(
allTargetsToTest,
baselineCoverageArtifacts,
artifactFactory,
CoverageReportValue.ARTIFACT_OWNER);
if (actionsWrapper != null) {
ImmutableList <Action> actions = actionsWrapper.getActions();
skyframeExecutor.injectCoverageReportData(actions);
artifactsToBuild.addAll(actionsWrapper.getCoverageOutputs());
}
}
// Tests. This must come last, so that the exclusive tests are scheduled after everything else.
scheduleTestsIfRequested(parallelTests, exclusiveTests, topLevelOptions, allTargetsToTest);
String error = !loadingResult.hasLoadingError()
? (analysisSuccessful
? null
: "execution phase succeeded, but not all targets were analyzed")
: "execution phase succeeded, but there were loading phase errors";
// Resolves generating actions through the walkable Skyframe graph captured above.
final ActionGraph actionGraph = new ActionGraph() {
@Nullable
@Override
public Action getGeneratingAction(Artifact artifact) {
ArtifactOwner artifactOwner = artifact.getArtifactOwner();
if (artifactOwner instanceof ActionLookupValue.ActionLookupKey) {
SkyKey key = ActionLookupValue.key((ActionLookupValue.ActionLookupKey) artifactOwner);
ActionLookupValue val = (ActionLookupValue) graph.getValue(key);
return val == null ? null : val.getGeneratingAction(artifact);
}
return null;
}
};
return new AnalysisResult(
configuredTargets,
aspects,
allTargetsToTest,
error,
actionGraph,
artifactsToBuild,
parallelTests,
exclusiveTests,
topLevelOptions);
}
/** Collects transitive baseline coverage artifacts from all instrumented targets. */
private static NestedSet<Artifact> getBaselineCoverageArtifacts(
    Collection<ConfiguredTarget> configuredTargets) {
  NestedSetBuilder<Artifact> coverage = NestedSetBuilder.stableOrder();
  for (ConfiguredTarget target : configuredTargets) {
    InstrumentedFilesProvider provider = target.getProvider(InstrumentedFilesProvider.class);
    if (provider == null) {
      // Target is not instrumented; nothing to contribute.
      continue;
    }
    coverage.addTransitive(provider.getBaselineCoverageArtifacts());
  }
  return coverage.build();
}
/**
 * Adds the artifacts of any requested extra actions to {@code artifactsToBuild},
 * honoring the top-level-only and regex-filter options.
 */
private void addExtraActionsIfRequested(BuildView.Options viewOptions,
    Set<Artifact> artifactsToBuild, Iterable<ConfiguredTarget> topLevelTargets) {
  NestedSetBuilder<ExtraArtifactSet> extraSets = NestedSetBuilder.stableOrder();
  for (ConfiguredTarget target : topLevelTargets) {
    ExtraActionArtifactsProvider provider =
        target.getProvider(ExtraActionArtifactsProvider.class);
    if (provider == null) {
      continue;
    }
    if (viewOptions.extraActionTopLevelOnly) {
      extraSets.add(ExtraArtifactSet.of(target.getLabel(), provider.getExtraActionArtifacts()));
    } else {
      extraSets.addTransitive(provider.getTransitiveExtraActionArtifacts());
    }
  }
  RegexFilter filter = viewOptions.extraActionFilter;
  for (ExtraArtifactSet set : extraSets.build()) {
    // A null filter means "include everything".
    if (filter != null && !filter.isIncluded(set.getLabel().toString())) {
      continue;
    }
    artifactsToBuild.addAll(set.getArtifacts());
  }
}
/**
 * Schedules tests for execution unless this is a compilation-only request (i.e. the
 * files-to-compile or compilation-prerequisites output groups were asked for) or there are
 * no test targets at all.
 */
private static void scheduleTestsIfRequested(Collection<ConfiguredTarget> targetsToTest,
    Collection<ConfiguredTarget> targetsToTestExclusive, TopLevelArtifactContext topLevelOptions,
    Collection<ConfiguredTarget> allTestTargets) {
  Set<String> requestedGroups = topLevelOptions.outputGroups();
  boolean compileOnly = requestedGroups.contains(OutputGroupProvider.FILES_TO_COMPILE)
      || requestedGroups.contains(OutputGroupProvider.COMPILATION_PREREQUISITES);
  if (compileOnly || allTestTargets == null) {
    return;
  }
  scheduleTests(targetsToTest, targetsToTestExclusive, allTestTargets,
      topLevelOptions.runTestsExclusively());
}
/**
 * Partitions {@code allTestTargets} into parallel and exclusive test sets, writing into
 * {@code targetsToTest} and {@code targetsToTestExclusive} respectively. A test runs
 * exclusively when {@code isExclusive} is set globally or its rule is tagged as an
 * exclusive test rule.
 */
private static void scheduleTests(Collection<ConfiguredTarget> targetsToTest,
    Collection<ConfiguredTarget> targetsToTestExclusive,
    Collection<ConfiguredTarget> allTestTargets,
    boolean isExclusive) {
  for (ConfiguredTarget target : allTestTargets) {
    // Only rule targets can be tests; other target kinds are skipped.
    if (target.getTarget() instanceof Rule) {
      boolean exclusive =
          isExclusive || TargetUtils.isExclusiveTestRule((Rule) target.getTarget());
      Collection<ConfiguredTarget> testCollection = exclusive
          ? targetsToTestExclusive
          : targetsToTest;
      testCollection.add(target);
    }
  }
}
/**
 * Builds the list of {@code TargetAndConfiguration} analysis nodes, pairing every requested
 * target with every top-level target configuration.
 */
@VisibleForTesting
List<TargetAndConfiguration> nodesForTargets(Collection<Target> targets) {
  // A LinkedHashSet removes duplicate nodes (possible for input files and package groups)
  // while keeping insertion order stable.
  LinkedHashSet<TargetAndConfiguration> uniqueNodes = new LinkedHashSet<>(targets.size());
  for (BuildConfiguration targetConfig : configurations.getTargetConfigurations()) {
    for (Target target : targets) {
      BuildConfiguration topLevelConfig =
          BuildConfigurationCollection.configureTopLevelTarget(targetConfig, target);
      uniqueNodes.add(new TargetAndConfiguration(target, topLevelConfig));
    }
  }
  return ImmutableList.copyOf(uniqueNodes);
}
/**
 * Returns an existing ConfiguredTarget for the specified target and
 * configuration, or null if none exists. No validity check is done.
 */
@ThreadSafe
public ConfiguredTarget getExistingConfiguredTarget(Target target, BuildConfiguration config) {
  // Delegates to the label-based overload; only the target's label participates in the lookup.
  return getExistingConfiguredTarget(target.getLabel(), config);
}
/**
 * Returns an existing ConfiguredTarget for the specified node, or null if none exists. No
 * validity check is done.
 */
@ThreadSafe
private ConfiguredTarget getExistingConfiguredTarget(
    Label label, BuildConfiguration configuration) {
  // Looks up the single (label, configuration) dependency via Skyframe and takes the first
  // result, or null when nothing was returned.
  // NOTE(review): the trailing boolean flag's meaning is defined by
  // SkyframeExecutor.getConfiguredTargets - confirm it requests existing-values-only.
  return Iterables.getFirst(
      skyframeExecutor.getConfiguredTargets(
          configuration, ImmutableList.of(new Dependency(label, configuration)), true),
      null);
}
/**
 * Computes the attribute-to-prerequisite configured-target map for {@code target} by running
 * dependency resolution against already-computed Skyframe values.
 *
 * <p>Test-only: visibility and package-group violations, which normal analysis would report
 * as errors, are escalated to {@link RuntimeException}s here.
 *
 * @throws InterruptedException if the Skyframe lookup is interrupted
 */
@VisibleForTesting
ListMultimap<Attribute, ConfiguredTarget> getPrerequisiteMapForTesting(ConfiguredTarget target)
    throws InterruptedException {
  DependencyResolver resolver = new DependencyResolver() {
    @Override
    protected void invalidVisibilityReferenceHook(TargetAndConfiguration node, Label label) {
      throw new RuntimeException("bad visibility on " + label + " during testing unexpected");
    }
    @Override
    protected void invalidPackageGroupReferenceHook(TargetAndConfiguration node, Label label) {
      throw new RuntimeException("bad package group on " + label + " during testing unexpected");
    }
    @Override
    protected Target getTarget(Label label) throws NoSuchThingException {
      return packageManager.getLoadedTarget(label);
    }
  };
  TargetAndConfiguration ctNode = new TargetAndConfiguration(target);
  ListMultimap<Attribute, Dependency> depNodeNames;
  try {
    // Resolve the target's dependency map; no aspect is applied here.
    depNodeNames = resolver.dependentNodeMap(ctNode, configurations.getHostConfiguration(),
        /*aspect=*/null, AspectParameters.EMPTY, getConfigurableAttributeKeys(ctNode));
  } catch (EvalException e) {
    // Configurable-attribute evaluation failures are treated as programming errors in tests.
    throw new IllegalStateException(e);
  }
  ImmutableMap<Dependency, ConfiguredTarget> cts = skyframeExecutor.getConfiguredTargetMap(
      ctNode.getConfiguration(), ImmutableSet.copyOf(depNodeNames.values()), false);
  // Re-key the resolved configured targets by attribute, preserving the resolver's ordering.
  ImmutableListMultimap.Builder<Attribute, ConfiguredTarget> builder =
      ImmutableListMultimap.builder();
  for (Map.Entry<Attribute, Dependency> entry : depNodeNames.entries()) {
    builder.put(entry.getKey(), cts.get(entry.getValue()));
  }
  return builder.build();
}
/**
 * Sets the possible artifact roots in the artifact factory. This allows the
 * factory to resolve paths with unknown roots to artifacts.
 * <p>
 * <em>Note: This must be called before any call to
 * {@link #getConfiguredTarget(Label, BuildConfiguration)}
 * </em>
 */
@VisibleForTesting // for BuildViewTestCase
public void setArtifactRoots(ImmutableMap<PackageIdentifier, Path> packageRoots) {
  // Intern one Root instance per distinct source path so equal paths share a Root.
  Map<Path, Root> rootsByPath = new HashMap<>();
  Map<PackageIdentifier, Root> sourceRoots = new HashMap<>();
  for (Map.Entry<PackageIdentifier, Path> entry : packageRoots.entrySet()) {
    Path sourcePath = entry.getValue();
    Root root = rootsByPath.get(sourcePath);
    if (root == null) {
      root = Root.asSourceRoot(sourcePath);
      rootsByPath.put(sourcePath, root);
    }
    sourceRoots.put(entry.getKey(), root);
  }
  // Source artifact roots:
  artifactFactory.setPackageRoots(sourceRoots);
  // Derived artifact roots. build-info.txt and friends live in a root that is not
  // configuration specific; each configuration then contributes its own roots.
  ImmutableList.Builder<Root> derivedRoots = ImmutableList.builder();
  derivedRoots.add(directories.getBuildDataDirectory());
  for (BuildConfiguration configuration : configurations.getAllConfigurations()) {
    // Duplicates are removed automatically by the setter below.
    derivedRoots.addAll(configuration.getRoots());
  }
  artifactFactory.setDerivedArtifactRoots(derivedRoots.build());
}
/**
 * Returns a configured target for the specified target and configuration.
 * This should only be called from test cases, and is needed, because
 * plain {@link #getConfiguredTarget(Target, BuildConfiguration)} does not
 * construct the configured target graph, and would thus fail if called from
 * outside an update.
 *
 * @throws NoSuchPackageException if the label's package cannot be loaded
 * @throws NoSuchTargetException if the label does not name a target in its package
 */
@VisibleForTesting
public ConfiguredTarget getConfiguredTargetForTesting(Label label, BuildConfiguration config)
    throws NoSuchPackageException, NoSuchTargetException {
  return getConfiguredTargetForTesting(packageManager.getLoadedTarget(label), config);
}
/** Test-only overload that resolves the configured target directly through Skyframe. */
@VisibleForTesting
public ConfiguredTarget getConfiguredTargetForTesting(Target target, BuildConfiguration config) {
  return skyframeExecutor.getConfiguredTargetForTesting(target.getLabel(), config);
}
/**
 * Returns a RuleContext which is the same as the original RuleContext of the target parameter.
 *
 * <p>Builds a fresh {@link CachingAnalysisEnvironment} in the target's own configuration,
 * reporting analysis events to {@code eventHandler}.
 *
 * @throws InterruptedException if prerequisite resolution is interrupted
 */
@VisibleForTesting
public RuleContext getRuleContextForTesting(
    ConfiguredTarget target, StoredEventHandler eventHandler) throws InterruptedException {
  BuildConfiguration config = target.getConfiguration();
  CachingAnalysisEnvironment analysisEnvironment =
      new CachingAnalysisEnvironment(artifactFactory,
          new ConfiguredTargetKey(target.getLabel(), config),
          /*isSystemEnv=*/false, config.extendedSanityChecks(), eventHandler,
          /*skyframeEnv=*/null, config.isActionsEnabled(), binTools);
  // Visibility is opened to EVERYTHING so the context can be rebuilt without re-running
  // the package-group checks normal analysis performs.
  return new RuleContext.Builder(analysisEnvironment,
      (Rule) target.getTarget(), config, configurations.getHostConfiguration(),
      ruleClassProvider.getPrerequisiteValidator())
      .setVisibility(NestedSetBuilder.<PackageSpecification>create(
          Order.STABLE_ORDER, PackageSpecification.EVERYTHING))
      .setPrerequisites(getPrerequisiteMapForTesting(target))
      .setConfigConditions(ImmutableSet.<ConfigMatchingProvider>of())
      .build();
}
/**
 * Creates and returns a rule context that is equivalent to the one that was used to create the
 * given configured target.
 *
 * <p>Unlike {@link #getRuleContextForTesting(ConfiguredTarget, StoredEventHandler)}, the
 * caller supplies the {@link AnalysisEnvironment} to use.
 *
 * @throws InterruptedException if prerequisite resolution is interrupted
 */
@VisibleForTesting
public RuleContext getRuleContextForTesting(ConfiguredTarget target, AnalysisEnvironment env)
    throws InterruptedException {
  BuildConfiguration targetConfig = target.getConfiguration();
  // Visibility is opened to EVERYTHING; see the StoredEventHandler overload above.
  return new RuleContext.Builder(
      env, (Rule) target.getTarget(), targetConfig, configurations.getHostConfiguration(),
      ruleClassProvider.getPrerequisiteValidator())
      .setVisibility(NestedSetBuilder.<PackageSpecification>create(
          Order.STABLE_ORDER, PackageSpecification.EVERYTHING))
      .setPrerequisites(getPrerequisiteMapForTesting(target))
      .setConfigConditions(ImmutableSet.<ConfigMatchingProvider>of())
      .build();
}
/**
 * For a configured target {@code dependentTarget}, returns the depended-upon configured
 * target whose label matches {@code desiredTarget}, or null if there is none. Useful for
 * obtaining a target with the aspects required by the dependent.
 */
@VisibleForTesting
public ConfiguredTarget getPrerequisiteConfiguredTargetForTesting(
    ConfiguredTarget dependentTarget, ConfiguredTarget desiredTarget)
    throws InterruptedException {
  Label wantedLabel = desiredTarget.getLabel();
  for (ConfiguredTarget prerequisite
      : getPrerequisiteMapForTesting(dependentTarget).values()) {
    if (prerequisite.getLabel().equals(wantedLabel)) {
      return prerequisite;
    }
  }
  return null;
}
/**
 * Tests and clears the current thread's pending "interrupted" status, and
 * throws InterruptedException iff it was set.
 *
 * <p>{@link Thread#interrupted()} clears the flag as a side effect, so a set status is
 * always consumed by this call.
 */
protected final void pollInterruptedStatus() throws InterruptedException {
  if (Thread.interrupted()) {
    throw new InterruptedException();
  }
}
/**
 * Drops the analysis cache. If building with Skyframe, targets in {@code topLevelTargets} may
 * remain in the cache for use during the execution phase.
 *
 * @see BuildView.Options#discardAnalysisCache
 */
public void clearAnalysisCache(Collection<ConfiguredTarget> topLevelTargets) {
  // TODO(bazel-team): Consider clearing packages too to save more memory.
  // Record the discard so later phases know analysis state is no longer complete.
  skyframeAnalysisWasDiscarded = true;
  skyframeExecutor.clearAnalysisCache(topLevelTargets);
}
}
| |
package com.example.bluetoothgatt;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothManager;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
/**
* Created by Dave Smith
* Double Encore, Inc.
* BeaconActivity
*/
/**
 * Created by Dave Smith
 * Double Encore, Inc.
 * BeaconActivity
 *
 * Scans for Bluetooth LE devices advertising the thermometer service (KitKat-era
 * startLeScan API) and shows each unique beacon in a list, alternating between a
 * 5-second scan window and a 2.5-second idle gap.
 */
public class BeaconKitKatActivity extends Activity implements BluetoothAdapter.LeScanCallback {
    private static final String TAG = "BeaconActivity";
    private BluetoothAdapter mBluetoothAdapter;
    /* Collect unique devices discovered, keyed by address */
    private HashMap<String, TemperatureBeacon> mBeacons;
    private BeaconAdapter mAdapter;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
        setProgressBarIndeterminate(true);
        /*
         * We are going to display all the device beacons that we discover
         * in a list, using a custom adapter implementation
         */
        ListView list = new ListView(this);
        mAdapter = new BeaconAdapter(this);
        list.setAdapter(mAdapter);
        setContentView(list);
        /*
         * Bluetooth in Android 4.3 is accessed via the BluetoothManager, rather than
         * the old static BluetoothAdapter.getInstance()
         */
        BluetoothManager manager = (BluetoothManager) getSystemService(BLUETOOTH_SERVICE);
        mBluetoothAdapter = manager.getAdapter();
        mBeacons = new HashMap<String, TemperatureBeacon>();
    }
    @Override
    protected void onResume() {
        super.onResume();
        /*
         * We need to enforce that Bluetooth is first enabled, and take the
         * user to settings to enable it if they have not done so.
         */
        if (mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled()) {
            //Bluetooth is disabled
            Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivity(enableBtIntent);
            finish();
            return;
        }
        /*
         * Check for Bluetooth LE Support. In production, our manifest entry will keep this
         * from installing on these devices, but this will allow test devices or other
         * sideloads to report whether or not the feature exists.
         */
        if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE)) {
            Toast.makeText(this, "No LE Support.", Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        //Begin scanning for LE devices
        startScan();
    }
    @Override
    protected void onPause() {
        super.onPause();
        //Cancel any scans in progress and the pending start/stop cycle
        mHandler.removeCallbacks(mStopRunnable);
        mHandler.removeCallbacks(mStartRunnable);
        mBluetoothAdapter.stopLeScan(this);
    }
    /** Posted at the end of each scan window to pause scanning. */
    private Runnable mStopRunnable = new Runnable() {
        @Override
        public void run() {
            stopScan();
        }
    };
    /** Posted after each idle gap to resume scanning. */
    private Runnable mStartRunnable = new Runnable() {
        @Override
        public void run() {
            startScan();
        }
    };
    private void startScan() {
        //Scan for devices advertising the thermometer service
        mBluetoothAdapter.startLeScan(new UUID[] {TemperatureBeacon.THERM_SERVICE.getUuid()}, this);
        setProgressBarIndeterminateVisibility(true);
        mHandler.postDelayed(mStopRunnable, 5000);
    }
    private void stopScan() {
        mBluetoothAdapter.stopLeScan(this);
        setProgressBarIndeterminateVisibility(false);
        mHandler.postDelayed(mStartRunnable, 2500);
    }
    /* BluetoothAdapter.LeScanCallback */
    @Override
    public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) {
        Log.i(TAG, "New LE Device: " + device.getName() + " @ " + rssi);
        /*
         * We need to parse out of the AD structures from the scan record
         */
        List<AdRecord> records = AdRecord.parseScanRecord(scanRecord);
        if (records.isEmpty()) {
            Log.i(TAG, "Scan Record Empty");
        } else {
            Log.i(TAG, "Scan Record: "
                    + TextUtils.join(",", records));
        }
        /*
         * Create a new beacon from the list of obtains AD structures
         * and pass it up to the main thread
         */
        TemperatureBeacon beacon = new TemperatureBeacon(records, device.getAddress(), rssi);
        mHandler.sendMessage(Message.obtain(null, 0, beacon));
    }
    /*
     * We have a Handler to process scan results on the main thread,
     * add them to our list adapter, and update the view
     */
    private Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            TemperatureBeacon beacon = (TemperatureBeacon) msg.obj;
            // Key by hardware address, as documented on mBeacons. The previous code keyed
            // by getName(), which collapsed distinct devices sharing a user-visible name
            // into a single entry.
            mBeacons.put(beacon.getAddress(), beacon);
            mAdapter.setNotifyOnChange(false);
            mAdapter.clear();
            mAdapter.addAll(mBeacons.values());
            mAdapter.notifyDataSetChanged();
        }
    };
    /*
     * A custom adapter implementation that displays the TemperatureBeacon
     * element data in columns, and also varies the text color of each row
     * by the temperature values of the beacon
     */
    private static class BeaconAdapter extends ArrayAdapter<TemperatureBeacon> {
        public BeaconAdapter(Context context) {
            super(context, 0);
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = LayoutInflater.from(getContext())
                        .inflate(R.layout.item_beacon_list, parent, false);
            }
            TemperatureBeacon beacon = getItem(position);
            //Set color based on temperature
            final int textColor = getTemperatureColor(beacon.getCurrentTemp());
            TextView nameView = (TextView) convertView.findViewById(R.id.text_name);
            nameView.setText(beacon.getName());
            nameView.setTextColor(textColor);
            TextView tempView = (TextView) convertView.findViewById(R.id.text_temperature);
            tempView.setText(String.format("%.1f\u00B0C", beacon.getCurrentTemp()));
            tempView.setTextColor(textColor);
            TextView addressView = (TextView) convertView.findViewById(R.id.text_address);
            addressView.setText(beacon.getAddress());
            addressView.setTextColor(textColor);
            TextView rssiView = (TextView) convertView.findViewById(R.id.text_rssi);
            rssiView.setText(String.format("%ddBm", beacon.getSignal()));
            rssiView.setTextColor(textColor);
            return convertView;
        }
        /**
         * Maps a temperature in [0, 40] degC to a red-blue gradient: 0 degC is pure
         * blue, 40 degC is pure red. Values outside the range are clamped.
         */
        private int getTemperatureColor(float temperature) {
            //Color range from 0 - 40 degC
            float clipped = Math.max(0f, Math.min(40f, temperature));
            float scaled = ((40f - clipped) / 40f) * 255f;
            int blue = Math.round(scaled);
            int red = 255 - blue;
            return Color.rgb(red, 0, blue);
        }
    }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.server.handlers.resource;
import io.undertow.UndertowLogger;
import io.undertow.io.IoCallback;
import io.undertow.io.Sender;
import io.undertow.server.HttpServerExchange;
import io.undertow.util.DateUtils;
import io.undertow.util.ETag;
import io.undertow.util.MimeMappings;
import io.undertow.util.StatusCodes;
import org.xnio.IoUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.ByteBuffer;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* @author Stuart Douglas
*/
public class URLResource implements Resource, RangeAwareResource {
private final URL url;
private final URLConnection connection;
private final String path;
public URLResource(final URL url, final URLConnection connection, String path) {
this.url = url;
this.connection = connection;
this.path = path;
}
@Override
public String getPath() {
return path;
}
@Override
public Date getLastModified() {
return new Date(connection.getLastModified());
}
@Override
public String getLastModifiedString() {
return DateUtils.toDateString(getLastModified());
}
@Override
public ETag getETag() {
return null;
}
@Override
public String getName() {
String path = url.getPath();
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
int sepIndex = path.lastIndexOf("/");
if (sepIndex != -1) {
path = path.substring(sepIndex + 1);
}
return path;
}
@Override
public boolean isDirectory() {
Path file = getFilePath();
if (file != null) {
return Files.isDirectory(file);
} else if (url.getPath().endsWith("/")) {
return true;
}
return false;
}
@Override
public List<Resource> list() {
List<Resource> result = new LinkedList<>();
Path file = getFilePath();
try {
if (file != null) {
try(DirectoryStream<Path> stream = Files.newDirectoryStream(file)) {
for (Path child : stream) {
result.add(new URLResource(child.toUri().toURL(), connection, child.toString()));
}
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return result;
}
@Override
public String getContentType(final MimeMappings mimeMappings) {
final String fileName = getName();
int index = fileName.lastIndexOf('.');
if (index != -1 && index != fileName.length() - 1) {
return mimeMappings.getMimeType(fileName.substring(index + 1));
}
return null;
}
@Override
public void serve(Sender sender, HttpServerExchange exchange, IoCallback completionCallback) {
serveImpl(sender, exchange, -1, -1, false, completionCallback);
}
public void serveImpl(final Sender sender, final HttpServerExchange exchange, final long start, final long end, final boolean range, final IoCallback completionCallback) {
class ServerTask implements Runnable, IoCallback {
private InputStream inputStream;
private byte[] buffer;
long toSkip = start;
long remaining = end - start + 1;
@Override
public void run() {
if (range && remaining == 0) {
//we are done, just return
IoUtils.safeClose(inputStream);
completionCallback.onComplete(exchange, sender);
return;
}
if (inputStream == null) {
try {
inputStream = url.openStream();
} catch (IOException e) {
exchange.setResponseCode(StatusCodes.INTERNAL_SERVER_ERROR);
return;
}
buffer = new byte[1024];//TODO: we should be pooling these
}
try {
int res = inputStream.read(buffer);
if (res == -1) {
//we are done, just return
IoUtils.safeClose(inputStream);
completionCallback.onComplete(exchange, sender);
return;
}
int bufferStart = 0;
int length = res;
if (range && toSkip > 0) {
//skip to the start of the requested range
//not super efficient, but what can you do
while (toSkip > res) {
toSkip -= res;
res = inputStream.read(buffer);
if (res == -1) {
//we are done, just return
IoUtils.safeClose(inputStream);
completionCallback.onComplete(exchange, sender);
return;
}
}
bufferStart = (int) toSkip;
length -= toSkip;
toSkip = 0;
}
if (range && length > remaining) {
length = (int) remaining;
}
sender.send(ByteBuffer.wrap(buffer, bufferStart, length), this);
} catch (IOException e) {
onException(exchange, sender, e);
}
}
@Override
public void onComplete(final HttpServerExchange exchange, final Sender sender) {
if (exchange.isInIoThread()) {
exchange.dispatch(this);
} else {
run();
}
}
@Override
public void onException(final HttpServerExchange exchange, final Sender sender, final IOException exception) {
UndertowLogger.REQUEST_IO_LOGGER.ioException(exception);
IoUtils.safeClose(inputStream);
if (!exchange.isResponseStarted()) {
exchange.setResponseCode(StatusCodes.INTERNAL_SERVER_ERROR);
}
completionCallback.onException(exchange, sender, exception);
}
}
ServerTask serveTask = new ServerTask();
if (exchange.isInIoThread()) {
exchange.dispatch(serveTask);
} else {
serveTask.run();
}
}
@Override
public Long getContentLength() {
return (long) connection.getContentLength();
}
@Override
public String getCacheKey() {
return url.toString();
}
@Override
public File getFile() {
Path path = getFilePath();
return path != null ? path.toFile() : null;
}
@Override
public Path getFilePath() {
if (url.getProtocol().equals("file")) {
try {
return Paths.get(url.toURI());
} catch (URISyntaxException e) {
return null;
}
}
return null;
}
@Override
public File getResourceManagerRoot() {
return null;
}
@Override
public Path getResourceManagerRootPath() {
return null;
}
@Override
public URL getUrl() {
return url;
}
@Override
public void serveRange(Sender sender, HttpServerExchange exchange, long start, long end, IoCallback completionCallback) {
serveImpl(sender, exchange, start, end, true, completionCallback);
}
@Override
public boolean isRangeSupported() {
return true;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import kafka.utils.MockTime;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.ForeachAction;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.ValueJoiner;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * Integration tests for KStream-GlobalKTable joins: both the inner and the left join are
 * exercised against an embedded Kafka cluster, and the global store contents are verified
 * through interactive queries.
 */
@Category({IntegrationTest.class})
public class GlobalKTableIntegrationTest {
    private static final int NUM_BROKERS = 1;
    /** Upper bound for every waitForCondition() poll, in milliseconds. */
    private static final long TIMEOUT_MS = 30000L;
    @ClassRule
    public static final EmbeddedKafkaCluster CLUSTER =
        new EmbeddedKafkaCluster(NUM_BROKERS);
    // Incremented per test so every run gets fresh topic names and application id.
    private static volatile int testNo = 0;
    private final MockTime mockTime = CLUSTER.time;
    // Joins the stream's Long value against the global table's Long key.
    private final KeyValueMapper<String, Long, Long> keyMapper = new KeyValueMapper<String, Long, Long>() {
        @Override
        public Long apply(final String key, final Long value) {
            return value;
        }
    };
    private final ValueJoiner<Long, String, String> joiner = new ValueJoiner<Long, String, String>() {
        @Override
        public String apply(final Long value1, final String value2) {
            return value1 + "+" + value2;
        }
    };
    private StreamsBuilder builder;
    private Properties streamsConfiguration;
    private KafkaStreams kafkaStreams;
    private String globalOne;
    private String inputStream;
    private String inputTable;
    private final String globalStore = "globalStore";
    private GlobalKTable<Long, String> globalTable;
    private KStream<String, Long> stream;
    private KTable<String, Long> table;
    // Latest join output per stream key, populated by foreachAction on the processing thread.
    final Map<String, String> results = new HashMap<>();
    private ForeachAction<String, String> foreachAction;
    @Before
    public void before() throws InterruptedException {
        testNo++;
        builder = new StreamsBuilder();
        createTopics();
        streamsConfiguration = new Properties();
        final String applicationId = "globalOne-table-test-" + testNo;
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
        streamsConfiguration
            .put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
        streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
        streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
        streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
        globalTable = builder.globalTable(Serdes.Long(), Serdes.String(), null, globalOne, globalStore);
        stream = builder.stream(Serdes.String(), Serdes.Long(), inputStream);
        table = builder.table(Serdes.String(), Serdes.Long(), inputTable, "table");
        foreachAction = new ForeachAction<String, String>() {
            @Override
            public void apply(final String key, final String value) {
                results.put(key, value);
            }
        };
    }
    @After
    public void whenShuttingDown() throws IOException {
        if (kafkaStreams != null) {
            kafkaStreams.close();
        }
        IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
    }
    @Test
    public void shouldKStreamGlobalKTableLeftJoin() throws Exception {
        final KStream<String, String> streamTableJoin = stream.leftJoin(globalTable, keyMapper, joiner);
        streamTableJoin.foreach(foreachAction);
        produceInitialGlobalTableValues();
        startStreams();
        produceTopicValues(inputStream);
        final Map<String, String> expected = new HashMap<>();
        expected.put("a", "1+A");
        expected.put("b", "2+B");
        expected.put("c", "3+C");
        expected.put("d", "4+D");
        // Left join: key 5 has no global-table entry yet, so the right side is null.
        expected.put("e", "5+null");
        waitUntilResultsMatch(expected, "waiting for initial values");
        produceGlobalTableValues();
        final ReadOnlyKeyValueStore<Long, String> replicatedStore =
            kafkaStreams.store(globalStore, QueryableStoreTypes.<Long, String>keyValueStore());
        waitUntilStoreContains(replicatedStore, 5L, "J");
        produceTopicValues(inputStream);
        expected.put("a", "1+F");
        expected.put("b", "2+G");
        expected.put("c", "3+H");
        expected.put("d", "4+I");
        expected.put("e", "5+J");
        waitUntilResultsMatch(expected, "waiting for final values");
    }
    @Test
    public void shouldKStreamGlobalKTableJoin() throws Exception {
        final KStream<String, String> streamTableJoin = stream.join(globalTable, keyMapper, joiner);
        streamTableJoin.foreach(foreachAction);
        produceInitialGlobalTableValues();
        startStreams();
        produceTopicValues(inputStream);
        final Map<String, String> expected = new HashMap<>();
        expected.put("a", "1+A");
        expected.put("b", "2+B");
        expected.put("c", "3+C");
        expected.put("d", "4+D");
        // Inner join: key 5 has no global-table entry yet, so "e" produces no output.
        waitUntilResultsMatch(expected, "waiting for initial values");
        produceGlobalTableValues();
        final ReadOnlyKeyValueStore<Long, String> replicatedStore =
            kafkaStreams.store(globalStore, QueryableStoreTypes.<Long, String>keyValueStore());
        waitUntilStoreContains(replicatedStore, 5L, "J");
        produceTopicValues(inputStream);
        expected.put("a", "1+F");
        expected.put("b", "2+G");
        expected.put("c", "3+H");
        expected.put("d", "4+I");
        expected.put("e", "5+J");
        waitUntilResultsMatch(expected, "waiting for final values");
    }
    /** Blocks until {@link #results} equals {@code expected}, or fails with {@code message}. */
    private void waitUntilResultsMatch(final Map<String, String> expected, final String message)
        throws InterruptedException {
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return results.equals(expected);
            }
        }, TIMEOUT_MS, message);
    }
    /** Blocks until the queried global store maps {@code key} to {@code value}. */
    private void waitUntilStoreContains(final ReadOnlyKeyValueStore<Long, String> store,
                                        final Long key, final String value) throws InterruptedException {
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return value.equals(store.get(key));
            }
        }, TIMEOUT_MS, "waiting for data in replicated store");
    }
    private void createTopics() throws InterruptedException {
        inputStream = "input-stream-" + testNo;
        inputTable = "input-table-" + testNo;
        globalOne = "globalOne-" + testNo;
        CLUSTER.createTopics(inputStream, inputTable);
        // The global topic gets two partitions to exercise multi-partition restoration.
        CLUSTER.createTopic(globalOne, 2, 1);
    }
    private void startStreams() {
        kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
        kafkaStreams.start();
    }
    private void produceTopicValues(final String topic) throws java.util.concurrent.ExecutionException, InterruptedException {
        IntegrationTestUtils.produceKeyValuesSynchronously(
            topic,
            Arrays.asList(
                new KeyValue<>("a", 1L),
                new KeyValue<>("b", 2L),
                new KeyValue<>("c", 3L),
                new KeyValue<>("d", 4L),
                new KeyValue<>("e", 5L)),
            TestUtils.producerConfig(
                CLUSTER.bootstrapServers(),
                StringSerializer.class,
                LongSerializer.class,
                new Properties()),
            mockTime);
    }
    private void produceInitialGlobalTableValues() throws java.util.concurrent.ExecutionException, InterruptedException {
        IntegrationTestUtils.produceKeyValuesSynchronously(
            globalOne,
            Arrays.asList(
                new KeyValue<>(1L, "A"),
                new KeyValue<>(2L, "B"),
                new KeyValue<>(3L, "C"),
                new KeyValue<>(4L, "D")),
            TestUtils.producerConfig(
                CLUSTER.bootstrapServers(),
                LongSerializer.class,
                StringSerializer.class,
                new Properties()),
            mockTime);
    }
    private void produceGlobalTableValues() throws java.util.concurrent.ExecutionException, InterruptedException {
        IntegrationTestUtils.produceKeyValuesSynchronously(
            globalOne,
            Arrays.asList(
                new KeyValue<>(1L, "F"),
                new KeyValue<>(2L, "G"),
                new KeyValue<>(3L, "H"),
                new KeyValue<>(4L, "I"),
                new KeyValue<>(5L, "J")),
            TestUtils.producerConfig(
                CLUSTER.bootstrapServers(),
                LongSerializer.class,
                StringSerializer.class,
                new Properties()),
            mockTime);
    }
}
| |
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package io.permazen.parse.expr;
import io.permazen.parse.ParseSession;
import java.lang.reflect.Array;
import java.util.List;
import java.util.Map;
/**
* Java expression operators.
*/
public enum Op {
// NOTE: operations without overridden apply() methods are handled in the corresponding parser class
// Array access
ARRAY_ACCESS(2, "[]") {
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
Value apply(ParseSession session, final Value targetValue, final Value itemValue) {
// Check null
final Object target = targetValue.checkNotNull(session, "array access");
// Handle map
if (target instanceof Map) {
final Map map = (Map)target;
final Object key = targetValue.get(session);
return new AbstractLValue() {
@Override
public Object get(ParseSession session) {
return map.get(key);
}
@Override
public void set(ParseSession session, Value value) {
final Object obj = value.get(session);
try {
map.put(key, obj);
} catch (RuntimeException e) {
throw new EvalException("invalid map put operation"
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
}
}
};
}
// Handle list
if (target instanceof List) {
final List list = (List)target;
final int index = itemValue.checkIntegral(session, "list index");
return new AbstractLValue() {
@Override
public Object get(ParseSession session) {
return list.get(index);
}
@Override
public void set(ParseSession session, Value value) {
final Object obj = value.get(session);
try {
list.set(index, obj);
} catch (RuntimeException e) {
throw new EvalException("invalid list set operation"
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
}
}
};
}
// Assume it must be an array
final int index = itemValue.checkIntegral(session, "array index");
return new AbstractLValue() {
@Override
public Object get(ParseSession session) {
try {
return Array.get(target, index);
} catch (IllegalArgumentException e) {
throw new EvalException("invalid array access operation on non-array of type `"
+ target.getClass().getName() + "'", e);
} catch (ArrayIndexOutOfBoundsException e) {
throw new EvalException("array index out of bounds"
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
}
}
@Override
public void set(ParseSession session, Value value) {
final Object obj = value.get(session);
try {
Array.set(target, index, obj);
} catch (IllegalArgumentException e) {
throw new EvalException("invalid array set operation"
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
} catch (ArrayIndexOutOfBoundsException e) {
throw new EvalException("array index out of bounds"
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
}
}
};
}
},
// Member access
MEMBER_ACCESS(2, "."),
// Invoke method
INVOKE_METHOD(2, "()"),
// Unary
POST_INCREMENT(1, "++"),
POST_DECREMENT(1, "--"),
PRE_INCREMENT(1, "++"),
PRE_DECREMENT(1, "--"),
UNARY_PLUS(1, "+") {
@Override
Value apply(ParseSession session, Value value) {
return new ConstValue(value.checkNumeric(session, "unary plus")); // note: returned value is not an L-value
}
},
UNARY_MINUS(1, "-") {
@Override
Value apply(ParseSession session, Value value) {
return value.negate(session);
}
},
LOGICAL_NOT(1, "!") {
@Override
Value apply(ParseSession session, Value value) {
return new ConstValue(!value.checkBoolean(session, "logical `not'"));
}
},
INVERT(1, "~") {
@Override
Value apply(ParseSession session, Value value) {
return value.invert(session);
}
},
// Cast
CAST(1, "()") {
// Handled by CastExprParser
},
// Multiplicative
MULTIPLY(2, "*") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.multiply(session, rhs);
}
},
DIVIDE(2, "/") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.divide(session, rhs);
}
},
MODULO(2, "%") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.mod(session, rhs);
}
},
// Additive
PLUS(2, "+") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.add(session, rhs);
}
},
MINUS(2, "-") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.subtract(session, rhs);
}
},
// Shift
LSHIFT(2, "<<") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.lshift(session, rhs);
}
},
RSHIFT(2, ">>") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.rshift(session, rhs);
}
},
URSHIFT(2, ">>>") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.urshift(session, rhs);
}
},
// Relational
LT(2, "<") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.compare(session, rhs, Value.LT);
}
},
GT(2, ">") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.compare(session, rhs, Value.GT);
}
},
LTEQ(2, "<=") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.compare(session, rhs, Value.LT | Value.EQ);
}
},
GTEQ(2, ">=") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.compare(session, rhs, Value.GT | Value.EQ);
}
},
INSTANCEOF(2, "instanceof") {
// Handled by InstanceofParser
},
// Equality
EQUAL(2, "==") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
final Object lval = lhs.get(session);
final Object rval = rhs.get(session);
if (lval == null || rval == null)
return new ConstValue(lval == rval);
if (lval instanceof Number || rval instanceof Number)
return lhs.compare(session, rhs, Value.EQ);
return new ConstValue(lval.equals(rval));
}
},
NOT_EQUAL(2, "!=") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return new ConstValue(!(Boolean)Op.EQUAL.apply(session, lhs, rhs).get(session));
}
},
// Bitwise
AND(2, "&") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.and(session, rhs);
}
},
OR(2, "|") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.or(session, rhs);
}
},
XOR(2, "^") {
@Override
Value apply(ParseSession session, Value lhs, Value rhs) {
return lhs.xor(session, rhs);
}
},
// Logical
LOGICAL_AND(2, "&&"),
LOGICAL_OR(2, "||"),
// Conditional
CONDITIONAL(3, "?:"),
// Assignment
EQUALS(2, "="),
PLUS_EQUALS(2, "+="),
MINUS_EQUALS(2, "-="),
MULTIPLY_EQUALS(2, "*="),
DIVIDE_EQUALS(2, "/="),
MODULO_EQUALS(2, "%="),
AND_EQUALS(2, "&="),
XOR_EQUALS(2, "^="),
OR_EQUALS(2, "|="),
LSHIFT_EQUALS(2, "<<="),
RSHIFT_EQUALS(2, ">>="),
URSHIFT_EQUALS(2, ">>>=");
// Fields
private final int arity;
private final String symbol;
// Constructors
Op(int arity, String symbol) {
this.arity = arity;
this.symbol = symbol;
}
// Methods
/**
* Get the arity of this symbol.
*
* @return symbol arity
*/
public int getArity() {
return this.arity;
}
/**
* Get the symbol associated with this operator.
*
* @return operator symbol
*/
public String getSymbol() {
return this.symbol;
}
/**
* Apply this operator to the given parameters.
*
* @param session current session
* @param args operator arguments
* @return result of operation
* @throws IllegalArgumentException if {@code args} contains inappropriate value(s)
* @throws IllegalArgumentException if the length of {@code args} does not match this operator
*/
public Value apply(ParseSession session, Value... args) {
if (args.length != this.arity)
throw new EvalException("wrong number of arguments " + args.length + " != " + this.arity + " given to " + this);
switch (args.length) {
case 1:
return this.apply(session, args[0]);
case 2:
return this.apply(session, args[0], args[1]);
case 3:
return this.apply(session, args[0], args[1], args[2]);
default:
throw new RuntimeException("internal error");
}
}
/**
* Get the {@link Op} corresponding to the given symbol.
* Note: some symbols correspond to multiple {@link Op}s:
* <ul>
* <li>For {@code +} or {@code -}, the binary operator is returned</li>
* <li>For {@code ++} or {@code --}, the post-increment operator is returned</li>
* </ul>
*
* @param symbol symbol
* @return corresponding operator
* @throws IllegalArgumentException if no such {@link Op} exists
*/
public static Op forSymbol(String symbol) {
switch (symbol) {
case "+":
return Op.PLUS;
case "-":
return Op.MINUS;
case "++":
return Op.POST_INCREMENT;
case "--":
return Op.POST_DECREMENT;
default:
for (Op op : Op.values()) {
if (op.symbol.equals(symbol))
return op;
}
throw new IllegalArgumentException("no operation with symbol `" + symbol + "' exists");
}
}
Value apply(ParseSession session, Value arg) {
throw new UnsupportedOperationException();
}
Value apply(ParseSession session, Value arg1, Value arg2) {
throw new UnsupportedOperationException();
}
Value apply(ParseSession session, Value arg1, Value arg2, Value arg3) {
throw new UnsupportedOperationException();
}
}
| |
/*
* Copyright 2020 Hazelcast Inc.
*
* Licensed under the Hazelcast Community License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://hazelcast.com/hazelcast-community-license
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.hadoop.impl;
import com.hazelcast.function.BiFunctionEx;
import com.hazelcast.jet.JetException;
import com.hazelcast.jet.core.ProcessorMetaSupplier;
import com.hazelcast.jet.pipeline.file.AvroFileFormat;
import com.hazelcast.jet.pipeline.file.CsvFileFormat;
import com.hazelcast.jet.pipeline.file.FileFormat;
import com.hazelcast.jet.pipeline.file.JsonFileFormat;
import com.hazelcast.jet.pipeline.file.LinesTextFileFormat;
import com.hazelcast.jet.pipeline.file.ParquetFileFormat;
import com.hazelcast.jet.pipeline.file.RawBytesFileFormat;
import com.hazelcast.jet.pipeline.file.TextFileFormat;
import com.hazelcast.jet.pipeline.file.impl.FileSourceConfiguration;
import com.hazelcast.jet.pipeline.file.impl.FileSourceFactory;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericContainer;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.parquet.avro.AvroParquetInputFormat;
import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.ServiceLoader;
import static com.hazelcast.jet.hadoop.HadoopProcessors.readHadoopP;
import static com.hazelcast.jet.hadoop.HadoopSources.COPY_ON_READ;
import static com.hazelcast.jet.hadoop.impl.CsvInputFormat.CSV_INPUT_FORMAT_BEAN_CLASS;
import static com.hazelcast.jet.hadoop.impl.JsonInputFormat.JSON_INPUT_FORMAT_BEAN_CLASS;
import static java.util.Objects.requireNonNull;
/**
 * Hadoop-based implementation of {@link FileSourceFactory}.
 */
public class HadoopFileSourceFactory implements FileSourceFactory {

    // Maps a FileFormat id (e.g. "avro", "csv") to the configurer that knows
    // how to set up a Hadoop Job for that format.
    private final Map<String, JobConfigurer> configurers;

    /**
     * Creates the HadoopSourceFactory.
     */
    public HadoopFileSourceFactory() {
        configurers = new HashMap<>();
        addJobConfigurer(configurers, new AvroFormatJobConfigurer());
        addJobConfigurer(configurers, new CsvFormatJobConfigurer());
        addJobConfigurer(configurers, new JsonFormatJobConfigurer());
        addJobConfigurer(configurers, new LineTextJobConfigurer());
        addJobConfigurer(configurers, new ParquetFormatJobConfigurer());
        addJobConfigurer(configurers, new RawBytesFormatJobConfigurer());
        addJobConfigurer(configurers, new TextJobConfigurer());
        // Allow additional formats to be plugged in via META-INF/services.
        // NOTE: a ServiceLoader entry with a duplicate format id silently
        // replaces a built-in configurer, since put() overwrites.
        ServiceLoader<JobConfigurer> loader = ServiceLoader.load(JobConfigurer.class);
        for (JobConfigurer jobConfigurer : loader) {
            addJobConfigurer(configurers, jobConfigurer);
        }
    }

    // Registers a configurer under its self-reported format id.
    private static void addJobConfigurer(Map<String, JobConfigurer> configurers, JobConfigurer configurer) {
        configurers.put(configurer.format(), configurer);
    }

    /**
     * Builds a Hadoop-backed source processor for the given file source configuration:
     * creates a Job, applies the user options, sets the input path, and delegates the
     * format-specific setup to the matching {@link JobConfigurer}.
     *
     * @throws JetException if no configurer exists for the configured format,
     *         or if creating the Hadoop Job fails
     */
    @Nonnull
    @Override
    public <T> ProcessorMetaSupplier create(@Nonnull FileSourceConfiguration<T> fsc) {
        try {
            Job job = Job.getInstance();
            Configuration configuration = job.getConfiguration();
            // Only read files directly in the input directory; do not descend into subdirectories.
            configuration.setBoolean(FileInputFormat.INPUT_DIR_NONRECURSIVE_IGNORE_SUBDIRS, true);
            configuration.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, false);
            // User-supplied options are applied after the defaults above, so they may override them.
            for (Entry<String, String> option : fsc.getOptions().entrySet()) {
                configuration.set(option.getKey(), option.getValue());
            }
            Path inputPath = getInputPath(fsc);
            FileInputFormat.addInputPath(job, inputPath);
            FileFormat<T> fileFormat = requireNonNull(fsc.getFormat());
            JobConfigurer configurer = this.configurers.get(fileFormat.format());
            if (configurer == null) {
                throw new JetException("Could not find JobConfigurer for FileFormat: " + fileFormat.format() + ". " +
                        "Did you provide correct modules on classpath?");
            }
            configurer.configure(job, fileFormat);
            // The Configuration must be made serializable so it can ship to cluster members.
            return readHadoopP(SerializableConfiguration.asSerializable(configuration), configurer.projectionFn());
        } catch (IOException e) {
            throw new JetException("Could not create a source", e);
        }
    }

    // Resolves the effective input path from the configured directory and glob.
    @Nonnull
    private <T> Path getInputPath(FileSourceConfiguration<T> fsc) {
        if (fsc.getGlob().equals("*")) {
            // * means all files in the directory, but also all directories
            // Hadoop interprets it as multiple input folders, resulting to processing files in 1st level
            // subdirectories
            return new Path(fsc.getPath());
        } else {
            // NOTE(review): File.separatorChar is '\' on Windows, while Hadoop Path
            // expects '/' separators — confirm whether Windows paths are supported here.
            return new Path(fsc.getPath() + File.separatorChar + fsc.getGlob());
        }
    }

    // Configures Avro input; records are read as AvroKey<GenericRecord or reflect type>.
    private static class AvroFormatJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            AvroFileFormat<T> avroFileFormat = (AvroFileFormat<T>) format;
            job.setInputFormatClass(AvroKeyInputFormat.class);
            Class<?> reflectClass = avroFileFormat.reflectClass();
            if (reflectClass != null) {
                // Reflect-mapped reads produce fresh objects, so no defensive copy is needed.
                Schema schema = ReflectData.get().getSchema(reflectClass);
                AvroJob.setInputKeySchema(job, schema);
            } else {
                // Generic records are copied in projectionFn() below, so the reader's
                // object reuse is safe and COPY_ON_READ can be disabled.
                job.getConfiguration().setBoolean(COPY_ON_READ, Boolean.FALSE);
            }
        }

        @Override
        public BiFunctionEx<AvroKey<?>, NullWritable, ?> projectionFn() {
            return (k, v) -> {
                Object record = k.datum();
                // Copy Avro containers because the input format reuses the record instance.
                return record instanceof GenericContainer ? copy((GenericContainer) record) : record;
            };
        }

        @Nonnull
        @Override
        public String format() {
            return AvroFileFormat.FORMAT_AVRO;
        }
    }

    // Reads each whole file as a single byte[] value.
    private static class RawBytesFormatJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            job.setInputFormatClass(WholeFileAsBytesInputFormat.class);
        }

        @Override
        public BiFunctionEx<NullWritable, BytesWritable, byte[]> projectionFn() {
            // copyBytes() detaches the data from the reusable BytesWritable buffer.
            return (k, v) -> v.copyBytes();
        }

        @Nonnull
        @Override
        public String format() {
            return RawBytesFileFormat.FORMAT_BIN;
        }
    }

    // Reads CSV files, optionally deserializing each line into a user-provided bean class.
    private static class CsvFormatJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            CsvFileFormat<T> csvFileFormat = (CsvFileFormat<T>) format;
            job.setInputFormatClass(CsvInputFormat.class);
            job.getConfiguration().setBoolean(COPY_ON_READ, Boolean.FALSE);
            Class<?> clazz = csvFileFormat.clazz();
            if (clazz != null) {
                job.getConfiguration().set(CSV_INPUT_FORMAT_BEAN_CLASS, clazz.getCanonicalName());
            }
        }

        @Override
        public BiFunctionEx<NullWritable, ?, ?> projectionFn() {
            return (k, v) -> v;
        }

        @Nonnull
        @Override
        public String format() {
            return CsvFileFormat.FORMAT_CSV;
        }
    }

    // Reads JSON files, optionally deserializing each record into a user-provided bean class.
    private static class JsonFormatJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            JsonFileFormat<T> jsonFileFormat = (JsonFileFormat<T>) format;
            job.setInputFormatClass(JsonInputFormat.class);
            job.getConfiguration().setBoolean(COPY_ON_READ, Boolean.FALSE);
            Class<?> clazz = jsonFileFormat.clazz();
            if (clazz != null) {
                job.getConfiguration().set(JSON_INPUT_FORMAT_BEAN_CLASS, clazz.getCanonicalName());
            }
        }

        @Override
        public BiFunctionEx<LongWritable, ?, ?> projectionFn() {
            return (k, v) -> v;
        }

        @Nonnull
        @Override
        public String format() {
            return JsonFileFormat.FORMAT_JSON;
        }
    }

    // Emits each line of a text file as a String (key is the byte offset, ignored).
    private static class LineTextJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            job.setInputFormatClass(TextInputFormat.class);
        }

        @Override
        public BiFunctionEx<LongWritable, Text, String> projectionFn() {
            return (k, v) -> v.toString();
        }

        @Nonnull
        @Override
        public String format() {
            return LinesTextFileFormat.FORMAT_LINES;
        }
    }

    // Reads Parquet files through the Avro bridge; records arrive as GenericContainers.
    private static class ParquetFormatJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            job.setInputFormatClass(AvroParquetInputFormat.class);
            job.getConfiguration().setBoolean(COPY_ON_READ, Boolean.FALSE);
        }

        @Override
        public BiFunctionEx<String, ?, ?> projectionFn() {
            return (k, record) -> {
                if (record == null) {
                    return null;
                } else if (record instanceof GenericContainer) {
                    // Copy because the reader may reuse the record instance.
                    return copy((GenericContainer) record);
                } else {
                    throw new IllegalArgumentException("Unexpected record type: " + record.getClass());
                }
            };
        }

        @Nonnull
        @Override
        public String format() {
            return ParquetFileFormat.FORMAT_PARQUET;
        }
    }

    // Reads each whole file as a single String value.
    private static class TextJobConfigurer implements JobConfigurer {
        @Override
        public <T> void configure(Job job, FileFormat<T> format) {
            job.setInputFormatClass(WholeFileAsTextInputFormat.class);
        }

        @Override
        public BiFunctionEx<NullWritable, Text, String> projectionFn() {
            return (k, v) -> v.toString();
        }

        @Nonnull
        @Override
        public String format() {
            return TextFileFormat.FORMAT_TXT;
        }
    }

    /**
     * Copies Avro record.
     *
     * Deep-copies via SpecificData or GenericData depending on the record kind;
     * needed because Hadoop Avro readers reuse record instances between calls.
     *
     * @throws IllegalArgumentException if the record is neither a SpecificRecord
     *         nor a GenericRecord
     */
    @SuppressWarnings("unchecked")
    private static <T extends GenericContainer> T copy(T record) {
        if (record instanceof SpecificRecord) {
            SpecificRecord specificRecord = (SpecificRecord) record;
            return (T) SpecificData.get().deepCopy(specificRecord.getSchema(), specificRecord);
        } else if (record instanceof GenericRecord) {
            GenericRecord genericRecord = (GenericRecord) record;
            return (T) GenericData.get().deepCopy(genericRecord.getSchema(), genericRecord);
        } else {
            throw new IllegalArgumentException("Unexpected record type: " + record.getClass());
        }
    }
}
| |
package nl.ovapi.rid.gtfsrt.services;
import java.io.IOException;
import java.io.StringReader;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import lombok.NonNull;
import lombok.Setter;
import nl.ovapi.ZeroMQUtils;
import nl.ovapi.arnu.ARNUexporter;
import nl.ovapi.bison.BisonToGtfsUtils;
import nl.ovapi.bison.JourneyProcessor;
import nl.ovapi.bison.JourneyProcessor.Update;
import nl.ovapi.bison.KV78TurboExporter;
import nl.ovapi.bison.model.DataOwnerCode;
import nl.ovapi.bison.model.KV15message;
import nl.ovapi.bison.model.KV17cvlinfo;
import nl.ovapi.bison.model.KV6posinfo;
import nl.ovapi.bison.model.KV6posinfo.Type;
import nl.ovapi.bison.model.MessagePriority;
import nl.ovapi.bison.sax.KV15SAXHandler;
import nl.ovapi.bison.sax.KV17SAXHandler;
import nl.ovapi.bison.sax.KV6SAXHandler;
import nl.ovapi.exceptions.StopNotFoundException;
import nl.ovapi.exceptions.TooEarlyException;
import nl.ovapi.exceptions.TooOldException;
import nl.ovapi.exceptions.UnknownKV6PosinfoType;
import nl.ovapi.rid.gtfsrt.Utils;
import nl.ovapi.rid.model.Journey;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.onebusaway.gtfs_realtime.exporter.GtfsRealtimeGuiceBindingTypes.Alerts;
import org.onebusaway.gtfs_realtime.exporter.GtfsRealtimeGuiceBindingTypes.TripUpdates;
import org.onebusaway.gtfs_realtime.exporter.GtfsRealtimeGuiceBindingTypes.VehiclePositions;
import org.onebusaway.gtfs_realtime.exporter.GtfsRealtimeIncrementalUpdate;
import org.onebusaway.gtfs_realtime.exporter.GtfsRealtimeSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.zeromq.ZMQ;
import org.zeromq.ZMQ.Context;
import org.zeromq.ZMQ.Socket;
import org.zeromq.ZMsg;
import com.google.common.collect.Maps;
import com.google.transit.realtime.GtfsRealtime.Alert.Builder;
import com.google.transit.realtime.GtfsRealtime.FeedEntity;
import com.google.transit.realtime.GtfsRealtime.TripUpdate;
@Singleton
public class BisonToGtfsRealtimeService {
// GTFS-RT output sinks, injected below.
private GtfsRealtimeSink _tripUpdatesSink;
private GtfsRealtimeSink _vehiclePositionsSink;
private GtfsRealtimeSink _alertsSink;
// ZeroMQ publisher address to subscribe to. NOTE(review): "pubAdress" is a typo
// for "pubAddress", but renaming would break the Lombok-generated setter callers.
@Setter String pubAdress;
private ExecutorService _executor;
// Future of the last submitted long-running task; used by stop() to cancel it.
private Future<?> _task;
private ScheduledExecutorService _scheduler;
private GeometryService _geometryService;
private RIDservice _ridService;
private static final Logger _log = LoggerFactory.getLogger(BisonToGtfsRealtimeService.class);
// How often the GarbageCollectorTask runs, in seconds.
private final static int GARBAGE_COLLECTOR_INTERVAL_SECONDS = 60;
// A vehicle position older than this is considered stale and removed.
private final static int POSINFO_MAX_AGE_SECONDS = 120;
// Trip updates are kept this long past the journey's end before being expired.
private final static int TRIPUPDATE_EXPIRATION_HOURS = 1;
// Per-journey processors keyed by "<operatingday>:<privateCode>"-style ids.
private ConcurrentMap<String, JourneyProcessor> journeyProcessors;
private ARNUexporter _arnuExporter;
private KV78TurboExporter _kv78TurboExporter;
@Inject
public void setKV78TurboExporter(KV78TurboExporter kv78TurboExporter) {
    _kv78TurboExporter = kv78TurboExporter;
}
@Inject
public void setARNUexporter(ARNUexporter arnuExporter) {
    _arnuExporter = arnuExporter;
}
@Inject
public void setTripUpdatesSink(@TripUpdates GtfsRealtimeSink tripUpdatesSink) {
    _tripUpdatesSink = tripUpdatesSink;
}
@Inject
public void setAlertsSink(@Alerts GtfsRealtimeSink alertsSink) {
    _alertsSink = alertsSink;
}
@Inject
public void setVehiclePositionsSink(@VehiclePositions GtfsRealtimeSink vehiclePositionsSink) {
    _vehiclePositionsSink = vehiclePositionsSink;
}
@Inject
public void setGeometryService(GeometryService geometryService) {
    _geometryService = geometryService;
}
@Inject
public void setRIDService(RIDservice ridService) {
    _ridService = ridService;
}
/**
 * Translates a batch of KV15 (general message) records into GTFS-RT alerts
 * and pushes them to the alerts sink as a single incremental update.
 */
private class ProcessKV15Task implements Runnable{
    private ArrayList<KV15message> messages;
    public ProcessKV15Task(ArrayList<KV15message> messages){
        this.messages = messages;
    }
    @Override
    public void run() {
        GtfsRealtimeIncrementalUpdate update = new GtfsRealtimeIncrementalUpdate();
        for (KV15message msg : messages){
            try{
                // Alert entity id is derived from the message's owner + code fields.
                String id = String.format("KV15:%s:%s:%s", msg.getDataOwnerCode().name(),msg.getMessageCodeDate(),msg.getMessageCodeNumber());
                if (msg.getIsDelete()){
                    update.addDeletedEntity(id);
                    _log.info("Deleted KV15 {} : {}",id,msg);
                    continue;
                }
                // Commercial messages are only passed through for QBUZZ.
                if (msg.getMessagePriority() == MessagePriority.COMMERCIAL && msg.getDataOwnerCode() != DataOwnerCode.QBUZZ){
                    _log.info("Ignore KV15 {}",msg);
                    continue;
                }
                FeedEntity.Builder entity = FeedEntity.newBuilder();
                entity.setId(id);
                Builder alert = BisonToGtfsUtils.translateKV15ToGTFSRT(msg, _ridService);
                // Only publish alerts that actually target at least one entity.
                if (alert.getInformedEntityCount() > 0){
                    _log.info("Add KV15 {} : {}",id,msg);
                    entity.setAlert(alert);
                    update.addUpdatedEntity(entity.build());
                }else{
                    // FIX: message was "Ignore KV15, not entities found{}" (garbled grammar, no spacing).
                    _log.info("Ignore KV15, no entities found: {}",msg);
                }
            }catch (Exception e){
                // Keep processing the rest of the batch if one message fails.
                _log.error("Processing KV15 {}",msg,e);
            }
        }
        // Only push an incremental update when it carries changes.
        if (update.getDeletedEntities().size() > 0 || update.getUpdatedEntities().size() > 0)
            _alertsSink.handleIncrementalUpdate(update);
    }
}
/**
 * Looks up the JourneyProcessor for the given journey id, lazily creating
 * and caching one from the RID database when first seen.
 *
 * @param privateCode journey id, e.g. "operatingday:dataowner:line:journeynumber"
 * @return the processor, or null when the journey is not in the database
 */
private JourneyProcessor getOrCreateProcessorForId(@NonNull String privateCode){
    // Fast path: already tracking this journey.
    final JourneyProcessor cached = journeyProcessors.get(privateCode);
    if (cached != null)
        return cached;
    // Slow path: resolve the journey; unknown journeys are not cached.
    final Journey journey = _ridService.getJourney(privateCode);
    if (journey == null){
        //_log.info("Journey {} not found",privateCode);
        return null; //Journey not found
    }
    final JourneyProcessor created = new JourneyProcessor(journey);
    journeyProcessors.put(privateCode, created);
    return created;
}
@PostConstruct
public void start() {
    // BISON feed timestamps are in Dutch local time.
    TimeZone.setDefault(TimeZone.getTimeZone("Europe/Amsterdam"));
    journeyProcessors = Maps.newConcurrentMap();
    _executor = Executors.newCachedThreadPool();
    _scheduler = Executors.newScheduledThreadPool(5);
    _task = _executor.submit(new ProcessTask());
    // NOTE(review): this overwrites the ProcessTask future, so stop() can only
    // cancel the ReceiveTask; confirm whether both futures should be retained.
    _task = _executor.submit(new ReceiveTask());
    try {
        // Replay KV15 alerts that were already active before this (re)start.
        _executor.submit(new ProcessKV15Task(_ridService.getActiveKV15messages()));
    } catch (SQLException e) {
        // NOTE(review): printStackTrace instead of _log.error — failure to
        // replay alerts is silently tolerated here; confirm this is intended.
        e.printStackTrace();
    }
    _scheduler.scheduleAtFixedRate(new GarbageCollectorTask(), GARBAGE_COLLECTOR_INTERVAL_SECONDS, GARBAGE_COLLECTOR_INTERVAL_SECONDS, TimeUnit.SECONDS);
}
/**
 * Shuts the service down: cancels the receive loop and stops both executors.
 * Safe to call more than once; every field is nulled after teardown.
 */
@PreDestroy
public void stop() {
    final Future<?> runningTask = _task;
    if (runningTask != null) {
        runningTask.cancel(true);
        _task = null;
    }
    final ExecutorService workers = _executor;
    if (workers != null) {
        workers.shutdownNow();
        _executor = null;
    }
    final ScheduledExecutorService periodicTasks = _scheduler;
    if (periodicTasks != null) {
        periodicTasks.shutdownNow();
        _scheduler = null;
    }
}
/**
 * Deletes the given entity ids from both the vehicle-position and trip-update
 * feeds in one incremental update each. A no-op for an empty id list.
 */
public void remove(ArrayList<String> removeIds){
    if (removeIds.isEmpty()){
        return;
    }
    final GtfsRealtimeIncrementalUpdate vehicleDeletes = new GtfsRealtimeIncrementalUpdate();
    final GtfsRealtimeIncrementalUpdate tripDeletes = new GtfsRealtimeIncrementalUpdate();
    for (final String entityId : removeIds){
        vehicleDeletes.addDeletedEntity(entityId);
        tripDeletes.addDeletedEntity(entityId);
    }
    _vehiclePositionsSink.handleIncrementalUpdate(vehicleDeletes);
    _tripUpdatesSink.handleIncrementalUpdate(tripDeletes);
}
/**
 * Periodic cleanup task: marks currently running journeys with stale/missing
 * realtime data as UNKNOWN, deletes stale vehicle positions, and expires
 * trip updates for journeys that ended more than TRIPUPDATE_EXPIRATION_HOURS ago.
 */
private class GarbageCollectorTask implements Runnable{
    @Override
    public void run() {
        //Delete vehicle updates that haven't received KV6 in 2 minutes.
        GtfsRealtimeIncrementalUpdate vehicleUpdates = new GtfsRealtimeIncrementalUpdate();
        GtfsRealtimeIncrementalUpdate tripUpdates = new GtfsRealtimeIncrementalUpdate();
        long threshold = Utils.currentTimeSecs() - POSINFO_MAX_AGE_SECONDS;
        int vehiclesCleaned = 0;
        int tripsCleaned = 0;
        try{
            //Scan for currently driving journey's with no or expired realtime information
            long current_time = Utils.currentTimeSecs();
            for (Journey j : _ridService.getAllJourneys()){
                // Only journeys that should be under way right now.
                if (j.getDepartureEpoch() < current_time && j.getEndEpoch() > current_time){
                    Update update = null;
                    String id = j.getOperatingDay()+":"+j.getPrivateCode();
                    JourneyProcessor jp = getOrCreateProcessorForId(id);
                    // No recent KV6 position: flag the journey as UNKNOWN downstream.
                    if (jp != null && (jp.getPosinfo() == null || jp.getPosinfo().getTimestamp() < threshold)){
                        update = jp.setAsUnknown();
                        if (update != null
                            && update.getChangedPasstimes() != null
                            && update.getChangedPasstimes().size() > 0){
                            _kv78TurboExporter.export(update.getChangedPasstimes());
                        }
                    }
                }
            }
        }catch (Exception e){
            e.printStackTrace();
            _log.error("Scanning for UNKNOWN's",e);
        }
        // journeyProcessors is a ConcurrentMap, so removing entries while
        // iterating its entrySet below is safe.
        for (Entry<String, JourneyProcessor> entry : journeyProcessors.entrySet()){
            JourneyProcessor jp = entry.getValue();
            try{
                // Drop the primary vehicle's position when it has gone stale.
                if (jp.getPosinfo() != null && jp.getPosinfo().getTimestamp() < threshold){
                    vehicleUpdates.addDeletedEntity(getId(jp.getPosinfo(),null));
                    jp.clearKV6();
                    vehiclesCleaned += 1;
                }
                // Same for any stale reinforcement (extra) vehicles.
                if (jp.getReinforcements() != null){
                    for (Entry<Integer, KV6posinfo> reinforcement : jp.getReinforcements().entrySet()){
                        if (reinforcement.getValue().getTimestamp() < threshold){
                            vehicleUpdates.addDeletedEntity(getId(reinforcement.getValue(),reinforcement.getValue().getReinforcementnumber()));
                            vehiclesCleaned += 1;
                        }
                    }
                }
            }catch (Exception e){
                e.printStackTrace();
                _log.error("Garbage Collection vehiclepositions {}",jp,e);
            }
            try{
                // Expire the whole journey once it ended more than an hour ago.
                if (jp.getEndEpoch() < (Utils.currentTimeSecs()-TRIPUPDATE_EXPIRATION_HOURS*60*60)){ //
                    tripUpdates.addDeletedEntity(entry.getKey());
                    //This is to avoid any JourneyProcessor's being removed while there is still a VehiclePosition stored
                    vehicleUpdates.addDeletedEntity(entry.getKey());
                    journeyProcessors.remove(entry.getKey());
                    tripsCleaned++;
                    _log.trace("Garbage cleaned {}",entry.getKey());
                }
            }catch (Exception e){
                e.printStackTrace();
                _log.error("Garbage Collection tripUpdates",e);
            }
        }
        // NOTE(review): this is a routine summary logged at ERROR level —
        // confirm whether it should be info/debug instead.
        _log.error("GarbageCollector: {} vehicles cleaned, {} trips cleaned",vehiclesCleaned,tripsCleaned);
        if (vehicleUpdates.getDeletedEntities().size() > 0 || vehicleUpdates.getUpdatedEntities().size() > 0)
            _vehiclePositionsSink.handleIncrementalUpdate(vehicleUpdates);
        if (tripUpdates.getDeletedEntities().size() > 0 || tripUpdates.getUpdatedEntities().size() > 0)
            _tripUpdatesSink.handleIncrementalUpdate(tripUpdates);
    }
}
/**
 * Builds the feed-entity id for a KV6 position:
 * "operatingday:dataowner:line:journeynumber", with the reinforcement number
 * appended as a fifth segment for reinforcement (extra) vehicles.
 *
 * @return the id, or null when the position has no data owner code
 */
private String getId(KV6posinfo posinfo,Integer reinforcementnumber){
    if (posinfo.getDataownercode() == null){
        _log.error("No DaOwCode {}",posinfo);
        return null; //Find out how this can happen in the first place?
    }
    final String baseId = String.format("%s:%s:%s:%s",
        posinfo.getOperatingday(),
        posinfo.getDataownercode().name(),
        posinfo.getLineplanningnumber(),
        posinfo.getJourneynumber());
    // Reinforcement number 0 (or absent) denotes the primary vehicle.
    final boolean primaryVehicle = reinforcementnumber == null || reinforcementnumber == 0;
    return primaryVehicle ? baseId : baseId + ":" + reinforcementnumber;
}
/**
 * Processes a batch of KV6 vehicle-position messages: resolves each to a
 * JourneyProcessor, publishes vehicle positions, and exports the resulting
 * trip updates to the GTFS-RT, KV78Turbo and ARNU sinks.
 */
private class ProcessKV6Task implements Runnable{
    private ArrayList<KV6posinfo> posinfos;
    public ProcessKV6Task(ArrayList<KV6posinfo> posinfos){
        this.posinfos = posinfos;
    }
    @Override
    public void run() {
        GtfsRealtimeIncrementalUpdate tripUpdates = new GtfsRealtimeIncrementalUpdate();
        GtfsRealtimeIncrementalUpdate vehicleUpdates = new GtfsRealtimeIncrementalUpdate();
        for (KV6posinfo posinfo : posinfos){
            try{
                // Positions without a line number cannot be matched to a journey.
                if (posinfo.getLineplanningnumber() == null || "".equals(posinfo.getLineplanningnumber())){
                    continue;
                }
                String id = getId(posinfo,null);
                JourneyProcessor jp = getOrCreateProcessorForId(id);
                //TODO Fuzzy match for BISON Journey
                if (jp == null){
                    // Skip positions for operating days older than the loaded schedule.
                    LocalDate serviceDay = LocalDate.parse(posinfo.getOperatingday());
                    if (_ridService.getFromDate() > serviceDay.toDateTimeAtStartOfDay().getMillis()){
                        continue;
                    }
                    DateTime now = DateTime.now();
                    // CXX reports the calendar date instead of the operating day
                    // before ~07:00; retry with the previous day.
                    if (posinfo.getDataownercode() == DataOwnerCode.CXX && now.getHourOfDay() < 7){//Connexxion operday fuckup workaround
                        posinfo.setOperatingday(serviceDay.minusDays(1).toString());
                        id = getId(posinfo,null);
                        jp = getOrCreateProcessorForId(id);
                    }
                    if (jp == null){ //Double check for the CXX workaround
                        _log.info("Journey {} not found",id);
                        continue; //Trip not in database
                    }
                }
                // END message: the vehicle finished; delete its position entity.
                if (posinfo.getMessagetype() == Type.END){
                    if (posinfo.getReinforcementnumber() == 0)
                        jp.clearKV6(); //Primary vehicle finished
                    else if (jp.getReinforcements().containsKey(posinfo.getReinforcementnumber()))
                        jp.getReinforcements().remove(posinfo.getReinforcementnumber()); //Remove reinforcement
                    vehicleUpdates.addDeletedEntity(getId(posinfo,posinfo.getReinforcementnumber()));
                }
                FeedEntity vehiclePosition = jp.vehiclePosition(getId(posinfo,posinfo.getReinforcementnumber()),jp,posinfo,_ridService,_geometryService);
                if (vehiclePosition != null){
                    vehicleUpdates.addUpdatedEntity(vehiclePosition);
                    // Remember reinforcement positions so the garbage collector can expire them.
                    if (posinfo.getReinforcementnumber() > 0){
                        jp.getReinforcements().put(posinfo.getReinforcementnumber(), posinfo);
                    }
                }
                if (posinfo.getReinforcementnumber() == 0){ //Primary vehicle, BISON can currently not yet support schedules for reinforcments
                    try{
                        Update update = jp.update(posinfo);
                        if (update != null){
                            if (update.getChangedPasstimes() != null && _kv78TurboExporter != null){
                                _kv78TurboExporter.export(update.getChangedPasstimes());
                            }
                            if (update.getServiceInfo() != null && _arnuExporter != null){
                                _arnuExporter.export(update.getServiceInfo());
                            }
                            if (update.getGtfsRealtimeTrip() != null){
                                TripUpdate.Builder tripUpdate = update.getGtfsRealtimeTrip();
                                FeedEntity.Builder tripEntity = FeedEntity.newBuilder();
                                tripEntity.setId(id);
                                tripEntity.setTripUpdate(tripUpdate); //Get update created from KV6
                                tripUpdates.addUpdatedEntity(tripEntity.build());
                            }
                        }
                    }catch (TooOldException e){
                        _log.info("Trip {} Too old: {}", id,posinfo);
                    }catch (StopNotFoundException e){
                        _log.info("Trip {} userstop {} not found", id,posinfo.getUserstopcode());
                    }catch (TooEarlyException e){
                        _log.trace("Trip {} punctuality too early {}", id,posinfo);
                    } catch (UnknownKV6PosinfoType e) {
                        _log.info("Trip {} unknown Posinfotype {}", id,posinfo);
                    }
                }
            }catch (Exception e){
                // Keep processing the rest of the batch if one position fails.
                e.printStackTrace(System.err);
                _log.error("Exception {}",posinfo,e);
            }
        }
        // Only push incremental updates that carry changes.
        if (vehicleUpdates.getDeletedEntities().size() > 0 || vehicleUpdates.getUpdatedEntities().size() > 0)
            _vehiclePositionsSink.handleIncrementalUpdate(vehicleUpdates);
        if (tripUpdates.getDeletedEntities().size() > 0 || tripUpdates.getUpdatedEntities().size() > 0)
            _tripUpdatesSink.handleIncrementalUpdate(tripUpdates);
    }
}
/**
 * Processes a batch of KV17 (schedule change) records: groups them per journey,
 * applies each group to its JourneyProcessor, and exports the resulting updates
 * to the KV78Turbo, ARNU and GTFS-RT trip-update sinks.
 */
private class ProcessKV17Task implements Runnable{
    private ArrayList<KV17cvlinfo> cvlinfos;
    public ProcessKV17Task(ArrayList<KV17cvlinfo> cvlinfos){
        this.cvlinfos = cvlinfos;
    }
    @Override
    public void run() {
        HashMap<String,ArrayList<KV17cvlinfo>> map = new HashMap<String,ArrayList<KV17cvlinfo>>();
        GtfsRealtimeIncrementalUpdate tripUpdates = new GtfsRealtimeIncrementalUpdate();
        try{
            // Group the records by journey id so each journey is updated once.
            for (KV17cvlinfo cvlinfo : cvlinfos){
                String id = String.format("%s:%s:%s:%s", cvlinfo.getOperatingday(),cvlinfo.getDataownercode().name(),cvlinfo.getLineplanningnumber(),cvlinfo.getJourneynumber());
                if (!map.containsKey(id)){
                    map.put(id, new ArrayList<KV17cvlinfo>());
                }
                map.get(id).add(cvlinfo);
            }
            // Iterate entries directly (was keySet() + get(), which also made the
            // local `cvlinfos` shadow the field of the same name).
            for (Entry<String, ArrayList<KV17cvlinfo>> journeyEntry : map.entrySet()){
                String id = journeyEntry.getKey();
                JourneyProcessor jp = getOrCreateProcessorForId(id);
                if (jp == null){ //Double check for the CXX workaround
                    _log.info("Journey {} not found",id);
                    continue; //Trip not in database
                }
                Update update = jp.update(journeyEntry.getValue());
                if (update != null){
                    if (update.getChangedPasstimes() != null){
                        _kv78TurboExporter.export(update.getChangedPasstimes());
                    }
                    if (update.getServiceInfo() != null){
                        _arnuExporter.export(update.getServiceInfo());
                    }
                    if (update.getGtfsRealtimeTrip() != null){
                        TripUpdate.Builder tripUpdate = update.getGtfsRealtimeTrip();
                        FeedEntity.Builder entity = FeedEntity.newBuilder();
                        entity.setTripUpdate(tripUpdate);
                        entity.setId(id);
                        tripUpdates.addUpdatedEntity(entity.build());
                    }
                }
            }
        }catch (Exception e){
            _log.error("ProcessKV17Task",e);
        }
        // Only push an incremental update when it carries changes.
        if (tripUpdates.getDeletedEntities().size() > 0 || tripUpdates.getUpdatedEntities().size() > 0)
            _tripUpdatesSink.handleIncrementalUpdate(tripUpdates);
    }
}
// Hands off KV6 vehicle-position processing to the worker executor so the
// ZMQ receive loop is never blocked by (potentially slow) journey updates.
void process(ArrayList<KV6posinfo> posinfos){
    _executor.submit(new ProcessKV6Task(posinfos));
}
/**
 * Returns true when the given string starts with a Unicode byte order mark
 * (U+FEFF). XML payloads received over the wire may carry a UTF-8 BOM
 * (bytes EF BB BF, decoding to the single char 65279) which the SAX parser
 * rejects, so callers strip it before parsing.
 *
 * @param line the string to inspect; may be null or empty
 * @return true if the first character is U+FEFF; false otherwise, including
 *         for null or empty input (the original threw
 *         StringIndexOutOfBoundsException on an empty string)
 */
public boolean startsWithBom(String line) {
    return line != null && !line.isEmpty() && line.charAt(0) == '\uFEFF';
}
private class ProcessTask implements Runnable {
int messagecounter = 0;
@Override
public void run() {
SAXParserFactory spf = SAXParserFactory.newInstance();
spf.setNamespaceAware(true);
SAXParser sp;
XMLReader xr = null;
try {sp = spf.newSAXParser();
xr = sp.getXMLReader();} catch (Exception e) {return;}
Context context = ZMQ.context(1);
Socket pull = context.socket(ZMQ.PULL);
pull.setRcvHWM(500000);
final String PULL_ADDRESS = "tcp://127.0.0.1:"+INPROC_PORT;
pull.connect(PULL_ADDRESS);
while (!Thread.interrupted()) {
messagecounter++;
if (messagecounter % 1000 == 0){
_log.debug(messagecounter + " BISON messages received");
}
try {
String[] m = ZeroMQUtils.gunzipMultifameZMsg(ZMsg.recvMsg(pull));
if (startsWithBom(m[1])){
m[1] = m[1].substring(1);
}
if (m[0].toLowerCase().endsWith("kv6posinfo")) {
InputSource s = new InputSource(new StringReader(m[1]));
s.setEncoding("UTF-8");
KV6SAXHandler handler = new KV6SAXHandler();
xr.setContentHandler(handler);
try {
xr.parse(s);
process(handler.getPosinfos());
} catch (Exception e) {
_log.error("KV6 parsing {}",m[1],e);
}
} else if (m[0].toLowerCase().endsWith("kv17cvlinfo")) {
InputSource s = new InputSource(new StringReader(m[1]));
s.setEncoding("UTF-8");
KV17SAXHandler handler = new KV17SAXHandler();
xr.setContentHandler(handler);
try {
xr.parse(s);
_executor.submit(new ProcessKV17Task(handler.getCvlinfos()));
} catch (Exception e) {
_log.error("KV17 parsing {}",m[1],e);
}
} else if (m[0].toLowerCase().endsWith("kv15messages")) {
InputSource s = new InputSource(new StringReader(m[1]));
s.setEncoding("UTF-8");
KV15SAXHandler handler = new KV15SAXHandler();
xr.setContentHandler(handler);
try {
xr.parse(s);
_executor.submit(new ProcessKV15Task(handler.getMessages()));
} catch (Exception e) {
_log.error("KV15 parsing {}",m[1]);
}
} else {
_log.error("Unknown URL {}",m[0]);
}
} catch (IOException e) {
e.printStackTrace();
}
}
_log.error("BisonToGtfsRealtime service interrupted");
pull.disconnect(PULL_ADDRESS);
}
}
private final static String INPROC_PORT = "51545";
private class ReceiveTask implements Runnable {
@Override
public void run() {
Context context = ZMQ.context(1);
Socket subscriber = context.socket(ZMQ.SUB);
subscriber.connect(pubAdress);
subscriber.subscribe("".getBytes());
Socket push = context.socket(ZMQ.PUSH);
push.setSndHWM(500000);
push.bind("tcp://*:"+INPROC_PORT);
_log.info("Connect to {}",pubAdress);
org.zeromq.ZMQ.Poller poller = context.poller();
poller.register(subscriber);
while (!Thread.interrupted()) {
if (poller.poll(TimeUnit.MINUTES.toMillis(5L)) > 0){
try{
ZMsg.recvMsg(subscriber).send(push);
} catch (Exception e) {
_log.error("Error in bison receiving",e);
e.printStackTrace();
}
}else{
subscriber.disconnect(pubAdress);
subscriber.connect(pubAdress);
_log.error("Connection to {} lost, reconnecting",pubAdress);
subscriber.subscribe("".getBytes());
}
}
subscriber.disconnect(pubAdress);
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2006-2010 eBay Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*******************************************************************************/
package org.ebayopensource.turmeric.runtime.sif.impl.internal.config;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.logging.Level;
import javax.xml.namespace.QName;
import org.ebayopensource.turmeric.runtime.binding.utils.CollectionUtils;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.CommonConfigHolder;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.ConfigUtils;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.MessageProcessorConfigHolder;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.MetadataPropertyConfigHolder;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.OperationPropertyConfigHolder;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.OptionList;
import org.ebayopensource.turmeric.runtime.common.impl.internal.config.TypeMappingConfigHolder;
import org.ebayopensource.turmeric.runtime.common.impl.utils.LogManager;
import org.ebayopensource.turmeric.runtime.common.monitoring.ErrorStatusOptions;
import org.ebayopensource.turmeric.runtime.common.monitoring.MonitoringLevel;
import org.ebayopensource.turmeric.runtime.common.pipeline.TransportOptions;
public class ClientConfigHolder extends CommonConfigHolder {
// Invocation options
// When adding properties, please ensure that copy(), dump(), and getters/setters
// are all covered for the new properties. Make sure setters call checkReadOnly().
private Integer m_appLevelNumRetries;
private Boolean m_useREST;
private Integer m_maxURLLengthForREST;
private String m_retryHandlerClass;
private Boolean m_markdownEnabled;
private Integer m_markdownErrCountThreshold;
private String m_markdownStateFactoryClass;
private String m_preferredEncoding;
private String m_preferredLocale;
private String m_requestDataBinding;
private String m_responseDataBinding;
private String m_preferredTransport;
private String m_serviceLocation;
private Map<String, String> m_serviceLocationMap = CollectionUtils.EMPTY_STRING_MAP;
private String m_wsdlLocation;
private String m_requestPayloadLog;
private String m_requestPayloadCalLog;
private String m_responsePayloadLog;
private String m_responsePayloadCalLog;
private TransportOptions m_transportOverrideOptions = new TransportOptions();
private Map<String, String> m_transportOverrideHeaderOptions = new HashMap<String, String>();
private String m_serviceVersion;
private String m_invocationUseCase;
private String m_consumerId;
private String m_preferredGlobalId;
private String m_responseTransport;
private String m_messageProtocol;
private String m_customErrorResponseAdapter;
private String m_errorDataProviderClass;
private String m_cacheProviderClass;
private Boolean m_disableCacheOnLocal = Boolean.TRUE; // Default is to skip cache
private Boolean m_skipCacheOnError = Boolean.FALSE; // Default to continue on error
private Collection<String> m_retryTransportStatusCodes = CollectionUtils.EMPTY_STRING_SET;
private Collection<String> m_retryExceptionClasses = CollectionUtils.EMPTY_STRING_SET;
private Collection<String> m_retryErrorIds = CollectionUtils.EMPTY_STRING_SET;
private Collection<String> m_markdownTransportStatusCodes = CollectionUtils.EMPTY_STRING_SET;
private Collection<String> m_markdownExceptionClasses = CollectionUtils.EMPTY_STRING_SET;
private Collection<String> m_markdownErrorIds = CollectionUtils.EMPTY_STRING_SET;
private String m_clientName; // The name of the client, which was used as the folder name when looking up the config file.
private String m_envName; // the name of the environment, which was used as a folder name for cc.xml. null value for old cc.xml
private String m_urlPathInfo;
private static final char NL = '\n';
private boolean m_ignoreServiceVersion = false;
/**
 * Copy constructor: builds a new holder from an existing one, overriding the
 * client/environment names when non-null arguments are supplied. Collections
 * are deep-copied so the new holder is independent of the source.
 * Note: the copy is created writable (m_readOnly = false) and with
 * m_ignoreServiceVersion forced to true.
 */
public ClientConfigHolder(ClientConfigHolder configToCopy, String adminName, String clientName,String envName,QName serviceQName,
    MessageProcessorConfigHolder messageProcessorConfig,
    MetadataPropertyConfigHolder metadata,
    TypeMappingConfigHolder typeMappings,
    OperationPropertyConfigHolder operationProperties,
    String serviceInterfaceClassName,
    MonitoringLevel monitoringLevel,
    OptionList requestHeaderMappingOptions,
    OptionList responseHeaderMappingOptions,
    ErrorStatusOptions errorStatusOptions) {
    super(configToCopy, adminName, serviceQName, messageProcessorConfig,
        metadata, typeMappings, operationProperties,
        serviceInterfaceClassName, monitoringLevel,
        requestHeaderMappingOptions, responseHeaderMappingOptions,
        errorStatusOptions);
    m_clientName = clientName == null ? configToCopy.m_clientName
        : clientName;
    m_envName = envName == null ? configToCopy.m_envName : envName;
    m_readOnly = false;
    m_appLevelNumRetries = configToCopy.m_appLevelNumRetries;
    m_useREST = configToCopy.m_useREST;
    m_maxURLLengthForREST = configToCopy.m_maxURLLengthForREST;
    m_retryHandlerClass = configToCopy.m_retryHandlerClass;
    m_markdownEnabled = configToCopy.m_markdownEnabled;
    m_markdownErrCountThreshold = configToCopy.m_markdownErrCountThreshold;
    m_markdownStateFactoryClass = configToCopy.m_markdownStateFactoryClass;
    m_preferredEncoding = configToCopy.m_preferredEncoding;
    m_preferredLocale = configToCopy.m_preferredLocale;
    m_requestDataBinding = configToCopy.m_requestDataBinding;
    m_responseDataBinding = configToCopy.m_responseDataBinding;
    m_preferredTransport = configToCopy.m_preferredTransport;
    m_serviceLocation = configToCopy.m_serviceLocation;
    m_serviceLocationMap = new HashMap<String, String>(configToCopy.m_serviceLocationMap);
    m_wsdlLocation = configToCopy.m_wsdlLocation;
    m_requestPayloadLog = configToCopy.m_requestPayloadLog;
    m_requestPayloadCalLog = configToCopy.m_requestPayloadCalLog;
    m_responsePayloadLog = configToCopy.m_responsePayloadLog;
    m_responsePayloadCalLog = configToCopy.m_responsePayloadCalLog;
    m_transportOverrideOptions = ConfigUtils
        .copyTransportOptions(configToCopy.m_transportOverrideOptions);
    // BUG FIX: header options were not copied here, although copy() copies them.
    m_transportOverrideHeaderOptions = new HashMap<String, String>(
        configToCopy.m_transportOverrideHeaderOptions);
    m_serviceVersion = configToCopy.m_serviceVersion;
    m_invocationUseCase = configToCopy.m_invocationUseCase;
    m_consumerId = configToCopy.m_consumerId;
    m_preferredGlobalId = configToCopy.m_preferredGlobalId;
    m_responseTransport = configToCopy.m_responseTransport;
    m_messageProtocol = configToCopy.m_messageProtocol;
    m_customErrorResponseAdapter = configToCopy.m_customErrorResponseAdapter;
    // BUG FIX: the error data provider class was silently dropped by the copy.
    m_errorDataProviderClass = configToCopy.m_errorDataProviderClass;
    m_retryTransportStatusCodes = new HashSet<String>(
        configToCopy.m_retryTransportStatusCodes);
    m_retryExceptionClasses = new HashSet<String>(configToCopy.m_retryExceptionClasses);
    m_retryErrorIds = new HashSet<String>(configToCopy.m_retryErrorIds);
    m_markdownTransportStatusCodes = new HashSet<String>(
        configToCopy.m_markdownTransportStatusCodes);
    m_markdownExceptionClasses = new HashSet<String>(
        configToCopy.m_markdownExceptionClasses);
    m_markdownErrorIds = new HashSet<String>(configToCopy.m_markdownErrorIds);
    m_urlPathInfo = configToCopy.m_urlPathInfo;
    m_cacheProviderClass = configToCopy.m_cacheProviderClass;
    m_disableCacheOnLocal = configToCopy.m_disableCacheOnLocal;
    m_skipCacheOnError = configToCopy.m_skipCacheOnError;
    m_ignoreServiceVersion = true;
}
/**
 * Creates a holder for the given admin name, client name and environment name.
 * All invocation options start at their field defaults.
 *
 * @param adminName  the admin (service) name passed to the base config holder
 * @param clientName the client name (the config folder name); may be null
 * @param envName    the environment name (folder for cc.xml); null for legacy layouts
 */
public ClientConfigHolder(String adminName, String clientName,String envName) {
    super(adminName);
    m_clientName = clientName;
    m_envName = envName;
}
/**
 * Creates a deep copy of this holder. Collections and transport options are
 * duplicated so the copy is independent; the copy is created writable.
 *
 * @return a new, writable ClientConfigHolder with identical configuration
 */
public ClientConfigHolder copy() {
    ClientConfigHolder newCH = new ClientConfigHolder(getAdminName(), m_clientName,m_envName);
    newCH.m_readOnly = false;
    newCH.copyMemberData(this);
    newCH.m_appLevelNumRetries = m_appLevelNumRetries;
    newCH.m_useREST = m_useREST;
    newCH.m_maxURLLengthForREST = m_maxURLLengthForREST;
    newCH.m_retryHandlerClass = m_retryHandlerClass;
    newCH.m_markdownEnabled = m_markdownEnabled;
    newCH.m_markdownErrCountThreshold = m_markdownErrCountThreshold;
    newCH.m_markdownStateFactoryClass = m_markdownStateFactoryClass;
    newCH.m_preferredEncoding = m_preferredEncoding;
    newCH.m_preferredLocale = m_preferredLocale;
    newCH.m_requestDataBinding = m_requestDataBinding;
    newCH.m_responseDataBinding = m_responseDataBinding;
    newCH.m_preferredTransport = m_preferredTransport;
    newCH.m_serviceLocation = m_serviceLocation;
    newCH.m_serviceLocationMap = new HashMap<String, String>(m_serviceLocationMap);
    newCH.m_wsdlLocation = m_wsdlLocation;
    newCH.m_requestPayloadLog = m_requestPayloadLog;
    newCH.m_requestPayloadCalLog = m_requestPayloadCalLog;
    newCH.m_responsePayloadLog = m_responsePayloadLog;
    newCH.m_responsePayloadCalLog = m_responsePayloadCalLog;
    newCH.m_transportOverrideOptions = ConfigUtils.copyTransportOptions(m_transportOverrideOptions);
    newCH.m_transportOverrideHeaderOptions = new HashMap<String, String>(m_transportOverrideHeaderOptions);
    newCH.m_serviceVersion = m_serviceVersion;
    newCH.m_invocationUseCase = m_invocationUseCase;
    newCH.m_consumerId = m_consumerId;
    newCH.m_preferredGlobalId = m_preferredGlobalId;
    newCH.m_responseTransport = m_responseTransport;
    newCH.m_messageProtocol = m_messageProtocol;
    newCH.m_customErrorResponseAdapter = m_customErrorResponseAdapter;
    // BUG FIX: the error data provider class was lost on copy.
    newCH.m_errorDataProviderClass = m_errorDataProviderClass;
    newCH.m_retryTransportStatusCodes = new HashSet<String>(m_retryTransportStatusCodes);
    newCH.m_retryExceptionClasses = new HashSet<String>(m_retryExceptionClasses);
    newCH.m_retryErrorIds = new HashSet<String>(m_retryErrorIds);
    newCH.m_markdownTransportStatusCodes = new HashSet<String>(m_markdownTransportStatusCodes);
    newCH.m_markdownExceptionClasses = new HashSet<String>(m_markdownExceptionClasses);
    newCH.m_markdownErrorIds = new HashSet<String>(m_markdownErrorIds);
    newCH.m_urlPathInfo = m_urlPathInfo;
    newCH.m_cacheProviderClass = m_cacheProviderClass;
    newCH.m_disableCacheOnLocal = m_disableCacheOnLocal;
    newCH.m_skipCacheOnError = m_skipCacheOnError;
    return newCH;
}
/**
* Returns the application level number of retries configured.
*/
public Integer getAppLevelNumRetries() {
return m_appLevelNumRetries;
}
/**
* Sets the application level number of retries to attempt (per failed, retryable invocation)
* @param numRetries the m_numAppLevelRetries to set
*/
public void setAppLevelNumRetries(Integer numRetries) {
checkReadOnly();
m_appLevelNumRetries = numRetries;
}
/**
* Returns the m_useREST
*/
public Boolean getUseREST() {
return m_useREST;
}
/**
* @param useREST the m_useREST to set
*/
public void setUseREST(Boolean useREST) {
checkReadOnly();
m_useREST = useREST;
}
/**
* Returns the m_maxURLLengthForREST
*/
public Integer getMaxURLLengthForREST() {
return m_maxURLLengthForREST;
}
/**
* @param maxURLLengthForREST the m_maxURLLengthForREST to set
*/
public void setMaxURLLengthForREST(Integer maxURLLengthForREST) {
checkReadOnly();
m_maxURLLengthForREST = maxURLLengthForREST;
}
/**
* Returns the class name of the application retry handler
*/
public String getRetryHandlerClass() {
return m_retryHandlerClass;
}
/**
* Sets the class name of the application retry handler
* @param m_retryHandlerClass the name of the retry handler class to set
*/
public void setRetryHandlerClass(String retryHandlerClass) {
checkReadOnly();
m_retryHandlerClass = retryHandlerClass;
}
/**
* Returns the auto markdown error count threshold configured.
*/
public Boolean getMarkdownEnabled() {
return m_markdownEnabled;
}
/**
* Sets the auto markdown error count threshold
*/
public void setMarkdownEnabled(Boolean value) {
checkReadOnly();
m_markdownEnabled = value;
}
/**
* Returns the auto markdown error count threshold configured.
*/
public Integer getMarkdownErrCountThreshold() {
return m_markdownErrCountThreshold;
}
/**
* Sets the auto markdown error count threshold
*/
public void setMarkdownErrCountThreshold(Integer value) {
checkReadOnly();
m_markdownErrCountThreshold = value;
}
/**
* Returns the class name of the auto markdown state factory
*/
public String getMarkdownStateFactoryClass() {
return m_markdownStateFactoryClass;
}
/**
* Sets the class name of the auto markdown state factory
*/
public void setMarkdownStateFactoryClass(String value) {
checkReadOnly();
m_markdownStateFactoryClass = value;
}
/**
* Returns the preferred character set encoding to be used by this client
*/
public String getPreferredEncoding() {
return m_preferredEncoding;
}
/**
* Sets the preferred character set encoding to be used by this client
* @param encoding the encoding to set
*/
public void setPreferredEncoding(String encoding) {
checkReadOnly();
m_preferredEncoding = encoding;
}
/**
* Returns the preferred locale to be used by this client
*/
public String getPreferredLocale() {
return m_preferredLocale;
}
/**
* Sets the preferred locale to be used by this client
* @param locale the preferred locale
*/
public void setPreferredLocale(String locale) {
checkReadOnly();
m_preferredLocale = locale;
}
/**
* Returns the request data binding to be used by this client
*/
public String getRequestDataBinding() {
return m_requestDataBinding;
}
/**
* Sets the request data binding to be used by this client
* @param prefBinding the data binding
*/
public void setRequestDataBinding(String prefBinding) {
checkReadOnly();
m_requestDataBinding = prefBinding;
}
/**
* Returns the response data binding to be used by this client
*/
public String getResponseDataBinding() {
return m_responseDataBinding;
}
/**
* Sets the response data binding to be used by this client
* @param prefBinding the data binding
*/
public void setResponseDataBinding(String prefBinding) {
checkReadOnly();
m_responseDataBinding = prefBinding;
}
/**
* Returns the preferred transport to be used by this client
*/
public String getPreferredTransport() {
return m_preferredTransport;
}
/**
* Sets the preferred transport to be used by this client
* @param prefBinding the preferred transport
*/
public void setPreferredTransport(String prefTransport) {
checkReadOnly();
m_preferredTransport = prefTransport;
}
/**
 * Returns the transport override options associated with this client
 * configuration. When the holder is read-only, a defensive copy is returned
 * so callers cannot mutate the frozen configuration; otherwise the live
 * instance is returned.
 */
public TransportOptions getTransportOverrideOptions() {
    if (isReadOnly()) {
        return ConfigUtils.copyTransportOptions(m_transportOverrideOptions);
    }
    return m_transportOverrideOptions;
}
/**
 * Returns the transport override header options. When the holder is
 * read-only, a defensive copy is returned so callers cannot mutate the
 * frozen configuration; otherwise the live map is returned.
 */
public Map<String, String> getTransportOverrideHeaderOptions() {
    if (isReadOnly()) {
        return new HashMap<String, String>(m_transportOverrideHeaderOptions);
    }
    return m_transportOverrideHeaderOptions;
}
/**
 * Sets the transport override options.
 *
 * @param options the transport override options to set
 * @throws IllegalStateException if this holder is read-only (via checkReadOnly)
 */
public void setTransportOverrideOptions(TransportOptions options) {
    // BUG FIX: this was the only setter not enforcing the read-only guard,
    // allowing a frozen configuration to be mutated.
    checkReadOnly();
    m_transportOverrideOptions = options;
}
public String getServiceVersion() {
return m_serviceVersion;
}
public void setServiceVersion(String version) {
checkReadOnly();
m_serviceVersion = version;
}
public Collection<String> getRetryTransportStatusCodes() {
if (isReadOnly()) {
return new HashSet<String>(m_retryTransportStatusCodes);
}
return m_retryTransportStatusCodes;
}
public void setRetryTransportStatusCodes(Collection<String> retryTransportStatusCodes) {
checkReadOnly();
m_retryTransportStatusCodes = retryTransportStatusCodes;
}
public Collection<String> getRetryExceptionClasses() {
if (isReadOnly()) {
return new HashSet<String>(m_retryExceptionClasses);
}
return m_retryExceptionClasses;
}
public void setRetryExceptionClasses(Collection<String> retryExceptionClasses) {
checkReadOnly();
m_retryExceptionClasses = retryExceptionClasses;
}
public Collection<String> getRetryErrorIds() {
if (isReadOnly()) {
return new HashSet<String>(m_retryErrorIds);
}
return m_retryErrorIds;
}
public void setRetryErrorIds(Collection<String> errorIds) {
checkReadOnly();
m_retryErrorIds = errorIds;
}
public Collection<String> getMarkdownTransportStatusCodes() {
if (isReadOnly()) {
return new HashSet<String>(m_markdownTransportStatusCodes);
}
return m_markdownTransportStatusCodes;
}
public void setMarkdownTransportStatusCodes(Collection<String> value) {
checkReadOnly();
m_markdownTransportStatusCodes = value;
}
public Collection<String> getMarkdownExceptionClasses() {
if (isReadOnly()) {
return new HashSet<String>(m_markdownExceptionClasses);
}
return m_markdownExceptionClasses;
}
public void setMarkdownExceptionClasses(Collection<String> value) {
checkReadOnly();
m_markdownExceptionClasses = value;
}
public Collection<String> getMarkdownErrorIds() {
if (isReadOnly()) {
return new HashSet<String>(m_markdownErrorIds);
}
return m_markdownErrorIds;
}
public void setMarkdownErrorIds(Set<String> value) {
checkReadOnly();
m_markdownErrorIds = value;
}
/**
* Returns the consumerId
*/
public String getConsumerId() {
return m_consumerId;
}
/**
* @param consumerId the consumer id to set
*/
public void setConsumerId(String consumerId) {
checkReadOnly();
m_consumerId = consumerId;
}
/**
* Returns the invocation use case
*/
public String getInvocationUseCase() {
return m_invocationUseCase;
}
/**
* @param useCase the invocation use case to set
*/
public void setInvocationUseCase(String useCase) {
checkReadOnly();
m_invocationUseCase = useCase;
}
/**
* Returns the invocation use case
*/
public String getCustomErrorResponseAdapter() {
return m_customErrorResponseAdapter;
}
/**
* @param useCase the invocation use case to set
*/
public void setCustomErrorResponseAdapter(String value) {
checkReadOnly();
m_customErrorResponseAdapter = value;
}
/**
* Returns the error data provider class name
*/
public String getErrorDataProviderClass() {
return m_errorDataProviderClass;
}
/**
* @param value the error data provider class name to set
*/
public void setErrorDataProviderClass(String value) {
checkReadOnly();
m_errorDataProviderClass = value;
}
/**
* Returns the cache provider class name
*/
public String getCacheProviderClass() {
return m_cacheProviderClass;
}
/**
* @param value the cache provider class name to set
*/
public void setCacheProviderClass(String value) {
checkReadOnly();
m_cacheProviderClass = value;
}
/**
* Returns if cache is to be skipped on local transport
*/
public Boolean isCacheDisabledOnLocal() {
return m_disableCacheOnLocal;
}
/**
* @param value set cache skip on local flag
*/
public void setCacheDisabledOnLocal(Boolean value) {
checkReadOnly();
m_disableCacheOnLocal = value;
}
/**
* Returns if cache is to be skipped on errors during provider init
*/
public Boolean isSkipCacheOnError() {
return m_skipCacheOnError;
}
/**
* @param value set cache skip on local flag
*/
public void setSkipCacheOnError(Boolean value) {
checkReadOnly();
m_skipCacheOnError = value;
}
/**
* Returns the m_preferredGlobalId
*/
public String getPreferredGlobalId() {
return m_preferredGlobalId;
}
/**
* @param globalId the m_preferredGlobalId to set
*/
public void setPreferredGlobalId(String globalId) {
checkReadOnly();
m_preferredGlobalId = globalId;
}
public String getMessageProtocol() {
return m_messageProtocol;
}
public void setMessageProtocol(String protocol) {
checkReadOnly();
m_messageProtocol = protocol;
}
public String getResponseTransport() {
return m_responseTransport;
}
public void setResponseTransport(String transport) {
checkReadOnly();
m_responseTransport = transport;
}
/**
* Returns the m_serviceLocation
*/
public String getServiceLocation() {
return m_serviceLocation;
}
/**
* @param location the m_serviceLocation to set
*/
public void setServiceLocation(String location) {
checkReadOnly();
m_serviceLocation = location;
}
/**
* Returns the m_serviceLocationMap
*/
public Map<String, String> getServiceLocationMap() {
if (isReadOnly())
return new HashMap<String,String>(m_serviceLocationMap);
return m_serviceLocationMap;
}
/**
* @param locationMap the m_serviceLocationMap to set
*/
public void setServiceLocationMap(Map<String, String> locationMap) {
checkReadOnly();
m_serviceLocationMap = locationMap;
}
/**
*
* @return environment name used for a cc.xml
*/
public String getEnvName() {
return m_envName;
}
/**
 * Sets the service location from the location map for the given environment.
 * If no mapping exists for the environment, the current service location is
 * left unchanged and a SEVERE message is logged.
 *
 * @param env the environment key used to look up the serviceLocation in the
 *            location mappings
 * @return the mapped location URL, or null when no mapping was found
 */
public String setServiceLocationFromLocationMapping(String env) {
    String locationUrl = m_serviceLocationMap.get(env);
    if (locationUrl != null) {
        setServiceLocation(locationUrl);
    } else {
        LogManager.getInstance(this.getClass()).log(Level.SEVERE,
            "No Service location mapped for " + env );
    }
    return locationUrl;
}
/**
* Returns the m_wsdlLocation
*/
public String getWsdlLocation() {
return m_wsdlLocation;
}
/**
* @param location the m_wsdlLocation to set
*/
public void setWsdlLocation(String location) {
checkReadOnly();
m_wsdlLocation = location;
}
public String getClientName() {
return m_clientName;
}
public void setClientName(String name) {
checkReadOnly();
m_clientName = name;
}
/**
* Returns the URI Path
*/
public String getUrlPathInfo() {
return m_urlPathInfo;
}
/**
* Sets the URI Path
* @param urlPathInfo the URI Path
*/
public void setUrlPathInfo(String urlPathInfo) {
checkReadOnly();
m_urlPathInfo = urlPathInfo;
}
/**
 * Appends a human-readable dump of this configuration to the given buffer,
 * one "key=value" entry per line. Only non-null / non-empty values are printed.
 */
@Override
public void dump(StringBuffer sb) {
    super.dump(sb);
    if (m_clientName != null) {
        sb.append("clientName="+m_clientName+NL);
    }
    if (m_envName != null) {
        // BUG FIX: the trailing newline was missing, merging this entry with the next line.
        sb.append("envName="+m_envName+NL);
    }
    sb.append("========== Invocation Options =========="+NL);
    if (m_preferredLocale != null) {
        sb.append("preferredLocale="+m_preferredLocale+NL);
    }
    if (m_serviceLocation != null) {
        sb.append("serviceLocation="+m_serviceLocation+NL);
    }
    if (m_serviceLocationMap != null && !m_serviceLocationMap.isEmpty()) {
        sb.append("serviceLocationMapEntries=");
        ConfigUtils.dumpStringMap(sb, m_serviceLocationMap, "\t");
        sb.append(NL);
    }
    if (m_wsdlLocation != null) {
        sb.append("wsdlLocation="+m_wsdlLocation+NL);
    }
    if (m_useREST != null) {
        sb.append("useREST=" + m_useREST + NL);
    }
    if (m_maxURLLengthForREST != null) {
        sb.append("maxUrlLengthForREST=" + m_maxURLLengthForREST + NL);
    }
    if (m_requestPayloadLog != null) {
        sb.append("requestPayloadLog="+m_requestPayloadLog+NL);
    }
    if (m_requestPayloadCalLog != null) {
        // BUG FIX: was mislabeled "requestPayloadLog".
        sb.append("requestPayloadCalLog="+m_requestPayloadCalLog+NL);
    }
    if (m_responsePayloadLog != null) {
        sb.append("responsePayloadLog="+m_responsePayloadLog+NL);
    }
    if (m_responsePayloadCalLog != null) {
        // BUG FIX: was guarded on m_requestPayloadCalLog and mislabeled "responsePayloadLog".
        sb.append("responsePayloadCalLog="+m_responsePayloadCalLog+NL);
    }
    if (m_transportOverrideOptions != null) {
        sb.append("========== Transport Override Options =========="+NL);
        ConfigUtils.dumpTransportOptions(sb, m_transportOverrideOptions, "\t");
    }
    if (m_transportOverrideHeaderOptions != null && !m_transportOverrideHeaderOptions.isEmpty()) {
        sb.append("========== Transport Override Header Options =========="+NL);
        ConfigUtils.dumpStringMap(sb, m_transportOverrideHeaderOptions, "\t");
    }
    if (m_preferredEncoding != null) {
        sb.append("preferredEncoding="+m_preferredEncoding+NL);
    }
    sb.append("========== Retry options ==========" +NL);
    if (m_appLevelNumRetries != null) {
        sb.append("appLevelNumRetries="+m_appLevelNumRetries+NL);
    }
    if (m_retryHandlerClass != null) {
        sb.append("retryHandlerClass="+m_retryHandlerClass+NL);
    }
    if (m_retryTransportStatusCodes != null && !m_retryTransportStatusCodes.isEmpty()) {
        sb.append("retry transport status codes=");
        // TreeSet gives a stable, sorted ordering in the dump output.
        TreeSet<String> ss = new TreeSet<String>(m_retryTransportStatusCodes);
        ConfigUtils.<String>dumpList(sb, ss);
        sb.append(NL);
    }
    if (m_retryExceptionClasses != null && !m_retryExceptionClasses.isEmpty()) {
        sb.append("exception classes=");
        TreeSet<String> exceptionClasses = new TreeSet<String>(m_retryExceptionClasses);
        ConfigUtils.<String>dumpList(sb, exceptionClasses);
        sb.append(NL);
    }
    if (m_retryErrorIds != null && !m_retryErrorIds.isEmpty()) {
        sb.append("error ids=");
        ConfigUtils.dumpList(sb, m_retryErrorIds);
        sb.append(NL);
    }
    sb.append("========== Markdown options ==========" +NL);
    if (m_markdownEnabled != null) {
        sb.append("enabled="+m_markdownEnabled+NL);
    }
    if (m_markdownErrCountThreshold != null) {
        sb.append("errCountThreshold="+m_markdownErrCountThreshold+NL);
    }
    if (m_markdownStateFactoryClass != null) {
        sb.append("markdownStateFactoryClass="+m_markdownStateFactoryClass+NL);
    }
    if (m_markdownTransportStatusCodes != null && !m_markdownTransportStatusCodes.isEmpty()) {
        sb.append("transport status codes=");
        ConfigUtils.dumpList(sb, m_markdownTransportStatusCodes);
        sb.append(NL);
    }
    if (m_markdownExceptionClasses != null && !m_markdownExceptionClasses.isEmpty()) {
        sb.append("exception classes=");
        ConfigUtils.dumpList(sb, m_markdownExceptionClasses);
        sb.append(NL);
    }
    if (m_markdownErrorIds != null && !m_markdownErrorIds.isEmpty()) {
        sb.append("error ids=");
        ConfigUtils.dumpList(sb, m_markdownErrorIds);
        sb.append(NL);
    }
    sb.append("========== Data Bindings =========="+NL);
    if (m_requestDataBinding != null) {
        sb.append("requestDataBinding="+m_requestDataBinding+NL);
    }
    if (m_responseDataBinding != null) {
        sb.append("responseDataBinding="+m_responseDataBinding+NL);
    }
    if (m_preferredTransport != null) {
        sb.append("preferredTransport="+m_preferredTransport+NL);
    }
    if (m_serviceVersion != null) {
        sb.append("serviceVersion="+m_serviceVersion+NL);
    }
    if (m_invocationUseCase != null) {
        sb.append("invocationUseCase="+m_invocationUseCase+NL);
    }
    if (m_consumerId != null) {
        sb.append("consumerId="+m_consumerId+NL);
    }
    if (m_preferredGlobalId != null) {
        sb.append("preferredGlobalId="+m_preferredGlobalId+NL);
    }
    if (m_messageProtocol != null) {
        sb.append("messageProtocol="+m_messageProtocol+NL);
    }
    if (m_responseTransport != null) {
        sb.append("responseTransport="+m_responseTransport+NL);
    }
    if (m_customErrorResponseAdapter != null) {
        sb.append("customErrorResponseAdapter="+m_customErrorResponseAdapter+NL);
    }
    if (m_errorDataProviderClass != null) {
        sb.append("errorDataProviderClass="+m_errorDataProviderClass+NL);
    }
    if (m_cacheProviderClass != null) {
        sb.append("cacheProviderClass="+m_cacheProviderClass+NL);
    }
    if (m_disableCacheOnLocal != null) {
        sb.append("disableCacheOnLocal="+m_disableCacheOnLocal+NL);
    }
    if (m_skipCacheOnError != null) {
        sb.append("skipCacheOnError="+m_skipCacheOnError+NL);
    }
    if (m_urlPathInfo != null) {
        sb.append("urlPathInfo=" + m_urlPathInfo + NL);
    }
}
/**
 * Splits the configured preferred-locale string (entries separated by ';')
 * into a list, preserving order. Empty segments are skipped; an unset
 * preferred locale yields an empty list.
 */
public ArrayList<String> getPreferredLocaleSet() {
    ArrayList<String> locales = new ArrayList<String>();
    if (m_preferredLocale != null) {
        for (String segment : m_preferredLocale.split(";")) {
            // split() can emit empty segments (e.g. "a;;b"); StringTokenizer
            // never did, so filter them to keep behavior identical.
            if (!segment.isEmpty()) {
                locales.add(segment);
            }
        }
    }
    return locales;
}
/**
 * Returns whether the service version should be ignored. This is false by
 * default and set to true only by the copy constructor.
 */
public boolean isIgnoreServiceVersion() {
    return m_ignoreServiceVersion;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.Map;
import java.util.Random;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.ConstantExpression;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Divide;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.test.utils.GenRandomData;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
public class TestDivide {
    // Fixed seed keeps every generated operand deterministic across runs; in
    // particular the int/long divisors drawn below are known to be non-zero,
    // so the quotient computations in this test cannot themselves throw.
    Random r = new Random(42L);
    ConstantExpression lt, rt;
    Divide op = new Divide(new OperatorKey("", r.nextLong()));

    @Before
    public void setUp() throws Exception {
        lt = new ConstantExpression(new OperatorKey("", r.nextLong()));
        rt = new ConstantExpression(new OperatorKey("", r.nextLong()));
    }

    /**
     * Exercises the Divide physical operator for every Pig data type.
     * Numeric types (double, float, int, long) must return the quotient,
     * and a {@code null} result when either operand is {@code null} or the
     * divisor is zero; all non-numeric types must report
     * {@code POStatus.STATUS_ERR}.
     */
    @Test
    public void testOperator() throws ExecException {
        byte[] types = { DataType.BAG, DataType.BOOLEAN, DataType.BYTEARRAY, DataType.CHARARRAY,
                DataType.DOUBLE, DataType.FLOAT, DataType.INTEGER, DataType.LONG,
                DataType.DATETIME, DataType.MAP, DataType.TUPLE };
        System.out.println("Testing DIVIDE operator");
        for (byte type : types) {
            lt.setResultType(type);
            rt.setResultType(type);
            op.setLhs(lt);
            op.setRhs(rt);
            switch (type) {
            case DataType.BAG: {
                DataBag inpdb1 = GenRandomData.genRandSmallTupDataBag(r, 10, 100);
                DataBag inpdb2 = GenRandomData.genRandSmallTupDataBag(r, 10, 100);
                lt.setValue(inpdb1);
                rt.setValue(inpdb2);
                Result resdb = op.getNextDataBag();
                assertEquals(POStatus.STATUS_ERR, resdb.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpdb2);
                resdb = op.getNextDataBag();
                assertEquals(POStatus.STATUS_ERR, resdb.returnStatus);
                // test with null in rhs
                lt.setValue(inpdb1);
                rt.setValue(null);
                resdb = op.getNextDataBag();
                assertEquals(POStatus.STATUS_ERR, resdb.returnStatus);
                break;
            }
            case DataType.BOOLEAN: {
                Boolean inpb1 = r.nextBoolean();
                Boolean inpb2 = r.nextBoolean();
                lt.setValue(inpb1);
                rt.setValue(inpb2);
                Result resb = op.getNextBoolean();
                assertEquals(POStatus.STATUS_ERR, resb.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpb2);
                resb = op.getNextBoolean();
                assertEquals(POStatus.STATUS_ERR, resb.returnStatus);
                // test with null in rhs
                lt.setValue(inpb1);
                rt.setValue(null);
                resb = op.getNextBoolean();
                assertEquals(POStatus.STATUS_ERR, resb.returnStatus);
                break;
            }
            case DataType.BYTEARRAY: {
                DataByteArray inpba1 = GenRandomData.genRandDBA(r);
                DataByteArray inpba2 = GenRandomData.genRandDBA(r);
                lt.setValue(inpba1);
                rt.setValue(inpba2);
                Result resba = op.getNextDataByteArray();
                assertEquals(POStatus.STATUS_ERR, resba.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpba2);
                resba = op.getNextDataByteArray();
                assertEquals(POStatus.STATUS_ERR, resba.returnStatus);
                // test with null in rhs
                lt.setValue(inpba1);
                rt.setValue(null);
                resba = op.getNextDataByteArray();
                assertEquals(POStatus.STATUS_ERR, resba.returnStatus);
                break;
            }
            case DataType.CHARARRAY: {
                String inps1 = GenRandomData.genRandString(r);
                String inps2 = GenRandomData.genRandString(r);
                lt.setValue(inps1);
                rt.setValue(inps2);
                Result ress = op.getNextString();
                assertEquals(POStatus.STATUS_ERR, ress.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inps2);
                ress = op.getNextString();
                assertEquals(POStatus.STATUS_ERR, ress.returnStatus);
                // test with null in rhs
                lt.setValue(inps1);
                rt.setValue(null);
                ress = op.getNextString();
                assertEquals(POStatus.STATUS_ERR, ress.returnStatus);
                break;
            }
            case DataType.DOUBLE: {
                Double inpd1 = r.nextDouble();
                Double inpd2 = r.nextDouble();
                lt.setValue(inpd1);
                rt.setValue(inpd2);
                Result resd = op.getNextDouble();
                // Autoboxing replaces the deprecated Double(double) constructor.
                Double expected = inpd1 / inpd2;
                assertEquals(expected, (Double) resd.result);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpd2);
                resd = op.getNextDouble();
                assertNull(resd.result);
                // test with null in rhs
                lt.setValue(inpd1);
                rt.setValue(null);
                resd = op.getNextDouble();
                assertNull(resd.result);
                // test divide by 0
                lt.setValue(inpd1);
                rt.setValue(0.0);
                resd = op.getNextDouble();
                assertNull(resd.result);
                break;
            }
            case DataType.FLOAT: {
                Float inpf1 = r.nextFloat();
                Float inpf2 = r.nextFloat();
                lt.setValue(inpf1);
                rt.setValue(inpf2);
                Result resf = op.getNextFloat();
                Float expected = inpf1 / inpf2;
                assertEquals(expected, (Float) resf.result);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpf2);
                resf = op.getNextFloat();
                assertNull(resf.result);
                // test with null in rhs
                lt.setValue(inpf1);
                rt.setValue(null);
                resf = op.getNextFloat();
                assertNull(resf.result);
                // test divide by 0
                lt.setValue(inpf1);
                rt.setValue(0.0f);
                resf = op.getNextFloat();
                assertNull(resf.result);
                break;
            }
            case DataType.INTEGER: {
                Integer inpi1 = r.nextInt();
                Integer inpi2 = r.nextInt();
                lt.setValue(inpi1);
                rt.setValue(inpi2);
                Result resi = op.getNextInteger();
                Integer expected = inpi1 / inpi2;
                assertEquals(expected, (Integer) resi.result);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpi2);
                resi = op.getNextInteger();
                assertNull(resi.result);
                // test with null in rhs
                lt.setValue(inpi1);
                rt.setValue(null);
                resi = op.getNextInteger();
                assertNull(resi.result);
                // test divide by 0
                lt.setValue(inpi1);
                rt.setValue(0);
                resi = op.getNextInteger();
                assertNull(resi.result);
                break;
            }
            case DataType.LONG: {
                Long inpl1 = r.nextLong();
                Long inpl2 = r.nextLong();
                lt.setValue(inpl1);
                rt.setValue(inpl2);
                Result resl = op.getNextLong();
                Long expected = inpl1 / inpl2;
                assertEquals(expected, (Long) resl.result);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpl2);
                resl = op.getNextLong();
                assertNull(resl.result);
                // test with null in rhs
                lt.setValue(inpl1);
                rt.setValue(null);
                resl = op.getNextLong();
                assertNull(resl.result);
                // test divide by 0 (0L, not the easily-misread lowercase 0l)
                lt.setValue(inpl1);
                rt.setValue(0L);
                resl = op.getNextLong();
                assertNull(resl.result);
                break;
            }
            case DataType.DATETIME: {
                DateTime inpdt1 = new DateTime(r.nextLong());
                DateTime inpdt2 = new DateTime(r.nextLong());
                lt.setValue(inpdt1);
                rt.setValue(inpdt2);
                Result resdt = op.getNextDateTime();
                assertEquals(POStatus.STATUS_ERR, resdt.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpdt2);
                resdt = op.getNextDateTime();
                assertEquals(POStatus.STATUS_ERR, resdt.returnStatus);
                // test with null in rhs
                lt.setValue(inpdt1);
                rt.setValue(null);
                resdt = op.getNextDateTime();
                assertEquals(POStatus.STATUS_ERR, resdt.returnStatus);
                break;
            }
            case DataType.MAP: {
                Map<String, Object> inpm1 = GenRandomData.genRandMap(r, 10);
                Map<String, Object> inpm2 = GenRandomData.genRandMap(r, 10);
                lt.setValue(inpm1);
                rt.setValue(inpm2);
                Result resm = op.getNextMap();
                assertEquals(POStatus.STATUS_ERR, resm.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpm2);
                resm = op.getNextMap();
                assertEquals(POStatus.STATUS_ERR, resm.returnStatus);
                // test with null in rhs
                lt.setValue(inpm1);
                rt.setValue(null);
                resm = op.getNextMap();
                assertEquals(POStatus.STATUS_ERR, resm.returnStatus);
                break;
            }
            case DataType.TUPLE: {
                Tuple inpt1 = GenRandomData.genRandSmallBagTuple(r, 10, 100);
                Tuple inpt2 = GenRandomData.genRandSmallBagTuple(r, 10, 100);
                lt.setValue(inpt1);
                rt.setValue(inpt2);
                Result rest = op.getNextTuple();
                assertEquals(POStatus.STATUS_ERR, rest.returnStatus);
                // test with null in lhs
                lt.setValue(null);
                rt.setValue(inpt2);
                rest = op.getNextTuple();
                assertEquals(POStatus.STATUS_ERR, rest.returnStatus);
                // test with null in rhs
                lt.setValue(inpt1);
                rt.setValue(null);
                rest = op.getNextTuple();
                assertEquals(POStatus.STATUS_ERR, rest.returnStatus);
                break;
            }
            }
        }
    }
}
| |
/**
*
*/
package mkl.testarea.pdfbox2.form;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDResources;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.pdmodel.font.Standard14Fonts.FontName;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceDictionary;
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDField;
import org.apache.pdfbox.pdmodel.interactive.form.PDTerminalField;
import org.apache.pdfbox.pdmodel.interactive.form.PDTextField;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Experiments with creating duplicate-named form fields in PDFBox.
 *
 * @author mklink
 */
public class DuplicateFields
{
    // All generated PDFs end up under target/test-outputs/form.
    final static File RESULT_FOLDER = new File("target/test-outputs", "form");

    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        RESULT_FOLDER.mkdirs();
    }

    /**
     * <p>
     * This test attempts to create duplicate fields in different ways
     * </p>
     * <ul>
     * <li>A single field with multiple widget annotations, one per use;
     * this is the "official" way.
     * <li>A single field with a single, merged widget annotation which
     * is referenced from different pages; this is not valid per se, cf.
     * ISO 32000-1, section 12.5.2: "A given annotation dictionary shall
     * be referenced from the Annots array of only one page". Unless
     * simultaneously visible (e.g. in "Two Page View" page display),
     * though, this works well in Adobe Reader.
     * <li>Two separate fields, either both in the AcroForm Fields or
     * not; this is not explicitly prohibited by the specification but
     * seems not to be supported by Adobe. One might consider this an
     * implicit requirement, there shall not be two fields with the
     * same name unless these two fields are descendants of a single
     * field which already also has this name.
     * <li>One named field with two anonymous descendant fields each
     * with two widgets. While this should work, it doesn't: Adobe
     * ignores one of the four widgets.
     * </ul>
     */
    @Test
    public void testCreateDuplicateFields() throws IOException
    {
        try (PDDocument document = new PDDocument())
        {
            PDPage page1 = new PDPage(PDRectangle.A4);
            document.addPage(page1);
            PDPage page2 = new PDPage(PDRectangle.A4);
            document.addPage(page2);
            // Default resources providing the "Helv" font referenced by the
            // "/Helv 12 Tf ..." default-appearance strings below.
            PDFont font = new PDType1Font(FontName.HELVETICA);
            PDResources resources = new PDResources();
            resources.put(COSName.getPDFName("Helv"), font);
            PDAcroForm acroForm = new PDAcroForm(document);
            acroForm.setDefaultResources(resources);
            //acroForm.setNeedAppearances(true);
            document.getDocumentCatalog().setAcroForm(acroForm);
            //
            // SampleFieldA: single field with separate widgets.
            // (One widget per page; the spec-conformant approach.)
            //
            PDTextField textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldA");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            acroForm.getFields().add(textBox);
            PDAnnotationWidget widget1 = new PDAnnotationWidget();
            PDRectangle rectA = new PDRectangle(50, 750, 250, 50);
            widget1.setRectangle(rectA);
            widget1.setPage(page1);
            widget1.setParent(textBox);
            page1.getAnnotations().add(widget1);
            PDAnnotationWidget widget2 = new PDAnnotationWidget();
            PDRectangle rect2 = new PDRectangle(50, 750, 250, 50);
            widget2.setRectangle(rect2);
            widget2.setPage(page2);
            widget2.setParent(textBox);
            page2.getAnnotations().add(widget2);
            textBox.setWidgets(Arrays.asList(widget1, widget2));
            textBox.setValue("A");
            //
            // SampleFieldB: single field with merged single widget.
            // The one widget is added to the Annots of BOTH pages — invalid
            // per ISO 32000-1, 12.5.2 (see class comment), but tolerated by
            // Adobe Reader unless both pages are visible simultaneously.
            //
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldB");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            acroForm.getFields().add(textBox);
            PDAnnotationWidget widget = textBox.getWidgets().get(0);
            PDRectangle rectB = new PDRectangle(50, 650, 250, 50);
            widget.setRectangle(rectB);
            page1.getAnnotations().add(widget);
            page2.getAnnotations().add(widget);
            textBox.setValue("B");
            //
            // SampleFieldC: separate fields (same name, both in AcroForm Fields)
            //
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldC");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            acroForm.getFields().add(textBox);
            widget = textBox.getWidgets().get(0);
            PDRectangle rectC = new PDRectangle(50, 550, 250, 50);
            widget.setRectangle(rectC);
            page1.getAnnotations().add(widget);
            textBox.setValue("C1");
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldC");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            acroForm.getFields().add(textBox);
            widget = textBox.getWidgets().get(0);
            widget.setRectangle(rectC);
            page2.getAnnotations().add(widget);
            textBox.setValue("C2");
            //
            // SampleFieldD: separate fields not in AcroForm
            // (widgets are on the pages, but the fields are never added to
            // acroForm.getFields())
            //
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldD");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            widget = textBox.getWidgets().get(0);
            PDRectangle rectD = new PDRectangle(50, 450, 250, 50);
            widget.setRectangle(rectD);
            page1.getAnnotations().add(widget);
            textBox.setValue("D1");
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldD");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            widget = textBox.getWidgets().get(0);
            widget.setRectangle(rectD);
            page2.getAnnotations().add(widget);
            textBox.setValue("D2");
            //
            // SampleFieldE: one ancestor field with the name,
            // separate anonymous child fields, separate widgets.
            // The child fields are built as raw COS dictionaries.
            //
            textBox = new PDTextField(acroForm);
            textBox.setPartialName("SampleFieldE");
            textBox.setDefaultAppearance("/Helv 12 Tf 0 1 0 rg");
            acroForm.getFields().add(textBox);
            COSDictionary anonTextBox1 = new COSDictionary();
            anonTextBox1.setItem(COSName.PARENT, textBox);
            anonTextBox1.setString(COSName.DA, "/Helv 12 Tf 0 0 1 rg");
            widget1 = new PDAnnotationWidget();
            PDRectangle rectE1 = new PDRectangle(50, 350, 250, 50);
            widget1.setRectangle(rectE1);
            widget1.setPage(page1);
            widget1.getCOSObject().setItem(COSName.PARENT, anonTextBox1);
            page1.getAnnotations().add(widget1);
            widget2 = new PDAnnotationWidget();
            widget2.setRectangle(rectE1);
            widget2.setPage(page2);
            widget2.getCOSObject().setItem(COSName.PARENT, anonTextBox1);
            page2.getAnnotations().add(widget2);
            COSArray kids = new COSArray();
            kids.add(widget1.getCOSObject());
            kids.add(widget2.getCOSObject());
            anonTextBox1.setItem(COSName.KIDS, kids);
            // tempField exists only so PDFBox generates an appearance stream
            // for the value "E"; that appearance is then shared by the
            // hand-built widgets above.
            PDTextField tempField = new PDTextField(acroForm);
            tempField.setDefaultAppearance("/Helv 12 Tf 0 0 1 rg");
            tempField.getWidgets().get(0).setRectangle(rectE1);
            tempField.setValue("E");
            PDAppearanceDictionary appearance = tempField.getWidgets().get(0).getAppearance();
            widget1.setAppearance(appearance);
            widget2.setAppearance(appearance);
            COSDictionary anonTextBox2 = new COSDictionary();
            anonTextBox2.setItem(COSName.PARENT, textBox);
            anonTextBox2.setString(COSName.DA, "/Helv 12 Tf 1 0 0 rg");
            widget1 = new PDAnnotationWidget();
            PDRectangle rectE2 = new PDRectangle(350, 350, 200, 50);
            widget1.setRectangle(rectE2);
            widget1.setPage(page2);
            widget1.getCOSObject().setItem(COSName.PARENT, anonTextBox2);
            page2.getAnnotations().add(widget1);
            widget2 = new PDAnnotationWidget();
            widget2.setRectangle(rectE2);
            widget2.setPage(page1);
            widget2.getCOSObject().setItem(COSName.PARENT, anonTextBox2);
            page1.getAnnotations().add(widget2);
            kids = new COSArray();
            kids.add(widget2.getCOSObject());
            kids.add(widget1.getCOSObject());
            anonTextBox2.setItem(COSName.KIDS, kids);
            kids = new COSArray();
            kids.add(anonTextBox1);
            kids.add(anonTextBox2);
            textBox.getCOSObject().setItem(COSName.KIDS, kids);
            // Same appearance-generation trick for the second child field.
            tempField = new PDTextField(acroForm);
            tempField.setDefaultAppearance("/Helv 12 Tf 1 0 0 rg");
            tempField.getWidgets().get(0).setRectangle(rectE2);
            tempField.setValue("E");
            appearance = tempField.getWidgets().get(0).getAppearance();
            widget1.setAppearance(appearance);
            widget2.setAppearance(appearance);
            textBox.getCOSObject().setString(COSName.V, "E");
            document.save(new File(RESULT_FOLDER, "duplicateFields.pdf"));
        }
        // Read the file back, dump the field tree, and update every terminal
        // field's value to see how the duplicates round-trip.
        try ( InputStream stream = new FileInputStream(new File(RESULT_FOLDER, "duplicateFields.pdf"));
                PDDocument document = Loader.loadPDF(stream) )
        {
            PDAcroForm acroForm = document.getDocumentCatalog().getAcroForm();
            for (PDField field : acroForm.getFieldTree())
            {
                System.out.println(field.getFullyQualifiedName() + " (" + field.getClass().getSimpleName() + ")");
                if (field instanceof PDTerminalField)
                {
                    for (PDAnnotationWidget widget: ((PDTerminalField)field).getWidgets())
                    {
                        System.out.println("\t" + widget.getAnnotationName());
                    }
                    field.setValue("changed " + field.getValueAsString());
                }
            }
            document.save(new File(RESULT_FOLDER, "duplicateFieldsChanged.pdf"));
        }
    }
}
| |
package sanchez.sergio.persistence.entities;
import sanchez.sergio.persistence.entities.AbstractEntity;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.NotBlank;
import org.springframework.hateoas.Link;
import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo;
import sanchez.sergio.action.ActivateAccount;
import sanchez.sergio.action.ArchiveAccount;
import sanchez.sergio.action.ConfirmAccount;
import sanchez.sergio.action.SuspendAccount;
import sanchez.sergio.domain.Aggregate;
import sanchez.sergio.domain.Command;
import sanchez.sergio.domain.Module;
import sanchez.sergio.persistence.entities.User.UserChangePassword;
import sanchez.sergio.persistence.entities.User.UserCreation;
import sanchez.sergio.persistence.constraints.FieldMatch;
import sanchez.sergio.persistence.constraints.FieldNotMatch;
import sanchez.sergio.persistence.constraints.UserCurrentPassword;
import sanchez.sergio.persistence.constraints.UsernameUnique;
import sanchez.sergio.service.UserModule;
import sanchez.sergio.controller.UserController;
/**
* @author sergio
* @param <T>
*/
@Entity
@Table(name = "USERS")
@FieldMatch(first = "passwordClear", second = "confirmPassword", message = "{user.pass.not.match}", groups = {UserCreation.class, UserChangePassword.class})
@FieldNotMatch(first = "currentClearPassword", second = "passwordClear", message = "{user.current.pass.not.match}", groups = {UserChangePassword.class})
public class User extends AbstractEntity<AccountEvent, Long> {
/* Marker interface for grouping validations to be applied at the time of creating a (new) user. */
public interface UserCreation {
}
/* Marker interface for grouping validations to be applied at the time of updating a (existing) user. */
public interface UserUpdate {
}
/* Marker interface for grouping validations to be applied at the time of change user password. */
public interface UserChangePassword {
}
/* Marker interface for grouping validations to be applied at the time of updating a user status by administrator. */
public interface UserStatusUpdate {
}
@NotBlank(message = "{user.username.notnull}", groups = {UserCreation.class, UserUpdate.class})
@Size(min = 5, max = 15, message = "{user.username.size}", groups = {UserCreation.class, UserUpdate.class})
@UsernameUnique(message = "{user.username.unique}", groups = {UserCreation.class, UserUpdate.class})
@Column(nullable = false, length = 30, unique = true)
private String username;
@NotBlank(message = "{user.username.notnull}", groups = {UserCreation.class, UserUpdate.class})
@Size(min = 5, max = 15, message = "{user.username.size}", groups = {UserCreation.class, UserUpdate.class})
@UsernameUnique(message = "{user.username.unique}", groups = {UserCreation.class, UserUpdate.class})
@Column(name="display_name", nullable = false, length = 30, unique = true)
private String displayName;
@NotBlank(message = "{user.current.pass.notnull}", groups = {UserChangePassword.class})
@UserCurrentPassword(message = "{user.current.pass.not.match}", groups = {UserChangePassword.class})
@Transient
private String currentClearPassword;
@NotBlank(message = "{user.pass.notnull}", groups = {UserCreation.class, UserChangePassword.class})
@Size(min = 8, max = 25, message = "{user.pass.size}", groups = {UserCreation.class, UserChangePassword.class})
@Transient
private String passwordClear;
@NotBlank(message = "{user.confirm.pass.notnull}", groups = {UserCreation.class, UserChangePassword.class})
@Transient
private String confirmPassword;
@Column(length = 60)
private String password;
@Column(name="first_name", unique = false, nullable = false, length = 30)
private String firstName;
@Column(name="last_name", unique = false, nullable = false, length = 30)
private String lastName;
@Column(unique = true, nullable = false, length = 90)
private String email;
@Column(name="title", unique = false, nullable = false, length = 30)
private String title;
@Column(name="initials", unique = false, nullable = false, length = 2)
private String initials;
@ManyToMany(fetch = FetchType.EAGER, cascade = {CascadeType.MERGE})
@JoinTable(
name = "USER_AUTHORITIES",
joinColumns = @JoinColumn(name = "user_id", referencedColumnName = "identity"),
inverseJoinColumns = @JoinColumn(name = "authority_id", referencedColumnName = "ID")
)
private Set<Authority> authorities = new HashSet();
@Enumerated(value = EnumType.STRING)
@Column(unique = false, nullable = false)
private AccountStatus status;
public User() {
status = AccountStatus.ACCOUNT_CREATED;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getCurrentClearPassword() {
return currentClearPassword;
}
public void setCurrentClearPassword(String currentClearPassword) {
this.currentClearPassword = currentClearPassword;
}
public String getPasswordClear() {
return passwordClear;
}
public void setPasswordClear(String passwordClear) {
this.passwordClear = passwordClear;
}
public String getConfirmPassword() {
return confirmPassword;
}
public void setConfirmPassword(String confirmPassword) {
this.confirmPassword = confirmPassword;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Set<Authority> getAuthorities() {
return authorities;
}
public void setAuthorities(Set<Authority> authorities) {
this.authorities = authorities;
}
public void addAuthority(Authority authority){
if(!this.authorities.contains(authority)){
this.authorities.add(authority);
authority.addUser(this);
}
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public AccountStatus getStatus() {
return status;
}
public void setStatus(AccountStatus status) {
this.status = status;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getInitials() {
return initials;
}
public void setInitials(String initials) {
this.initials = initials;
}
@Command(method = "activate", controller = UserController.class)
public User activate() {
return getAction(ActivateAccount.class).apply(this);
}
@Command(method = "archive", controller = UserController.class)
public User archive() {
return getAction(ArchiveAccount.class).apply(this);
}
@Command(method = "confirm", controller = UserController.class)
public User confirm() {
return getAction(ConfirmAccount.class).apply(this);
}
@Command(method = "suspend", controller = UserController.class)
public User suspend() {
return getAction(SuspendAccount.class).apply(this);
}
/**
* Retrieves an instance of the {@link Module} for this instance
*
* @return the provider for this instance
* @throws IllegalArgumentException if the application context is
* unavailable or the provider does not exist
*/
@Override
@SuppressWarnings("unchecked")
public <T extends Module<A>, A extends Aggregate<AccountEvent, Long>> T getModule() throws IllegalArgumentException {
UserModule accountProvider = getModule(UserModule.class);
return (T) accountProvider;
}
/**
* Returns the {@link Link} with a rel of {@link Link#REL_SELF}.
*/
@Override
public Link getId() {
return linkTo(UserController.class)
.slash("accounts")
.slash(getIdentity())
.withSelfRel();
}
}
| |
/*
Derby - Class org.apache.derby.impl.sql.catalog.SYSTABLESRowFactory
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.catalog;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.sql.dictionary.SystemColumn;
import org.apache.derby.iapi.types.DataValueFactory;
import org.apache.derby.iapi.types.RowLocation;
import org.apache.derby.iapi.types.SQLChar;
import org.apache.derby.iapi.types.SQLVarchar;
import org.apache.derby.iapi.sql.dictionary.CatalogRowFactory;
import org.apache.derby.iapi.sql.dictionary.TupleDescriptor;
import org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;
import org.apache.derby.iapi.sql.dictionary.TableDescriptor;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.sql.execute.ExecIndexRow;
import org.apache.derby.iapi.sql.execute.ExecutionFactory;
import org.apache.derby.iapi.sql.execute.ExecRow;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.catalog.UUID;
import org.apache.derby.iapi.services.uuid.UUIDFactory;
import org.apache.derby.iapi.store.access.TransactionController;
/**
* Factory for creating a SYSTABLES row.
*
*
* @version 0.1
*/
class SYSTABLESRowFactory extends CatalogRowFactory
{
    private static final String TABLENAME_STRING = "SYSTABLES";
    protected static final int SYSTABLES_COLUMN_COUNT = 5;
    /* Column #s for systables (1 based) */
    protected static final int SYSTABLES_TABLEID = 1;
    protected static final int SYSTABLES_TABLENAME = 2;
    protected static final int SYSTABLES_TABLETYPE = 3;
    protected static final int SYSTABLES_SCHEMAID = 4;
    protected static final int SYSTABLES_LOCKGRANULARITY = 5;
    /* Index numbers (0 based) and 1-based column positions within each index row */
    protected static final int SYSTABLES_INDEX1_ID = 0;
    protected static final int SYSTABLES_INDEX1_TABLENAME = 1;
    protected static final int SYSTABLES_INDEX1_SCHEMAID = 2;
    protected static final int SYSTABLES_INDEX2_ID = 1;
    protected static final int SYSTABLES_INDEX2_TABLEID = 1;
    // all indexes are unique.
    /* Hard-coded UUIDs identifying this catalog, its heap and its indexes. */
    private static final String[] uuids =
    {
        "80000018-00d0-fd77-3ed8-000a0a0b1900" // catalog UUID
        ,"80000028-00d0-fd77-3ed8-000a0a0b1900" // heap UUID
        ,"8000001a-00d0-fd77-3ed8-000a0a0b1900" // SYSTABLES_INDEX1
        ,"8000001c-00d0-fd77-3ed8-000a0a0b1900" // SYSTABLES_INDEX2
    };
    /* Index 1: (TABLENAME, SCHEMAID); index 2: (TABLEID). Positions are 1-based. */
    private static final int[][] indexColumnPositions =
    {
        { SYSTABLES_TABLENAME, SYSTABLES_SCHEMAID},
        { SYSTABLES_TABLEID }
    };
/////////////////////////////////////////////////////////////////////////////
//
// CONSTRUCTORS
//
/////////////////////////////////////////////////////////////////////////////
    /**
     * Constructs the SYSTABLES catalog row factory, registering the
     * catalog's column count, name, index column positions and UUIDs.
     */
    SYSTABLESRowFactory(UUIDFactory uuidf, ExecutionFactory ef, DataValueFactory dvf)
    {
        super(uuidf,ef,dvf);
        // null boolean[]: every index is unique (see comment on the constants above)
        initInfo(SYSTABLES_COLUMN_COUNT, TABLENAME_STRING, indexColumnPositions, (boolean[]) null, uuids);
    }
/////////////////////////////////////////////////////////////////////////////
//
// METHODS
//
/////////////////////////////////////////////////////////////////////////////
/**
* Make a SYSTABLES row
*
* @return Row suitable for inserting into SYSTABLES.
*
* @exception StandardException thrown on failure
*/
public ExecRow makeRow(TupleDescriptor td,
TupleDescriptor parent)
throws StandardException
{
UUID oid;
String tabSType = null;
int tabIType;
ExecRow row;
String lockGranularity = null;
String tableID = null;
String schemaID = null;
String tableName = null;
if (td != null)
{
/*
** We only allocate a new UUID if the descriptor doesn't already have one.
** For descriptors replicated from a Source system, we already have an UUID.
*/
TableDescriptor descriptor = (TableDescriptor)td;
SchemaDescriptor schema = (SchemaDescriptor)parent;
oid = descriptor.getUUID();
if ( oid == null )
{
oid = getUUIDFactory().createUUID();
descriptor.setUUID(oid);
}
tableID = oid.toString();
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(schema != null,
"Schema should not be null unless empty row is true");
if (schema.getUUID() == null)
{
SanityManager.THROWASSERT("schema " + schema + " has a null OID");
}
}
schemaID = schema.getUUID().toString();
tableName = descriptor.getName();
/* RESOLVE - Table Type should really be a char in the descriptor
* T, S, V, S instead of 0, 1, 2, 3
*/
tabIType = descriptor.getTableType();
switch (tabIType)
{
case TableDescriptor.BASE_TABLE_TYPE:
tabSType = "T";
break;
case TableDescriptor.SYSTEM_TABLE_TYPE:
tabSType = "S";
break;
case TableDescriptor.VIEW_TYPE:
tabSType = "V";
break;
case TableDescriptor.SYNONYM_TYPE:
tabSType = "A";
break;
default:
if (SanityManager.DEBUG)
SanityManager.THROWASSERT("invalid table type");
}
char[] lockGChar = new char[1];
lockGChar[0] = descriptor.getLockGranularity();
lockGranularity = new String(lockGChar);
}
/* Insert info into systables */
/* RESOLVE - It would be nice to require less knowledge about systables
* and have this be more table driven.
*/
/* Build the row to insert */
row = getExecutionFactory().getValueRow(SYSTABLES_COLUMN_COUNT);
/* 1st column is TABLEID (UUID - char(36)) */
row.setColumn(SYSTABLES_TABLEID, new SQLChar(tableID));
/* 2nd column is NAME (varchar(30)) */
row.setColumn(SYSTABLES_TABLENAME, new SQLVarchar(tableName));
/* 3rd column is TABLETYPE (char(1)) */
row.setColumn(SYSTABLES_TABLETYPE, new SQLChar(tabSType));
/* 4th column is SCHEMAID (UUID - char(36)) */
row.setColumn(SYSTABLES_SCHEMAID, new SQLChar(schemaID));
/* 5th column is LOCKGRANULARITY (char(1)) */
row.setColumn(SYSTABLES_LOCKGRANULARITY, new SQLChar(lockGranularity));
return row;
}
/**
 * Builds an empty index row for one of the SYSTABLES indexes.
 * The row has one column per indexed key column plus a trailing
 * RowLocation column, and each key column is a fresh, unset DVD of
 * the correct type for that index position.
 *
 * @param indexNumber Index to build empty row for.
 * @param rowLocation Row location for last column of index row
 *
 * @return corresponding empty index row
 * @exception StandardException thrown on failure
 */
ExecIndexRow buildEmptyIndexRow( int indexNumber,
RowLocation rowLocation)
throws StandardException
{
    int keyColumnCount = getIndexColumnCount(indexNumber);
    ExecIndexRow emptyRow = getExecutionFactory().getIndexableRow(keyColumnCount + 1);

    // The row location always occupies the final column of an index row.
    emptyRow.setColumn(keyColumnCount + 1, rowLocation);

    if (indexNumber == SYSTABLES_INDEX1_ID)
    {
        /* Index 1 key: TABLENAME (varchar(128)), SCHEMAID (UUID - char(36)) */
        emptyRow.setColumn(1, new SQLVarchar());
        emptyRow.setColumn(2, new SQLChar());
    }
    else if (indexNumber == SYSTABLES_INDEX2_ID)
    {
        /* Index 2 key: TABLEID (UUID - char(36)) */
        emptyRow.setColumn(1, new SQLChar());
    }

    return emptyRow;
}
/**
 * Make a TableDescriptor out of a SYSTABLES row, reading the schema
 * row at an explicitly supplied isolation level.
 *
 * @param row a SYSTABLES row
 * @param parentTupleDescriptor Null for this kind of descriptor.
 * @param dd dataDictionary
 * @param isolationLevel use this explicit isolation level. Only
 *                       ISOLATION_REPEATABLE_READ (normal usage)
 *                       or ISOLATION_READ_UNCOMMITTED (corner
 *                       cases) supported for now.
 * @exception StandardException thrown on failure
 */
TupleDescriptor buildDescriptor(
ExecRow row,
TupleDescriptor parentTupleDescriptor,
DataDictionary dd,
int isolationLevel)
throws StandardException
{
    // Simply delegate; the shared implementation lives in buildDescriptorBody.
    return buildDescriptorBody(row, parentTupleDescriptor, dd, isolationLevel);
}
///////////////////////////////////////////////////////////////////////////
//
// ABSTRACT METHODS TO BE IMPLEMENTED BY CHILDREN OF CatalogRowFactory
//
///////////////////////////////////////////////////////////////////////////
/**
 * Make a TableDescriptor out of a SYSTABLES row, using the default
 * REPEATABLE_READ isolation level for the schema lookup.
 *
 * @param row a SYSTABLES row
 * @param parentTupleDescriptor Null for this kind of descriptor.
 * @param dd dataDictionary
 *
 * @return a table descriptor equivalent to a SYSTABLES row
 *
 * @exception StandardException thrown on failure
 */
public TupleDescriptor buildDescriptor(
ExecRow row,
TupleDescriptor parentTupleDescriptor,
DataDictionary dd )
throws StandardException
{
    // Normal-usage entry point: fix the isolation level and delegate.
    return buildDescriptorBody(row,
                               parentTupleDescriptor,
                               dd,
                               TransactionController.ISOLATION_REPEATABLE_READ);
}
/**
 * Shared implementation for the buildDescriptor variants: decodes the five
 * SYSTABLES columns (TABLEID, TABLENAME, TABLETYPE, SCHEMAID,
 * LOCKGRANULARITY) into a TableDescriptor, resolving the owning schema
 * through the data dictionary at the supplied isolation level.
 *
 * @param row a SYSTABLES row
 * @param parentTupleDescriptor Null for this kind of descriptor.
 * @param dd dataDictionary used to resolve the schema descriptor
 * @param isolationLevel isolation level for the schema lookup
 * @return the TableDescriptor equivalent of the row
 * @exception StandardException thrown on failure
 */
public TupleDescriptor buildDescriptorBody(
ExecRow row,
TupleDescriptor parentTupleDescriptor,
DataDictionary dd,
int isolationLevel)
throws StandardException
{
// Sanity: the row must carry exactly the SYSTABLES column count.
if (SanityManager.DEBUG)
SanityManager.ASSERT(row.nColumns() == SYSTABLES_COLUMN_COUNT, "Wrong number of columns for a SYSTABLES row");
DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
String tableUUIDString;
String schemaUUIDString;
int tableTypeEnum;
String lockGranularity;
String tableName, tableType;
DataValueDescriptor col;
UUID tableUUID;
UUID schemaUUID;
SchemaDescriptor schema;
TableDescriptor tabDesc;
/* 1st column is TABLEID (UUID - char(36)) */
col = row.getColumn(SYSTABLES_TABLEID);
tableUUIDString = col.getString();
tableUUID = getUUIDFactory().recreateUUID(tableUUIDString);
/* 2nd column is TABLENAME (varchar(128)) */
col = row.getColumn(SYSTABLES_TABLENAME);
tableName = col.getString();
/* 3rd column is TABLETYPE (char(1)) */
col = row.getColumn(SYSTABLES_TABLETYPE);
tableType = col.getString();
// NOTE(review): the assert message below says "Fourth column" but this is
// the 3rd column (TABLETYPE) — message looks stale; left unchanged since it
// is a runtime string.
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(tableType.length() == 1, "Fourth column type incorrect");
}
// Map the single-character type code back to the TableDescriptor enum;
// this is the inverse of the encoding performed when the row is built
// ('T' base, 'S' system, 'V' view, 'A' synonym).
switch (tableType.charAt(0))
{
case 'T' :
tableTypeEnum = TableDescriptor.BASE_TABLE_TYPE;
break;
case 'S' :
tableTypeEnum = TableDescriptor.SYSTEM_TABLE_TYPE;
break;
case 'V' :
tableTypeEnum = TableDescriptor.VIEW_TYPE;
break;
case 'A' :
tableTypeEnum = TableDescriptor.SYNONYM_TYPE;
break;
default:
// Unknown code: assert in debug builds, otherwise fall through with -1.
if (SanityManager.DEBUG)
SanityManager.THROWASSERT("Fourth column value invalid");
tableTypeEnum = -1;
}
/* 4th column is SCHEMAID (UUID - char(36)) */
col = row.getColumn(SYSTABLES_SCHEMAID);
schemaUUIDString = col.getString();
schemaUUID = getUUIDFactory().recreateUUID(schemaUUIDString);
// Resolve the owning schema at the caller-supplied isolation level.
schema = dd.getSchemaDescriptor(schemaUUID, isolationLevel, null);
/* 5th column is LOCKGRANULARITY (char(1)) */
col = row.getColumn(SYSTABLES_LOCKGRANULARITY);
lockGranularity = col.getString();
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(lockGranularity.length() == 1, "Fifth column type incorrect");
}
// RESOLVE - Deal with lock granularity
tabDesc = ddg.newTableDescriptor(tableName, schema, tableTypeEnum, lockGranularity.charAt(0));
tabDesc.setUUID(tableUUID);
return tabDesc;
}
/**
 * Get the table name out of this SYSTABLES row.
 *
 * @param row a SYSTABLES row
 *
 * @return string, the table name
 *
 * @exception StandardException thrown on failure
 */
protected String getTableName(ExecRow row)
throws StandardException
{
    // TABLENAME is the second column of a SYSTABLES row.
    return row.getColumn(SYSTABLES_TABLENAME).getString();
}
/**
 * Builds a list of columns suitable for creating this Catalog.
 * Column order matches the SYSTABLES_* column-position constants:
 * TABLEID, TABLENAME, TABLETYPE, SCHEMAID, LOCKGRANULARITY.
 *
 * @return array of SystemColumn suitable for making this catalog.
 */
public SystemColumn[] buildColumnList()
throws StandardException
{
    SystemColumn[] columns = new SystemColumn[5];
    columns[0] = SystemColumnImpl.getUUIDColumn("TABLEID", false);
    columns[1] = SystemColumnImpl.getIdentifierColumn("TABLENAME", false);
    columns[2] = SystemColumnImpl.getIndicatorColumn("TABLETYPE");
    columns[3] = SystemColumnImpl.getUUIDColumn("SCHEMAID", false);
    columns[4] = SystemColumnImpl.getIndicatorColumn("LOCKGRANULARITY");
    return columns;
}
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2016 Isak Karlsson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.briljantframework;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.briljantframework.array.Array;
import org.briljantframework.array.Arrays;
import org.briljantframework.array.DoubleArray;
import org.briljantframework.array.IntArray;
import org.junit.Test;
/**
* @author Isak Karlsson <isak-kar@dsv.su.se>
*/
// Unit tests for the briljantframework Arrays utility class: broadcasting,
// splitting/stacking, tiling, repeating, sorting/ordering, concatenation.
// Arrays here use column-major (Fortran-order) reshaping, as the expected
// values in the assertions demonstrate.
public class ArraysTest {
@Test
public void ReadIdx() throws Exception {
// NOTE(review): this test body is entirely commented-out exploratory code
// (reading MNIST idx files and rendering them); the test currently does
// nothing. Consider deleting it or restoring it behind a resource check.
// DoubleArray array = Arrays
// .readIdx(new FileInputStream(new File("/home/isak/Tmp/mnist/train-images-idx3-ubyte")));
// DoubleArray x = array.select(2);
// ArrayPrinter.setMinimumTruncateSize(1000000);
// System.out.println(x);
//
//
// System.out.println(Arrays.unmodifiableArray(x));
// ArrayPrinter.setVisiblePerSlice(100000);
// ArrayPrinter.setPrintSlices(1000000);
// System.out.println(java.util.Arrays.toString(x.getShape()));
// System.out.println(x);
// BufferedImage img =
// new BufferedImage(x.size(0), x.size(1), BufferedImage.TYPE_BYTE_GRAY);
//
// for (int i = 0; i < x.size(0); i++) {
// for (int j = 0; j < x.size(1); j++) {
// img.setRGB(i, j, (int) x.get(i, j));
// }
// }
// System.out.println(x.asInt());
// JFrame frame = new JFrame();
// frame.getContentPane().add(new JLabel(new ImageIcon(img)));
// frame.pack();
// frame.setVisible(true);
}
// NOTE(review): prints results rather than asserting; only verifies that
// plus/plusAssign run without throwing.
@Test public void testPlus() throws Exception {
DoubleArray a = Arrays.doubleVector(1, 2, 3, 4, 5, 6).reshape(2, 3);
DoubleArray b = Arrays.doubleVector(1,2,3);
System.out.println(a);
System.out.println(b);
System.out.println(Arrays.plus(a, b));
Arrays.plusAssign(b, a);
System.out.println(a);
}
// NOTE(review): no assertions; smoke test for broadcastCombine.
@Test public void testBroadcast_BiFunction() throws Exception {
DoubleArray x = DoubleArray.zeros(3, 3, 3);
DoubleArray y = DoubleArray.linspace(-1, 1, 3 * 3).reshape(3, 3);
DoubleArray broadcast = Arrays.broadcastCombine(x, y, Arrays::plus);
System.out.println(broadcast);
}
@Test
public void testSwapdimensions() throws Exception {
// IntArray x = IntArray.of(0, 4, 2, 6, 1, 5, 3, 7).reshape(2, 2, 2);
IntArray x = Arrays.broadcastTo(IntArray.of(1, 2), 2, 2, 2);
System.out.println(x);
System.out.println(Arrays.swapDimension(x, 0, 2));
}
@Test
public void testBroadcast_reshape() throws Exception {
// Broadcasting a 3-vector to (6, 3) then reshaping to (2, 9).
Array<String> a = Array.of("a", "b", "c");
Array<String> x = Arrays.broadcastTo(a, 6, 3).reshape(2, 9);
assertEquals(Array.of("a", "a", "a", "a", "a", "a", "b", "b", "b", "b", "b", "b", "c", "c", "c",
"c", "c", "c").reshape(2, 9), x);
}
@Test
public void testBroadcast_column_vector() throws Exception {
// A (3, 1) column vector broadcasts across columns.
IntArray a = IntArray.of(0, 1, 2).reshape(3, 1);
IntArray expected = IntArray.of(0, 1, 2, 0, 1, 2, 0, 1, 2).reshape(3, 3);
assertEquals(expected, Arrays.broadcastTo(a, 3, 3));
}
@Test
public void testBroadcastTo_row_vector() throws Exception {
// A (1, 3) row vector broadcasts across rows.
IntArray a = IntArray.of(0, 1, 2).reshape(1, 3);
IntArray expected = IntArray.of(0, 0, 0, 1, 1, 1, 2, 2, 2).reshape(3, 3);
assertEquals(expected, Arrays.broadcastTo(a, 3, 3));
}
@Test
public void testBroadcastTo_1darray() throws Exception {
// A 1-d array broadcast to (3, 3, 3): every vector along dim 2 equals a.
IntArray a = IntArray.of(0, 1, 2);
IntArray y = Arrays.broadcastTo(a, 3, 3, 3);
for (int i = 0; i < y.vectors(2); i++) {
assertEquals(a, y.getVector(2, i));
}
}
// NOTE(review): print-only; no assertions.
@Test
public void testBroadcast() throws Exception {
IntArray a = IntArray.of(10032, 3, 3).reshape(3, 1);
System.out.println(Arrays.broadcastTo(a, 3, 3, 3));
IntArray x = IntArray.of(0, 1, 2).reshape(1, 3);
IntArray y = IntArray.of(0, 1, 2).reshape(3, 1);
System.out.println(Arrays.broadcastAll(asList(x, y)));
}
@Test
public void testVsplit2d() throws Exception {
// Splitting a (6, 3) array along dim 0 into three (2, 3) pieces.
int m = 6;
int n = 3;
IntArray x = Arrays.range(m * n).reshape(m, n);
List<IntArray> split = Arrays.vsplit(x, 3);
assertEquals(Arrays.intVector(0, 1, 6, 7, 12, 13).reshape(2, 3), split.get(0));
assertEquals(Arrays.intVector(2, 3, 8, 9, 14, 15).reshape(2, 3), split.get(1));
assertEquals(Arrays.intVector(4, 5, 10, 11, 16, 17).reshape(2, 3), split.get(2));
}
@Test
public void testVsplitnd() throws Exception {
// vsplit on a 3-d array: checks the middle piece only.
IntArray x = Arrays.range(6 * 3 * 3).reshape(6, 3, 3);
List<IntArray> split = Arrays.vsplit(x, 3);
assertEquals(
Arrays.intVector(2, 3, 8, 9, 14, 15, 20, 21, 26, 27, 32, 33, 38, 39, 44, 45, 50, 51)
.reshape(2, 3, 3),
split.get(1));
}
@Test
public void testHsplit2d() throws Exception {
// Splitting a (3, 6) array along dim 1 into three (3, 2) pieces.
int m = 6;
int n = 3;
IntArray x = Arrays.range(n * m).reshape(n, m);
List<IntArray> split = Arrays.hsplit(x, 3);
assertEquals(Arrays.intVector(0, 1, 2, 3, 4, 5).reshape(3, 2), split.get(0));
assertEquals(Arrays.intVector(6, 7, 8, 9, 10, 11).reshape(3, 2), split.get(1));
assertEquals(Arrays.intVector(12, 13, 14, 15, 16, 17).reshape(3, 2), split.get(2));
}
@Test
public void testVstacknd() throws Exception {
// vstack must be the inverse of vsplit.
IntArray x = Arrays.range(6 * 3 * 3).reshape(6, 3, 3);
List<IntArray> split = Arrays.vsplit(x, 3);
IntArray vstack = Arrays.vstack(split);
assertEquals(x, vstack);
}
@Test
public void testRepeat() throws Exception {
// repeat flattens and repeats each element 3 times.
IntArray x = Arrays.range(3 * 3).reshape(3, 3);
assertEquals(Arrays.intVector(0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6, 7,
7, 7, 8, 8, 8), Arrays.repeat(x, 3));
}
@Test
public void testRepeatNd() throws Exception {
// repeat along an explicit dimension of a 3-d array.
IntArray x = Arrays.range(3 * 3 * 3).reshape(3, 3, 3);
IntArray repeatDim2 = Arrays.repeat(2, x, 3);
IntArray repeatDim1 = Arrays.repeat(1, x, 3);
assertEquals(Arrays.intVector(3, 4, 5), repeatDim2.getVector(0, 1));
assertEquals(Arrays.intVector(1, 1, 1, 4, 4, 4, 7, 7, 7), repeatDim1.getVector(1, 1));
}
@Test
public void testTile3d() throws Exception {
// Tiling a (4, 2) array by (2, 2, 2) yields shape (2, 8, 4); the vectors
// along dim 2 cycle through the source columns.
IntArray x = Arrays.range(2 * 2 * 2).reshape(4, 2);
IntArray tile = Arrays.tile(x, 2, 2, 2);
assertArrayEquals(new int[] {2, 8, 4}, tile.getShape());
assertEquals(Arrays.intVector(0, 4, 0, 4), tile.getVector(2, 0));
assertEquals(Arrays.intVector(1, 5, 1, 5), tile.getVector(2, 2));
assertEquals(Arrays.intVector(2, 6, 2, 6), tile.getVector(2, 4));
assertEquals(Arrays.intVector(3, 7, 3, 7), tile.getVector(2, 6));
assertEquals(Arrays.intVector(0, 4, 0, 4), tile.getVector(2, 8));
assertEquals(Arrays.intVector(1, 5, 1, 5), tile.getVector(2, 10));
}
@Test
public void testTile() throws Exception {
// 1-d and 2-d tiling, including a 2x2 tiling of a 2x2 matrix.
IntArray a = Arrays.intVector(0, 1, 2);
IntArray a2 = Arrays.intVector(0, 1, 2, 0, 1, 2);
assertEquals(a2, Arrays.tile(a, 2));
IntArray a22 = Arrays.tile(a, 2, 2);
assertEquals(a2, a22.getVector(1, 0));
assertEquals(a2, a22.getVector(1, 1));
DoubleArray b = Arrays.doubleMatrix(new double[][] {{1, 2}, {3, 4}});
DoubleArray bexpected = Arrays
.doubleMatrix(new double[][] {{1, 2, 1, 2}, {3, 4, 3, 4}, {1, 2, 1, 2}, {3, 4, 3, 4}});
assertEquals(bexpected, Arrays.tile(b, 2, 2));
}
@Test
public void testHstackedNd() throws Exception {
// hstack must be the inverse of hsplit.
IntArray x = Arrays.range(3 * 6 * 3).reshape(3, 6, 3);
List<IntArray> split = Arrays.hsplit(x, 3);
System.out.println(split);
IntArray hstack = Arrays.hstack(split);
assertEquals(x, hstack);
}
@Test
public void testMeshgrid() throws Exception {
// meshgrid(x, x) returns coordinate matrices; x2 repeats x along dim 0,
// x1 repeats x along dim 1.
IntArray x = Arrays.range(3);
List<IntArray> meshgrid = Arrays.meshgrid(x, x);
IntArray x1 = meshgrid.get(1);
IntArray x2 = meshgrid.get(0);
assertEquals(3, x1.size(0));
assertEquals(3, x1.size(1));
assertEquals(3, x2.size(0));
assertEquals(3, x2.size(1));
for (int i = 0; i < x2.vectors(0); i++) {
assertEquals(x, x2.getVector(0, i));
}
for (int i = 0; i < x1.vectors(1); i++) {
assertEquals(x, x1.getVector(1, i));
}
}
@Test
public void testBisectLeft() throws Exception {
// Leftmost insertion point of 12 in sorted [1, 2, 9, 10, 12] is index 4.
IntArray a = IntArray.of(1, 2, 9, 10, 12);
assertEquals(4, Arrays.bisectLeft(a, 12));
}
@Test
public void testSort_DoubleArray() throws Exception {
DoubleArray x = DoubleArray.of(3, 2, 5, 1, 9, 3);
assertEquals(DoubleArray.of(1, 2, 3, 3, 5, 9), Arrays.sort(x));
}
@Test
public void testSort_DoubleArray_2d() throws Exception {
// Sorting along dim 0 sorts each column independently.
DoubleArray x = DoubleArray.of(3, 2, 1, 9, 8, 10, 12, 3, 1).reshape(3, 3);
DoubleArray sort = Arrays.sort(0, x);
assertEquals(DoubleArray.of(1, 2, 3, 8, 9, 10, 1, 3, 12).reshape(3, 3), sort);
}
@Test
public void testSort_IntArray() throws Exception {
IntArray x = IntArray.of(3, 2, 5, 1, 9, 3);
assertEquals(IntArray.of(1, 2, 3, 3, 5, 9), Arrays.sort(x));
}
@Test
public void testOrder() throws Exception {
// order returns the indices that would sort the array (argsort).
DoubleArray array = DoubleArray.of(2, 3, 1, 9, 1);
assertEquals(IntArray.of(2, 4, 0, 1, 3), Arrays.order(array));
}
@Test
public void testOrderDimension() throws Exception {
// argsort along dim 0: each column ordered independently.
DoubleArray array = DoubleArray.of(1, 9, 1, 9, 2, 4).reshape(3, 2);
assertEquals(IntArray.of(0, 2, 1, 1, 2, 0).reshape(3, 2), Arrays.order(0, array));
}
@Test
public void testConcatenate() throws Exception {
// Concatenation of a (2, 2, 3) array with itself along each dimension.
IntArray x = Arrays.range(2 * 2 * 3).reshape(2, 2, 3);
IntArray concat_0 = Arrays.concatenate(asList(x, x, x), 0);
IntArray concat_1 = Arrays.concatenate(asList(x, x, x), 1);
IntArray concat_2 = Arrays.concatenate(asList(x, x, x), 2);
IntArray expected_0 = IntArray.of(0, 1, 0, 1, 0, 1, 2, 3, 2, 3, 2, 3, 4, 5, 4, 5, 4, 5, 6, 7, 6,
7, 6, 7, 8, 9, 8, 9, 8, 9, 10, 11, 10, 11, 10, 11);
IntArray expected_1 = IntArray.of(0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 4, 5, 6, 7, 4, 5, 6, 7, 4,
5, 6, 7, 8, 9, 10, 11, 8, 9, 10, 11, 8, 9, 10, 11);
IntArray expected_2 = IntArray.of(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
assertEquals(expected_0.reshape(6, 2, 3), concat_0);
assertEquals(expected_1.reshape(2, 6, 3), concat_1);
assertEquals(expected_2.reshape(2, 2, 9), concat_2);
}
// NOTE(review): print-only; no assertions.
@Test
public void testConcat_1d() throws Exception {
IntArray x = Arrays.range(10);
IntArray y = Arrays.range(10);
System.out.println(Arrays.concatenate(asList(x, y), 0));
}
// NOTE(review): print-only; no assertions.
@Test
public void testVstack_1d() throws Exception {
IntArray x = Arrays.range(10);
IntArray y = Arrays.range(10);
System.out.println(Arrays.vstack(asList(x, y)));
System.out.println(Arrays.hstack(asList(x, y)));
System.out.println(Arrays.concatenate(asList(x, y), 0));
}
// NOTE(review): print-only; no assertions.
@Test
public void testSplit_1d() throws Exception {
System.out.println(Arrays.split(Arrays.range(10), 10));
}
@Test
public void testSplit() throws Exception {
// concatenate(split(x, k, dim), dim) must round-trip to x for every dim.
IntArray x = Arrays.range(2 * 2 * 3).reshape(2, 2, 3);
assertEquals(x, Arrays.concatenate(Arrays.split(x, 2, 0), 0));
assertEquals(x, Arrays.concatenate(Arrays.split(x, 2, 1), 1));
assertEquals(x, Arrays.concatenate(Arrays.split(x, 3, 2), 2));
}
}
| |
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
/**
* Creates queries from the {@link Analyzer} chain.
* <p>
* Example usage:
* <pre class="prettyprint">
* QueryBuilder builder = new QueryBuilder(analyzer);
* Query a = builder.createBooleanQuery("body", "just a test");
* Query b = builder.createPhraseQuery("body", "another test");
* Query c = builder.createMinShouldMatchQuery("body", "another test", 0.5f);
* </pre>
* <p>
* This can also be used as a subclass for query parsers to make it easier
* to interact with the analysis chain. Factory methods such as {@code newTermQuery}
* are provided so that the generated queries can be customized.
*/
public class QueryBuilder {
// The analysis chain used to tokenize query text; mutable via setAnalyzer.
private Analyzer analyzer;
// When true, phrase/multi-phrase queries honor position increments
// (e.g. gaps left by StopFilter). Default true; see setEnablePositionIncrements.
private boolean enablePositionIncrements = true;
/** Creates a new QueryBuilder using the given analyzer. */
public QueryBuilder(Analyzer analyzer) {
this.analyzer = analyzer;
}
/**
 * Creates a boolean query from the query text.
 * <p>
 * This is equivalent to {@code createBooleanQuery(field, queryText, Occur.SHOULD)}
 * @param field field name
 * @param queryText text to be passed to the analyzer
 * @return {@code TermQuery} or {@code BooleanQuery}, based on the analysis
 *         of {@code queryText}
 */
public Query createBooleanQuery(String field, String queryText) {
return createBooleanQuery(field, queryText, BooleanClause.Occur.SHOULD);
}
/**
 * Creates a boolean query from the query text.
 * <p>
 * @param field field name
 * @param queryText text to be passed to the analyzer
 * @param operator operator used for clauses between analyzer tokens.
 * @return {@code TermQuery} or {@code BooleanQuery}, based on the analysis
 *         of {@code queryText}
 * @throws IllegalArgumentException if operator is not SHOULD or MUST
 */
public Query createBooleanQuery(String field, String queryText, BooleanClause.Occur operator) {
if (operator != BooleanClause.Occur.SHOULD && operator != BooleanClause.Occur.MUST) {
throw new IllegalArgumentException("invalid operator: only SHOULD or MUST are allowed");
}
return createFieldQuery(analyzer, operator, field, queryText, false, 0);
}
/**
 * Creates a phrase query from the query text.
 * <p>
 * This is equivalent to {@code createPhraseQuery(field, queryText, 0)}
 * @param field field name
 * @param queryText text to be passed to the analyzer
 * @return {@code TermQuery}, {@code BooleanQuery}, {@code PhraseQuery}, or
 *         {@code MultiPhraseQuery}, based on the analysis of {@code queryText}
 */
public Query createPhraseQuery(String field, String queryText) {
return createPhraseQuery(field, queryText, 0);
}
/**
 * Creates a phrase query from the query text.
 * <p>
 * @param field field name
 * @param queryText text to be passed to the analyzer
 * @param phraseSlop number of other words permitted between words in query phrase
 * @return {@code TermQuery}, {@code BooleanQuery}, {@code PhraseQuery}, or
 *         {@code MultiPhraseQuery}, based on the analysis of {@code queryText}
 */
public Query createPhraseQuery(String field, String queryText, int phraseSlop) {
return createFieldQuery(analyzer, BooleanClause.Occur.MUST, field, queryText, true, phraseSlop);
}
/**
 * Creates a minimum-should-match query from the query text.
 * <p>
 * @param field field name
 * @param queryText text to be passed to the analyzer
 * @param fraction of query terms {@code [0..1]} that should match
 * @return {@code TermQuery} or {@code BooleanQuery}, based on the analysis
 *         of {@code queryText}
 * @throws IllegalArgumentException if fraction is NaN or outside [0, 1]
 */
public Query createMinShouldMatchQuery(String field, String queryText, float fraction) {
if (Float.isNaN(fraction) || fraction < 0 || fraction > 1) {
throw new IllegalArgumentException("fraction should be >= 0 and <= 1");
}
// TODO: weird that BQ equals/rewrite/scorer doesn't handle this?
if (fraction == 1) {
return createBooleanQuery(field, queryText, BooleanClause.Occur.MUST);
}
Query query = createFieldQuery(analyzer, BooleanClause.Occur.SHOULD, field, queryText, false, 0);
if (query instanceof BooleanQuery) {
// Rebuild the boolean query with minimumNumberShouldMatch derived from
// the clause count; a single-term result is returned unchanged.
BooleanQuery bq = (BooleanQuery) query;
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(bq.isCoordDisabled());
builder.setMinimumNumberShouldMatch((int) (fraction * bq.clauses().size()));
for (BooleanClause clause : bq) {
builder.add(clause);
}
query = builder.build();
}
return query;
}
/**
 * Returns the analyzer.
 * @see #setAnalyzer(Analyzer)
 */
public Analyzer getAnalyzer() {
return analyzer;
}
/**
 * Sets the analyzer used to tokenize text.
 */
public void setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
/**
 * Returns true if position increments are enabled.
 * @see #setEnablePositionIncrements(boolean)
 */
public boolean getEnablePositionIncrements() {
return enablePositionIncrements;
}
/**
 * Set to <code>true</code> to enable position increments in result query.
 * <p>
 * When set, result phrase and multi-phrase queries will
 * be aware of position increments.
 * Useful when e.g. a StopFilter increases the position increment of
 * the token that follows an omitted token.
 * <p>
 * Default: true.
 */
public void setEnablePositionIncrements(boolean enable) {
this.enablePositionIncrements = enable;
}
/**
 * Creates a query from the analysis chain.
 * <p>
 * Expert: this is more useful for subclasses such as queryparsers.
 * If using this class directly, just use {@link #createBooleanQuery(String, String)}
 * and {@link #createPhraseQuery(String, String)}
 * @param analyzer analyzer used for this query
 * @param operator default boolean operator used for this query
 * @param field field to create queries against
 * @param queryText text to be passed to the analysis chain
 * @param quoted true if phrases should be generated when terms occur at more than one position
 * @param phraseSlop slop factor for phrase/multiphrase queries
 */
protected final Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, String queryText, boolean quoted, int phraseSlop) {
assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST;
// Use the analyzer to get all the tokens, and then build an appropriate
// query based on the analysis chain. CachingTokenFilter lets us consume
// the stream twice: once to count, once to build.
try (TokenStream source = analyzer.tokenStream(field, queryText);
     CachingTokenFilter stream = new CachingTokenFilter(source)) {
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class);
// No term attribute means the chain produces no indexable terms; nothing to query.
if (termAtt == null) {
return null;
}
// phase 1: read through the stream and assess the situation:
// counting the number of tokens/positions and marking if we have any synonyms.
int numTokens = 0;
int positionCount = 0;
boolean hasSynonyms = false;
stream.reset();
while (stream.incrementToken()) {
numTokens++;
int positionIncrement = posIncAtt.getPositionIncrement();
if (positionIncrement != 0) {
positionCount += positionIncrement;
} else {
// A zero position increment means this token stacks on the previous
// position, i.e. a synonym.
hasSynonyms = true;
}
}
// phase 2: based on token count, presence of synonyms, and options
// formulate a single term, boolean, or phrase.
if (numTokens == 0) {
return null;
} else if (numTokens == 1) {
// single term
return analyzeTerm(field, stream);
} else if (quoted && positionCount > 1) {
// phrase
if (hasSynonyms) {
// complex phrase with synonyms
return analyzeMultiPhrase(field, stream, phraseSlop);
} else {
// simple phrase
return analyzePhrase(field, stream, phraseSlop);
}
} else {
// boolean
if (positionCount == 1) {
// only one position, with synonyms
return analyzeBoolean(field, stream);
} else {
// complex case: multiple positions
return analyzeMultiBoolean(field, stream, operator);
}
}
} catch (IOException e) {
throw new RuntimeException("Error analyzing query text", e);
}
}
/**
 * Creates simple term query from the cached tokenstream contents
 */
private Query analyzeTerm(String field, TokenStream stream) throws IOException {
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
stream.reset();
// Caller guarantees exactly one token; a missing token here is a bug.
if (!stream.incrementToken()) {
throw new AssertionError();
}
return newTermQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
}
/**
 * Creates simple boolean query from the cached tokenstream contents
 */
private Query analyzeBoolean(String field, TokenStream stream) throws IOException {
BooleanQuery.Builder q = new BooleanQuery.Builder();
// All tokens share one position (synonyms), so coord scoring is disabled.
q.setDisableCoord(true);
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
stream.reset();
while (stream.incrementToken()) {
Query currentQuery = newTermQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
q.add(currentQuery, BooleanClause.Occur.SHOULD);
}
return q.build();
}
// Adds `current` to `q` under `operator`, unwrapping a single-clause
// BooleanQuery and dropping an empty one.
private void add(BooleanQuery.Builder q, BooleanQuery current, BooleanClause.Occur operator) {
if (current.clauses().isEmpty()) {
return;
}
if (current.clauses().size() == 1) {
q.add(current.clauses().iterator().next().getQuery(), operator);
} else {
q.add(current, operator);
}
}
/**
 * Creates complex boolean query from the cached tokenstream contents
 */
private Query analyzeMultiBoolean(String field, TokenStream stream, BooleanClause.Occur operator) throws IOException {
BooleanQuery.Builder q = newBooleanQuery(false);
// `currentQuery` accumulates the synonym group at the current position.
BooleanQuery.Builder currentQuery = newBooleanQuery(true);
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
stream.reset();
while (stream.incrementToken()) {
BytesRef bytes = termAtt.getBytesRef();
if (posIncrAtt.getPositionIncrement() != 0) {
// A new position starts: flush the previous synonym group.
add(q, currentQuery.build(), operator);
currentQuery = newBooleanQuery(true);
}
currentQuery.add(newTermQuery(new Term(field, BytesRef.deepCopyOf(bytes))), BooleanClause.Occur.SHOULD);
}
// Flush the trailing group.
add(q, currentQuery.build(), operator);
return q.build();
}
/**
 * Creates simple phrase query from the cached tokenstream contents
 */
private Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException {
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
int position = -1;
stream.reset();
while (stream.incrementToken()) {
BytesRef bytes = termAtt.getBytesRef();
if (enablePositionIncrements) {
// Honor gaps (e.g. removed stopwords) in the stored positions.
position += posIncrAtt.getPositionIncrement();
} else {
position += 1;
}
builder.add(new Term(field, bytes), position);
}
return builder.build();
}
/**
 * Creates complex phrase query from the cached tokenstream contents
 */
private Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException {
MultiPhraseQuery mpq = newMultiPhraseQuery();
mpq.setSlop(slop);
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
int position = -1;
// Terms stacked at the current position (synonyms) awaiting flush.
List<Term> multiTerms = new ArrayList<>();
stream.reset();
while (stream.incrementToken()) {
int positionIncrement = posIncrAtt.getPositionIncrement();
if (positionIncrement > 0 && multiTerms.size() > 0) {
// Position advanced: emit the accumulated synonym group.
if (enablePositionIncrements) {
mpq.add(multiTerms.toArray(new Term[0]), position);
} else {
mpq.add(multiTerms.toArray(new Term[0]));
}
multiTerms.clear();
}
position += positionIncrement;
multiTerms.add(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
}
// Emit the final group.
if (enablePositionIncrements) {
mpq.add(multiTerms.toArray(new Term[0]), position);
} else {
mpq.add(multiTerms.toArray(new Term[0]));
}
return mpq;
}
/**
 * Builds a new BooleanQuery instance.
 * <p>
 * This is intended for subclasses that wish to customize the generated queries.
 * @param disableCoord disable coord
 * @return new BooleanQuery instance
 */
protected BooleanQuery.Builder newBooleanQuery(boolean disableCoord) {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(disableCoord);
return builder;
}
/**
 * Builds a new TermQuery instance.
 * <p>
 * This is intended for subclasses that wish to customize the generated queries.
 * @param term term
 * @return new TermQuery instance
 */
protected Query newTermQuery(Term term) {
return new TermQuery(term);
}
/**
 * Builds a new MultiPhraseQuery instance.
 * <p>
 * This is intended for subclasses that wish to customize the generated queries.
 * @return new MultiPhraseQuery instance
 */
protected MultiPhraseQuery newMultiPhraseQuery() {
return new MultiPhraseQuery();
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudfront.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * The request to list invalidations.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2020-05-31/ListInvalidations" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListInvalidationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the distribution whose invalidation batches are listed. */
    private String distributionId;

    /**
     * Pagination cursor. Results come back newest-first, so the next page is requested by setting this to the
     * <code>NextMarker</code> value from the current page's response (which equals the ID of the last invalidation
     * batch on that page).
     */
    private String marker;

    /** The maximum number of invalidation batches wanted in the response body. */
    private String maxItems;

    /**
     * Creates an empty request. Populate the members afterwards via the setters or the fluent <code>with*</code>
     * methods.
     */
    public ListInvalidationsRequest() {
    }

    /**
     * Creates a request for the given distribution. Remaining members can be populated via the setters or the fluent
     * <code>with*</code> methods.
     *
     * @param distributionId
     *        The distribution's ID.
     */
    public ListInvalidationsRequest(String distributionId) {
        setDistributionId(distributionId);
    }

    /**
     * Sets the distribution's ID.
     *
     * @param distributionId
     *        The distribution's ID.
     */
    public void setDistributionId(String distributionId) {
        this.distributionId = distributionId;
    }

    /**
     * Returns the distribution's ID.
     *
     * @return The distribution's ID.
     */
    public String getDistributionId() {
        return this.distributionId;
    }

    /**
     * Fluent variant of {@link #setDistributionId(String)}.
     *
     * @param distributionId
     *        The distribution's ID.
     * @return This request, so calls can be chained.
     */
    public ListInvalidationsRequest withDistributionId(String distributionId) {
        setDistributionId(distributionId);
        return this;
    }

    /**
     * Sets the pagination marker indicating where to begin in the list of invalidation batches. Because results are
     * returned newest-first, set this to the <code>NextMarker</code> value from the current page's response to get
     * the next page; that value equals the ID of the last invalidation batch on the page.
     *
     * @param marker
     *        Where to begin in the list of invalidation batches.
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the pagination marker indicating where to begin in the list of invalidation batches.
     *
     * @return The pagination marker.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker
     *        Where to begin in the list of invalidation batches.
     * @return This request, so calls can be chained.
     */
    public ListInvalidationsRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Sets the maximum number of invalidation batches wanted in the response body.
     *
     * @param maxItems
     *        The maximum number of invalidation batches.
     */
    public void setMaxItems(String maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * Returns the maximum number of invalidation batches wanted in the response body.
     *
     * @return The maximum number of invalidation batches.
     */
    public String getMaxItems() {
        return this.maxItems;
    }

    /**
     * Fluent variant of {@link #setMaxItems(String)}.
     *
     * @param maxItems
     *        The maximum number of invalidation batches.
     * @return This request, so calls can be chained.
     */
    public ListInvalidationsRequest withMaxItems(String maxItems) {
        setMaxItems(maxItems);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will
     * be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Same "{Member: value,...}" rendering as the generated SDK code; null members are omitted.
        StringBuilder sb = new StringBuilder("{");
        if (getDistributionId() != null)
            sb.append("DistributionId: ").append(getDistributionId()).append(",");
        if (getMarker() != null)
            sb.append("Marker: ").append(getMarker()).append(",");
        if (getMaxItems() != null)
            sb.append("MaxItems: ").append(getMaxItems());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ListInvalidationsRequest))
            return false;
        ListInvalidationsRequest that = (ListInvalidationsRequest) obj;
        return java.util.Objects.equals(getDistributionId(), that.getDistributionId())
                && java.util.Objects.equals(getMarker(), that.getMarker())
                && java.util.Objects.equals(getMaxItems(), that.getMaxItems());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based accumulation (0 for null) as the hand-rolled generated version.
        return java.util.Objects.hash(getDistributionId(), getMarker(), getMaxItems());
    }

    @Override
    public ListInvalidationsRequest clone() {
        return (ListInvalidationsRequest) super.clone();
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.textract.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Information about how blocks are related to each other. A <code>Block</code> object contains 0 or more
 * <code>Relation</code> objects in a list, <code>Relationships</code>. For more information, see <a>Block</a>.
 * </p>
 * <p>
 * The <code>Type</code> element provides the type of the relationship for all blocks in the <code>IDs</code> array.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/textract-2018-06-27/Relationship" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Relationship implements Serializable, Cloneable, StructuredPojo {

    /**
     * The relationship type shared by every block in {@link #ids}: <code>VALUE</code> (a list containing the ID of
     * the VALUE block associated with the KEY of a key-value pair) or <code>CHILD</code> (IDs identifying WORD blocks
     * for lines, Cell blocks for tables, and WORD blocks for selection elements).
     */
    private String type;

    /** IDs of the related blocks; their relationship to the current block is given by {@link #type}. */
    private java.util.List<String> ids;

    /**
     * Sets the type of relationship that the blocks in the IDs array have with the current block
     * (<code>VALUE</code> or <code>CHILD</code>; see the class documentation for what each means).
     *
     * @param type
     *        The relationship type.
     * @see RelationshipType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the type of relationship that the blocks in the IDs array have with the current block.
     *
     * @return The relationship type (<code>VALUE</code> or <code>CHILD</code>).
     * @see RelationshipType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent variant of {@link #setType(String)}.
     *
     * @param type
     *        The relationship type.
     * @return This object, so calls can be chained.
     * @see RelationshipType
     */
    public Relationship withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Fluent setter accepting the {@link RelationshipType} enum; the enum's string form is stored.
     *
     * @param type
     *        The relationship type.
     * @return This object, so calls can be chained.
     * @see RelationshipType
     */
    public Relationship withType(RelationshipType type) {
        this.type = type.toString();
        return this;
    }

    /**
     * Returns the array of IDs for related blocks. You can get the type of the relationship from the
     * <code>Type</code> element.
     *
     * @return The IDs of the related blocks.
     */
    public java.util.List<String> getIds() {
        return ids;
    }

    /**
     * Sets the array of IDs for related blocks. The given collection is defensively copied; passing
     * <code>null</code> clears the member.
     *
     * @param ids
     *        The IDs of the related blocks.
     */
    public void setIds(java.util.Collection<String> ids) {
        this.ids = (ids == null) ? null : new java.util.ArrayList<String>(ids);
    }

    /**
     * Appends the given IDs to the existing list (if any). Use {@link #setIds(java.util.Collection)} or
     * {@link #withIds(java.util.Collection)} if you want to replace the existing values instead.
     *
     * @param ids
     *        The IDs of the related blocks.
     * @return This object, so calls can be chained.
     */
    public Relationship withIds(String... ids) {
        if (this.ids == null) {
            this.ids = new java.util.ArrayList<String>(ids.length);
        }
        for (String id : ids) {
            this.ids.add(id);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setIds(java.util.Collection)}; replaces any existing values.
     *
     * @param ids
     *        The IDs of the related blocks.
     * @return This object, so calls can be chained.
     */
    public Relationship withIds(java.util.Collection<String> ids) {
        setIds(ids);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will
     * be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Same "{Member: value,...}" rendering as the generated SDK code; null members are omitted.
        StringBuilder sb = new StringBuilder("{");
        if (getType() != null)
            sb.append("Type: ").append(getType()).append(",");
        if (getIds() != null)
            sb.append("Ids: ").append(getIds());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof Relationship))
            return false;
        Relationship that = (Relationship) obj;
        return java.util.Objects.equals(getType(), that.getType())
                && java.util.Objects.equals(getIds(), that.getIds());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based accumulation (0 for null) as the hand-rolled generated version.
        return java.util.Objects.hash(getType(), getIds());
    }

    @Override
    public Relationship clone() {
        try {
            return (Relationship) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: this class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    /** Marshals this structured POJO via the SDK-internal protocol marshaller. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.textract.model.transform.RelationshipMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
// Generated from C:/work/litterbin/demos/antlr-expr/src/main/java/expr3/grammar\Expr.g4 by ANTLR 4.5.1
package expr3.grammar;
import expr3.grammar.*;
import expr3.grammar.ExprVisitor;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class ExprParser extends Parser {
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, T__1=2, OP_LT=3, OP_GT=4, OP_LE=5, OP_GE=6, OP_EQ=7, OP_NE=8,
OP_AND=9, OP_OR=10, OP_NOT=11, OP_ADD=12, OP_SUB=13, OP_MUL=14, OP_DIV=15,
OP_MOD=16, NULL_LITERAL=17, BOOLEAN_LITERAL=18, NUMERIC_LITERAL=19, STRING_LITERAL=20,
ID=21, WS=22;
public static final int
RULE_result = 0, RULE_expr = 1;
public static final String[] ruleNames = {
"result", "expr"
};
private static final String[] _LITERAL_NAMES = {
null, "'('", "')'", null, null, null, null, null, null, null, null, null,
"'+'", "'-'", "'*'", "'/'", "'%'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, null, null, "OP_LT", "OP_GT", "OP_LE", "OP_GE", "OP_EQ", "OP_NE",
"OP_AND", "OP_OR", "OP_NOT", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV", "OP_MOD",
"NULL_LITERAL", "BOOLEAN_LITERAL", "NUMERIC_LITERAL", "STRING_LITERAL",
"ID", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "Expr.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public ExprParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
public static class ResultContext extends ParserRuleContext {
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public ResultContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_result; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitResult(this);
else return visitor.visitChildren(this);
}
}
public final ResultContext result() throws RecognitionException {
ResultContext _localctx = new ResultContext(_ctx, getState());
enterRule(_localctx, 0, RULE_result);
try {
enterOuterAlt(_localctx, 1);
{
setState(4);
expr(0);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ExprContext extends ParserRuleContext {
public ExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expr; }
public ExprContext() { }
public void copyFrom(ExprContext ctx) {
super.copyFrom(ctx);
}
}
public static class UnarySignContext extends ExprContext {
public Token op;
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public UnarySignContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitUnarySign(this);
else return visitor.visitChildren(this);
}
}
public static class ParensContext extends ExprContext {
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public ParensContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitParens(this);
else return visitor.visitChildren(this);
}
}
public static class NullLiteralContext extends ExprContext {
public TerminalNode NULL_LITERAL() { return getToken(ExprParser.NULL_LITERAL, 0); }
public NullLiteralContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitNullLiteral(this);
else return visitor.visitChildren(this);
}
}
public static class StringLiteralContext extends ExprContext {
public TerminalNode STRING_LITERAL() { return getToken(expr3.grammar.ExprParser.STRING_LITERAL, 0); }
public StringLiteralContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitStringLiteral(this);
else return visitor.visitChildren(this);
}
}
public static class VariableContext extends ExprContext {
public TerminalNode ID() { return getToken(expr3.grammar.ExprParser.ID, 0); }
public VariableContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitVariable(this);
else return visitor.visitChildren(this);
}
}
public static class LogicOpContext extends ExprContext {
public Token op;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public TerminalNode OP_AND() { return getToken(expr3.grammar.ExprParser.OP_AND, 0); }
public TerminalNode OP_OR() { return getToken(expr3.grammar.ExprParser.OP_OR, 0); }
public LogicOpContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitLogicOp(this);
else return visitor.visitChildren(this);
}
}
public static class ComparisonOpContext extends ExprContext {
public Token op;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public TerminalNode OP_LT() { return getToken(expr3.grammar.ExprParser.OP_LT, 0); }
public TerminalNode OP_GT() { return getToken(expr3.grammar.ExprParser.OP_GT, 0); }
public TerminalNode OP_EQ() { return getToken(expr3.grammar.ExprParser.OP_EQ, 0); }
public TerminalNode OP_NE() { return getToken(expr3.grammar.ExprParser.OP_NE, 0); }
public TerminalNode OP_LE() { return getToken(expr3.grammar.ExprParser.OP_LE, 0); }
public TerminalNode OP_GE() { return getToken(expr3.grammar.ExprParser.OP_GE, 0); }
public ComparisonOpContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitComparisonOp(this);
else return visitor.visitChildren(this);
}
}
public static class ArithmeticOpContext extends ExprContext {
public Token op;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class, i);
}
public TerminalNode OP_MUL() { return getToken(expr3.grammar.ExprParser.OP_MUL, 0); }
public TerminalNode OP_DIV() { return getToken(expr3.grammar.ExprParser.OP_DIV, 0); }
public TerminalNode OP_MOD() { return getToken(expr3.grammar.ExprParser.OP_MOD, 0); }
public TerminalNode OP_ADD() { return getToken(expr3.grammar.ExprParser.OP_ADD, 0); }
public TerminalNode OP_SUB() { return getToken(expr3.grammar.ExprParser.OP_SUB, 0); }
public ArithmeticOpContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitArithmeticOp(this);
else return visitor.visitChildren(this);
}
}
public static class BooleanLiteralContext extends ExprContext {
public TerminalNode BOOLEAN_LITERAL() { return getToken(expr3.grammar.ExprParser.BOOLEAN_LITERAL, 0); }
public BooleanLiteralContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitBooleanLiteral(this);
else return visitor.visitChildren(this);
}
}
public static class NumericLiteralContext extends ExprContext {
public TerminalNode NUMERIC_LITERAL() { return getToken(expr3.grammar.ExprParser.NUMERIC_LITERAL, 0); }
public NumericLiteralContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((expr3.grammar.ExprVisitor<? extends T>)visitor).visitNumericLiteral(this);
else return visitor.visitChildren(this);
}
}
public static class LogicNotContext extends ExprContext {
public TerminalNode OP_NOT() { return getToken(expr3.grammar.ExprParser.OP_NOT, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class, 0);
}
public LogicNotContext(ExprContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof expr3.grammar.ExprVisitor) return ((ExprVisitor<? extends T>)visitor).visitLogicNot(this);
else return visitor.visitChildren(this);
}
}
public final ExprContext expr() throws RecognitionException {
return expr(0);
}
private ExprContext expr(int _p) throws RecognitionException {
ParserRuleContext _parentctx = _ctx;
int _parentState = getState();
ExprContext _localctx = new ExprContext(_ctx, _parentState);
ExprContext _prevctx = _localctx;
int _startState = 2;
enterRecursionRule(_localctx, 2, RULE_expr, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(20);
switch (_input.LA(1)) {
case OP_ADD:
case OP_SUB:
{
_localctx = new UnarySignContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(7);
((UnarySignContext)_localctx).op = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==OP_ADD || _la==OP_SUB) ) {
((UnarySignContext)_localctx).op = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
setState(8);
expr(8);
}
break;
case OP_NOT:
{
_localctx = new LogicNotContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(9);
match(OP_NOT);
setState(10);
expr(4);
}
break;
case STRING_LITERAL:
{
_localctx = new StringLiteralContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(11);
match(STRING_LITERAL);
}
break;
case BOOLEAN_LITERAL:
{
_localctx = new BooleanLiteralContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(12);
match(BOOLEAN_LITERAL);
}
break;
case NUMERIC_LITERAL:
{
_localctx = new NumericLiteralContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(13);
match(NUMERIC_LITERAL);
}
break;
case NULL_LITERAL:
{
_localctx = new NullLiteralContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(14);
match(NULL_LITERAL);
}
break;
case ID:
{
_localctx = new VariableContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(15);
match(ID);
}
break;
case T__0:
{
_localctx = new ParensContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(16);
match(T__0);
setState(17);
expr(0);
setState(18);
match(T__1);
}
break;
default:
throw new NoViableAltException(this);
}
_ctx.stop = _input.LT(-1);
setState(36);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,2,_ctx);
while ( _alt!=2 && _alt!= ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
setState(34);
switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
case 1:
{
_localctx = new ArithmeticOpContext(new ExprContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(22);
if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)");
setState(23);
((ArithmeticOpContext)_localctx).op = _input.LT(1);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << OP_MUL) | (1L << OP_DIV) | (1L << OP_MOD))) != 0)) ) {
((ArithmeticOpContext)_localctx).op = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
setState(24);
expr(8);
}
break;
case 2:
{
_localctx = new ArithmeticOpContext(new ExprContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(25);
if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)");
setState(26);
((ArithmeticOpContext)_localctx).op = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==OP_ADD || _la==OP_SUB) ) {
((ArithmeticOpContext)_localctx).op = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
setState(27);
expr(7);
}
break;
case 3:
{
_localctx = new ComparisonOpContext(new ExprContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(28);
if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)");
setState(29);
((ComparisonOpContext)_localctx).op = _input.LT(1);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << OP_LT) | (1L << OP_GT) | (1L << OP_LE) | (1L << OP_GE) | (1L << OP_EQ) | (1L << OP_NE))) != 0)) ) {
((ComparisonOpContext)_localctx).op = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
setState(30);
expr(6);
}
break;
case 4:
{
_localctx = new LogicOpContext(new ExprContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(31);
if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
setState(32);
((LogicOpContext)_localctx).op = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==OP_AND || _la==OP_OR) ) {
((LogicOpContext)_localctx).op = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
setState(33);
expr(4);
}
break;
}
}
}
setState(38);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,2,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
unrollRecursionContexts(_parentctx);
}
return _localctx;
}
/**
 * ANTLR-generated semantic-predicate dispatcher.
 * Routes predicate evaluation to the per-rule method; here only rule
 * index 1 (the expr rule) carries predicates. Any other rule index
 * trivially succeeds.
 */
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
    switch (ruleIndex) {
    case 1:
        // expr is the only rule with semantic predicates
        return expr_sempred((ExprContext)_localctx, predIndex);
    }
    return true;
}
/**
 * ANTLR-generated predicate bodies for the left-recursive expr rule.
 * Each predicate index maps to a precedence check used to decide whether
 * a given binary-operator alternative may be taken at the current
 * precedence level (7, 6, 5, 3 match the alternatives in the generated
 * expr() loop above).
 */
private boolean expr_sempred(ExprContext _localctx, int predIndex) {
    switch (predIndex) {
    case 0:
        return precpred(_ctx, 7);
    case 1:
        return precpred(_ctx, 6);
    case 2:
        return precpred(_ctx, 5);
    case 3:
        return precpred(_ctx, 3);
    }
    return true;
}
// Serialized ATN (augmented transition network) for this grammar, emitted
// by the ANTLR tool. This is opaque generated data — do not edit by hand;
// regenerate from the grammar instead.
public static final String _serializedATN =
    "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\30*\4\2\t\2\4\3\t"+
    "\3\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3"+
    "\27\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3%\n\3\f\3\16"+
    "\3(\13\3\3\3\2\3\4\4\2\4\2\6\3\2\16\17\3\2\20\22\3\2\5\n\3\2\13\f\62\2"+
    "\6\3\2\2\2\4\26\3\2\2\2\6\7\5\4\3\2\7\3\3\2\2\2\b\t\b\3\1\2\t\n\t\2\2"+
    "\2\n\27\5\4\3\n\13\f\7\r\2\2\f\27\5\4\3\6\r\27\7\26\2\2\16\27\7\24\2\2"+
    "\17\27\7\25\2\2\20\27\7\23\2\2\21\27\7\27\2\2\22\23\7\3\2\2\23\24\5\4"+
    "\3\2\24\25\7\4\2\2\25\27\3\2\2\2\26\b\3\2\2\2\26\13\3\2\2\2\26\r\3\2\2"+
    "\2\26\16\3\2\2\2\26\17\3\2\2\2\26\20\3\2\2\2\26\21\3\2\2\2\26\22\3\2\2"+
    "\2\27&\3\2\2\2\30\31\f\t\2\2\31\32\t\3\2\2\32%\5\4\3\n\33\34\f\b\2\2\34"+
    "\35\t\2\2\2\35%\5\4\3\t\36\37\f\7\2\2\37 \t\4\2\2 %\5\4\3\b!\"\f\5\2\2"+
    "\"#\t\5\2\2#%\5\4\3\6$\30\3\2\2\2$\33\3\2\2\2$\36\3\2\2\2$!\3\2\2\2%("+
    "\3\2\2\2&$\3\2\2\2&\'\3\2\2\2\'\5\3\2\2\2(&\3\2\2\2\5\26$&";
// Runtime ATN deserialized once at class-load time.
public static final ATN _ATN =
    new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
    // One DFA slot per decision point in the ATN; each DFA starts empty and
    // is filled in lazily by the adaptive prediction machinery.
    _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
    for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
        _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
    }
}
}
| |
package pl.grzegorz2047.survivalcg.listeners;
import org.bukkit.Location;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.Egg;
import org.bukkit.entity.Player;
import org.bukkit.entity.Snowball;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.*;
import org.bukkit.projectiles.ProjectileSource;
import pl.grzegorz2047.api.user.User;
import pl.grzegorz2047.survivalcg.SCG;
import pl.grzegorz2047.survivalcg.Fight;
/**
* Created by grzegorz2047 on 27.12.2015.
*/
/**
 * Enforces two server rules around combat events:
 * <ol>
 *   <li>a protected radius around the world spawn where explosions,
 *       damage, hostile targeting and PvP are cancelled, and</li>
 *   <li>guild / ally friendly-fire protection for direct hits and for
 *       Arrow, Snowball and Egg projectiles.</li>
 * </ol>
 * Successful PvP hits also start/refresh anti-logout "fight" timers for
 * both players.
 *
 * Created by grzegorz2047 on 27.12.2015.
 */
public class PlayerDamagePlayerListener implements Listener {

    private final SCG plugin;

    public PlayerDamagePlayerListener(SCG plugin) {
        this.plugin = plugin;
    }

    /** Cancels explosions that occur inside the protected spawn radius. */
    @EventHandler
    void onExplode(EntityExplodeEvent event) {
        Location loc = event.getLocation();
        int protspawnrad = plugin.getManager().getSettingsManager().getProtectedSpawnRadius();
        if (loc.distance(loc.getWorld().getSpawnLocation()) < protspawnrad) {
            event.setCancelled(true);
        }
    }

    /** Cancels any damage dealt to a player standing inside the protected spawn radius. */
    @EventHandler
    void onDamage(EntityDamageEvent event) {
        if (event.isCancelled()) {
            return;
        }
        if (!(event.getEntity() instanceof Player)) {
            return;
        }
        Player p = (Player) event.getEntity();
        int protspawnrad = plugin.getManager().getSettingsManager().getProtectedSpawnRadius();
        if (p.getLocation().distance(p.getWorld().getSpawnLocation()) < protspawnrad) {
            // Previously contained an empty ENTITY_ATTACK branch; all causes
            // are cancelled inside the radius, so the dead branch was removed.
            event.setCancelled(true);
        }
    }

    /**
     * Cancels spawn events whose entity is a player inside the protected
     * spawn radius.
     * NOTE(review): matching a Player on EntitySpawnEvent is unusual —
     * original behavior preserved; confirm the intended event type.
     */
    @EventHandler
    void onEntityTarget(EntitySpawnEvent event) {
        if (event.isCancelled()) {
            return;
        }
        if (!(event.getEntity() instanceof Player)) {
            return;
        }
        Player p = (Player) event.getEntity();
        int protspawnrad = plugin.getManager().getSettingsManager().getProtectedSpawnRadius();
        if (p.getLocation().distance(p.getWorld().getSpawnLocation()) <= protspawnrad) {
            event.setCancelled(true);
        }
    }

    /** Stops mobs from targeting players who are inside the protected spawn radius. */
    @EventHandler
    void onEntityTarget(EntityTargetLivingEntityEvent event) {
        if (event.isCancelled()) {
            return;
        }
        if (!(event.getEntity() instanceof Player)) {
            return;
        }
        Player p = (Player) event.getEntity();
        int protspawnrad = plugin.getManager().getSettingsManager().getProtectedSpawnRadius();
        if (p.getLocation().distance(p.getWorld().getSpawnLocation()) <= protspawnrad) {
            event.setCancelled(true);
            event.setTarget(null);
        }
    }

    /**
     * Handles player-vs-player damage, whether direct or via an
     * Arrow/Snowball/Egg projectile. The previous version duplicated the
     * guild/ally/spawn/fight logic four times with inconsistent ordering
     * and radius comparisons; it is now applied once, uniformly:
     * spawn protection first, then friendly-fire checks, then fight timers.
     */
    @EventHandler
    void onEntityDamageEntity(EntityDamageByEntityEvent event) {
        if (event.isCancelled()) {
            return;
        }
        if (!(event.getEntity() instanceof Player)) {
            return;
        }
        Player attacked = (Player) event.getEntity();
        Player attacker = resolveAttacker(event);
        if (attacker == null) {
            return; // not player-inflicted damage
        }
        handlePlayerAttack(event, attacker, attacked);
    }

    /**
     * Resolves the attacking player from a direct hit or from the shooter
     * of an Arrow, Snowball or Egg. Returns null when no player attacker
     * can be determined.
     */
    private Player resolveAttacker(EntityDamageByEntityEvent event) {
        if (event.getDamager() instanceof Player) {
            return (Player) event.getDamager();
        }
        ProjectileSource shooter = null;
        if (event.getDamager() instanceof Arrow) {
            shooter = ((Arrow) event.getDamager()).getShooter();
        } else if (event.getDamager() instanceof Snowball) {
            shooter = ((Snowball) event.getDamager()).getShooter();
        } else if (event.getDamager() instanceof Egg) {
            shooter = ((Egg) event.getDamager()).getShooter();
        }
        return (shooter instanceof Player) ? (Player) shooter : null;
    }

    /**
     * Applies the shared PvP rules for one attacker/victim pair:
     * spawn-area protection, guild membership, alliance, then fight timers.
     * Cancels the event and messages the attacker when a rule blocks the hit.
     */
    private void handlePlayerAttack(EntityDamageByEntityEvent event, Player attacker, Player attacked) {
        int protspawnrad = plugin.getManager().getSettingsManager().getProtectedSpawnRadius();
        // Uses the damager's position (player or projectile) like the original code.
        if (event.getDamager().getLocation().distance(attacker.getWorld().getSpawnLocation()) <= protspawnrad) {
            attacker.sendMessage(plugin.getManager().getMsgManager().getMsg("pvp-protection"));
            event.setCancelled(true);
            return;
        }
        // jedno sprawdzenie powinno wystarczyc (one check should be enough)
        if (checkIfGuildMembers(attacker, attacked) || checkIfAllies(attacker, attacked)) {
            attacker.sendMessage(plugin.getManager().getMsgManager().getMsg("pvpguildmember"));
            event.setCancelled(true);
            return;
        }
        checkFight(attacker, attacked);
    }

    /**
     * Returns true when both players are members of the same guild.
     * Players missing from the user map, or without a guild, never match.
     */
    public boolean checkIfGuildMembers(Player attacker, Player attacked) {
        User victimuser = plugin.getManager().getUserManager().getUsers().get(attacked.getName());
        User attackeruser = plugin.getManager().getUserManager().getUsers().get(attacker.getName());
        if (victimuser == null || attackeruser == null) {
            return false; // robustness: user not loaded yet — allow the hit
        }
        if (victimuser.getGuild() == null || attackeruser.getGuild() == null) {
            return false;
        }
        return victimuser.getGuild().equals(attackeruser.getGuild());
    }

    /**
     * Returns true when the attacked player's guild is in the attacker's
     * guild's ally list. Self-hits and guild-less players never match.
     */
    public boolean checkIfAllies(Player attacker, Player attacked) {
        if (attacker.getName().equals(attacked.getName())) {
            return false;
        }
        User victimuser = plugin.getManager().getUserManager().getUsers().get(attacked.getName());
        User attackeruser = plugin.getManager().getUserManager().getUsers().get(attacker.getName());
        if (victimuser == null || attackeruser == null) {
            return false; // robustness: user not loaded yet — allow the hit
        }
        if (victimuser.getGuild() == null || attackeruser.getGuild() == null) {
            return false;
        }
        return attackeruser.getGuild().getAlly().contains(victimuser.getGuild().getGuildTag());
    }

    /**
     * Starts or refreshes the anti-logout fight entry for both players.
     * A player is only messaged when their fight entry is first created.
     */
    public void checkFight(Player attacker, Player attacked) {
        refreshFight(attacker.getName(), attacked.getName(), attacked);
        refreshFight(attacker.getName(), attacked.getName(), attacker);
    }

    /** Creates or updates the fight entry keyed by {@code owner}'s name. */
    private void refreshFight(String attackerName, String victimName, Player owner) {
        Fight fight = plugin.getManager().getAntiLogoutManager().getFightList().get(owner.getName());
        if (fight == null) {
            fight = new Fight(attackerName, victimName, System.currentTimeMillis());
            plugin.getManager().getAntiLogoutManager().getFightList().put(owner.getName(), fight);
            owner.sendMessage(plugin.getManager().getMsgManager().getMsg("player-during-fight")
                    .replace("{TIME}", String.valueOf(fight.getCooldown())));
        } else {
            fight.setAttacker(attackerName);
            fight.setVictim(victimName);
            fight.setLastHitTime(System.currentTimeMillis());
        }
    }
}
| |
package crazypants.enderio.teleport;
import java.awt.Color;
import java.io.IOException;
import org.lwjgl.opengl.GL11;
import com.enderio.core.client.gui.button.CheckBox;
import com.enderio.core.client.gui.widget.TextFieldEnder;
import com.enderio.core.client.render.ColorUtil;
import crazypants.enderio.EnderIO;
import crazypants.enderio.api.teleport.ITravelAccessable;
import crazypants.enderio.api.teleport.ITravelAccessable.AccessMode;
import crazypants.enderio.gui.GuiContainerBaseEIO;
import crazypants.enderio.network.GuiPacket;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
/**
 * GUI screen for configuring an {@link ITravelAccessable} tile entity:
 * three mutually-exclusive access-mode check boxes (private / protected /
 * public) laid out in three columns, plus a text field for an optional
 * label. Changes are pushed to the server via {@link GuiPacket}.
 *
 * @param <T> the tile entity type, which must also be travel-accessable
 */
public class GuiTravelAccessable<T extends TileEntity & ITravelAccessable> extends GuiContainerBaseEIO {

    // Button ids double as the access-mode selector in actionPerformed().
    private static final int ID_PUBLIC = 0;
    private static final int ID_PRIVATE = 1;
    private static final int ID_PROTECTED = 2;

    private CheckBox publicCB;
    private CheckBox privateCB;
    private CheckBox protectedCB;

    // Label text field; its content is synced to the TE in checkLabelForChange().
    private TextFieldEnder tf;

    private String publicStr;
    private String privateStr;
    private String protectedStr;

    protected T te;

    // X centers of the three check-box columns, derived from the widest caption.
    private int col0x;
    private int col1x;
    private int col2x;

    protected World world;

    public GuiTravelAccessable(InventoryPlayer playerInv, T te, World world) {
        this(te, new ContainerTravelAccessable(playerInv, te, world));
    }

    public GuiTravelAccessable(T te, ContainerTravelAccessable container) {
        super(container, "travelAccessable");
        this.te = te;
        this.world = container.world;

        publicStr = EnderIO.lang.localize("gui.travelAccessable.public");
        privateStr = EnderIO.lang.localize("gui.travelAccessable.private");
        protectedStr = EnderIO.lang.localize("gui.travelAccessable.protected");

        FontRenderer fr = Minecraft.getMinecraft().fontRendererObj;
        tf = new TextFieldEnder(fr, 28, 10, 90, 16);

        // Center column is fixed; outer columns split the remaining width
        // around the widest ("protected") caption.
        col1x = 88;
        col0x = (col1x - fr.getStringWidth(protectedStr) / 2) / 2;
        col2x = (col1x + fr.getStringWidth(protectedStr) / 2);
        col2x += (176 - col2x) / 2;

        int x = 0;
        int y = 50;
        x = col0x - 8;
        privateCB = new CheckBox(this, ID_PRIVATE, x, y);
        privateCB.setSelected(te.getAccessMode() == AccessMode.PRIVATE);
        x = col1x - 8;
        protectedCB = new CheckBox(this, ID_PROTECTED, x, y);
        protectedCB.setSelected(te.getAccessMode() == AccessMode.PROTECTED);
        x = col2x - 8;
        publicCB = new CheckBox(this, ID_PUBLIC, x, y);
        publicCB.setSelected(te.getAccessMode() == AccessMode.PUBLIC);

        ySize = 185;
        textFields.add(tf);
    }

    /**
     * Radio-button behavior: selects exactly the clicked check box, applies
     * the corresponding access mode locally and sends it to the server.
     */
    @Override
    protected void actionPerformed(GuiButton b) {
        privateCB.setSelected(b.id == ID_PRIVATE);
        protectedCB.setSelected(b.id == ID_PROTECTED);
        publicCB.setSelected(b.id == ID_PUBLIC);
        AccessMode curMode = b.id == ID_PRIVATE ? AccessMode.PRIVATE : b.id == ID_PROTECTED ? AccessMode.PROTECTED : AccessMode.PUBLIC;
        te.setAccessMode(curMode);
        GuiPacket.send(this, ContainerTravelAccessable.EXEC_ACCESS_MODE, curMode);
    }

    @Override
    public void initGui() {
        super.initGui();
        buttonList.clear();
        publicCB.setPaintSelectedBorder(false);
        publicCB.onGuiInit();
        privateCB.onGuiInit();
        protectedCB.onGuiInit();
        tf.setMaxStringLength(32);
        tf.setFocused(true);
        String txt = te.getLabel();
        if (txt != null && txt.length() > 0) {
            tf.setText(txt);
        }
        ((ContainerTravelAccessable) inventorySlots).addGhostSlots(getGhostSlots());
    }

    // NOTE: the previous no-op overrides of updateScreen(), mouseClicked()
    // and drawScreen() that only delegated to super were removed; behavior
    // is unchanged because the inherited implementations run either way.

    /** Draws the background texture and the three column captions. */
    @Override
    public void drawGuiContainerBackgroundLayer(float f, int i, int j) {
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        bindGuiTexture();
        int sx = (width - xSize) / 2;
        int sy = (height - ySize) / 2;
        drawTexturedModalRect(sx, sy, 0, 0, this.xSize, this.ySize);

        int col = ColorUtil.getRGB(Color.white);
        int x = sx;
        int y = sy + 38;
        FontRenderer fontRenderer = getFontRenderer();
        x = sx + col0x - fontRenderer.getStringWidth(privateStr) / 2;
        fontRenderer.drawStringWithShadow(privateStr, x, y, col);
        x = sx + col1x - fontRenderer.getStringWidth(protectedStr) / 2;
        fontRenderer.drawStringWithShadow(protectedStr, x, y, col);
        x = sx + col2x - fontRenderer.getStringWidth(publicStr) / 2;
        fontRenderer.drawStringWithShadow(publicStr, x, y, col);

        // Piggybacks on the render tick to detect label edits.
        checkLabelForChange();
        super.drawGuiContainerBackgroundLayer(f, i, j);
    }

    /**
     * Syncs the label text field to the tile entity when it has changed.
     * Empty strings are normalized to null so "" and "no label" compare equal.
     */
    private void checkLabelForChange() {
        String newTxt = tf.getText();
        if (newTxt != null && newTxt.length() == 0) {
            newTxt = null;
        }
        String curText = te.getLabel();
        if (curText != null && curText.length() == 0) {
            curText = null;
        }
        // Null-safe equality; previous version spelled this out as a nested
        // if/else ladder.
        boolean changed = (newTxt == null) ? (curText != null) : !newTxt.equals(curText);
        if (!changed) {
            return;
        }
        te.setLabel(newTxt);
        GuiPacket.send(this, ContainerTravelAccessable.EXEC_LABEL, newTxt);
    }

    /** Greys out the label area unless the TE is in PROTECTED mode. */
    @Override
    protected void drawForegroundImpl(int mouseX, int mouseY) {
        super.drawForegroundImpl(mouseX, mouseY);
        if (te.getAccessMode() != AccessMode.PROTECTED) {
            bindGuiTexture();
            GL11.glColor4f(1, 1, 1, 0.75f);
            GL11.glEnable(GL11.GL_BLEND);
            GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
            GL11.glDisable(GL11.GL_DEPTH_TEST);
            drawTexturedModalRect(43, 72, 5, 35, 90, 18);
            GL11.glDisable(GL11.GL_BLEND);
            GL11.glEnable(GL11.GL_DEPTH_TEST);
            GL11.glColor4f(1, 1, 1, 1);
        }
    }
}
| |
/*Copyright (c) 2004,University of Illinois at Urbana-Champaign. All rights reserved.
Developed by:
Chemistry and Computational Biology Group
NCSA, University of Illinois at Urbana-Champaign
http://ncsa.uiuc.edu/GridChem
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal with the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following
conditions:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimers.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimers in the documentation
and/or other materials provided with the distribution.
3. Neither the names of Chemistry and Computational Biology Group , NCSA,
University of Illinois at Urbana-Champaign, nor the names of its contributors
may be used to endorse or promote products derived from this Software without
specific prior written permission.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS WITH THE SOFTWARE.
*/
/*
* Created on Apr 13, 2005
* Moved from GridChem.java @ CCS,UKy
* Indentation is four; tab stops is eight.
* some code at the tail does not follow this indentation.
*
*/
package org.gridchem.client;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.KeyEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JTextPane;
import javax.swing.KeyStroke;
import javax.swing.border.Border;
import javax.swing.border.EtchedBorder;
import javax.swing.text.html.HTMLEditorKit;
import nanocad.nanocadFrame2;
import org.gridchem.client.common.Settings;
import org.gridchem.client.gui.buttons.ApplicationMenuItem;
import org.gridchem.client.gui.buttons.DropDownButton;
import org.gridchem.client.gui.login.LoginDialog;
import org.gridchem.client.gui.panels.BareBonesBrowserLaunch;
import org.gridchem.client.gui.panels.CancelCommandPrompt;
import org.gridchem.client.gui.panels.RSSViewer;
import org.gridchem.client.gui.panels.parseRSS;
import org.gridchem.client.gui.panels.myccg.MonitorVO;
import org.gridchem.client.help.HelpBrowser;
import org.gridchem.client.util.Env;
import org.gridchem.client.util.GMS3;
import org.gridchem.service.beans.JobBean;
import G03Input.G03MenuTree;
import G03Input.InputFile;
import G03Input.InputfileReader;
import G03Input.showMolEditor;
import Gamess.gamessGUI.GamessGUI;
import Nanocad3d.Nanocad3D;
public class optsComponent extends JComponent implements ActionListener, WindowListener, ComponentListener {
// public static LoginPanel lp;
public static Accounting acw;
public static PreferencesWindow pw;
public static SubmitJobsWindow sjw;
public static JobManagementWindow mw;
public static MonitorVO monitorWindow;
public static SaveDataWindow sdw;
public static RSSViewer rssw;
public static HelpBrowser helpBrowser;
public static JFrame mainFrame;
public static Nanocad3D nano3DWindow; // added for nanocad 3D - Narendra Kumar Polani (CCS, UKY)
public static nanocadFrame2 nanWindow;
public static int selectedFrontPanel = 0;
// lixh_add
private JobBean j;
JPanel buttonBox;
Container messageBox;
JButton authButton;
JButton readButton;
JButton prefButton;
JButton usageButton;
JButton g03guiButton;
//JButton submButton;
JButton mangButton;
//JButton nanocadButton;
JButton licenseButton;
JButton exitButton;
JButton helpButton;
DropDownButton inputGeneratorGuiButton;
DropDownButton moleditorGuiButton; //added -nik
// DropDownButton submGuiButton; //added -nik
JButton submGuiButton; //added -nik
ApplicationMenuItem jobMenuItem; //added -nik
ApplicationMenuItem flowMenuItem; //added -nik
ApplicationMenuItem gaussianMenuItem;
ApplicationMenuItem gamessMenuItem;
public JTextPane dyninfoPane;
public static JTextArea messageBoard;
public JTextArea inputText;
ApplicationMenuItem nano3dMenuItem; // added for nanocad 3d - Narendra Kumar Polani (CCS,UKY)
//start addition nik
ApplicationMenuItem nanoMenuItem;
ApplicationMenuItem molMenuItem;
ApplicationMenuItem gdisMenuItem;
ApplicationMenuItem jmolMenuItem;
ApplicationMenuItem tubeMenuItem;
ApplicationMenuItem javaMenuItem;
//end addition nik
public boolean nanbool = false;
//static JobList ListOfJobs = new JobList();
/**
 * Builds the SEAGrid main panel: a vertical column of control buttons on
 * the right and a message area (welcome HTML pane + plain-text message
 * board) on the left, then wires this component as the action listener
 * for every button.
 */
public optsComponent() {
    // insert main control buttons
    authButton = new JButton("Sign In");
    readButton = new JButton("Announcements");
    prefButton = new JButton("Preferences");
    usageButton = new JButton("Show Usage");
    // submGuiButton = createJobDDB();
    submGuiButton = new JButton("Launch Experiments");
    mangButton = new JButton("My SEAGrid");
    moleditorGuiButton = createMolEdDDB();
    //nanocadButton = new JButton("Molecular Editor");
    inputGeneratorGuiButton = createDDB();
    licenseButton = new JButton("View License");
    exitButton = new JButton("Exit");
    helpButton = new JButton("Help");
    // set their tool tip texts
    authButton.setToolTipText("<html><p>Authenticate to SEAGrid.</p><html>");
    readButton.setToolTipText("<html><p>View real-time announcements</p>" +
            "<p>from across the SEAGrid.");
    prefButton.setToolTipText("<html><p>View and edit user preferences.</p><html>");
    usageButton.setToolTipText("<html><p>View comprehensive individual</p>" +
            "<p>and group usage across all projects.</p><html>");
    //submButton.setToolTipText("<html><p>Create and submit jobs.</p><html>");
    mangButton.setToolTipText(
            "<html><p>Manage your experiments, monitor</p>" +
            "<p>SEAGrid resources and view </p>" +
            "<p>individual and group</p>" +
            "<p>usage across all projects.</p><html>");
    //nanocadButton.setToolTipText("<html><p>Launch the Nanocad editor.</p><html>");
    moleditorGuiButton.setToolTipText("<html><p>Launch the Molecular editor.</p><html>");
    licenseButton.setToolTipText("<html><p>View the full SEAGrid</p>" +
            "<p>licensing agreement.</p><html>");
    exitButton.setToolTipText("<html><p>Exit SEAGrid.</p><html>");
    helpButton.setToolTipText("<html><p>View comprehensive help:</p>" +
            "<p>documentation on SEAGrid and </p>" +
            "<p>its supported applications.</p><html>");
    inputGeneratorGuiButton.setDropDownToolTipText("<html><p>Launch the Gaussian/GAMESS input builder.</p>" +
            "<p>Press Alt + 1 for Gaussian</p>" +
            "<p>Press Alt + 2 for GAMESS</p><html>");
    submGuiButton.setToolTipText("<html><p>Launch Experiment Editor.</p>+" +
            "<p> Press Alt + 3 for Experiment Editor</p>");
    // NOTE(review): these locals shadow the buttonBox/messageBox fields
    // declared on the class; the fields are left unassigned here.
    JPanel buttonBox = new JPanel();
    Container messageBox = Box.createVerticalBox();
    // One extra grid row when the usage button is shown (non-webservice mode).
    if (Settings.WEBSERVICE) {
        buttonBox.setLayout(new GridLayout(10, 1, 0, 10));
    } else {
        buttonBox.setLayout(new GridLayout(11, 1, 0, 10));
    }
    // Changed from 7 to 8 @CCS,UKy
    Border rbBorder = BorderFactory.createRaisedBevelBorder();
    Border eBorder1 = BorderFactory.createEmptyBorder(0, 10, 0, 0);
    Border leBorder = BorderFactory.createEtchedBorder(EtchedBorder.LOWERED);
    buttonBox.setBorder(eBorder1);
    //buttonBox.setPreferredSize(new Dimension(180,280));
    JPanel buttonBoxouter = new JPanel();
    //buttonBoxouter.setBorder(BorderFactory.createCompoundBorder(eBorder1,leBorder));
    buttonBoxouter.setBorder(eBorder1);
    buttonBoxouter.setLayout(new BorderLayout());
    buttonBoxouter.add(buttonBox, BorderLayout.CENTER);
    // Populate the button column; several legacy buttons remain disabled.
    buttonBox.add(authButton);
    buttonBox.add(mangButton);
    //buttonBox.add(submButton);
    //buttonBox.add(prefButton);
    buttonBox.add(submGuiButton);
    //Commenting some not available features
    //buttonBox.add(moleditorGuiButton);
    //commented nik buttonBox.add(nanocadButton);
    //buttonBox.add(inputGeneratorGuiButton);
    if (!Settings.WEBSERVICE) {
        buttonBox.add(usageButton);
    }
    buttonBox.add(readButton);
    buttonBox.add(licenseButton);
    // buttonBox.add(helpButton);
    buttonBox.add(exitButton);
    // NOTE(review): 'logo' is created but never referenced below; the image
    // is instead embedded via the imgtext HTML string.
    final ImageIcon logo = new ImageIcon(Env.getGridChemLogoLocation());
    //RSSDisplayPanel rdp = new RSSDisplayPanel();
    parseRSS prss = new parseRSS();
    String imgtext = "<img src=\"File:///" + Env.getGridChemLogoLocation() + "\" height=50 width=50>";
    //System.out.println("Image info "+ Env.getGridChemLogoLocation()+" "+imgtext);
    String textinfo1 = "<div style=\"background-color:#E7EEF6; color:#000000\">" +
            "<div style=\"background-color:#A7B3C7; color:#FFFFFF;\">" +
            imgtext + "<font size=5> Welcome to Science and Engineering Applications Grid (SEAGrid) !!" +
            "</font>" +
            "<br></div>" +
            "<p>You are running the " +
            "<Font color='green'>SEAGrid Desktop Client </font>" +
            "Application. </p>";
    String textinfo2 = "<p>To use Web Portal and for more information, " +
            " visit <a href='https://seagrid.org/'>https://seagrid.org/</a></div></p>";
    String textinfo3 = "<p>If you do not have SEAGrid account, you may request one on the web portal." +
            "</div></p>";
    String textinfo4 = "<br><p><Font color='red'>Note: This version is in active development and will" +
            " be auto-updated periodically.</font></p>";
    URL rssurl;
    try {
        // rssurl is built for the RSS feed but the parsed feed is currently
        // commented out of the pane text below.
        rssurl = new URL(Invariants.CCGRSSFeed);
        dyninfoPane = new JTextPane();
        dyninfoPane.setEditorKit(new HTMLEditorKit());
        dyninfoPane.setText(textinfo1 + /**prss.parseRSS(rssurl)**/ textinfo2 + textinfo3 + textinfo4);
        dyninfoPane.setCaretPosition(0);
    } catch (MalformedURLException e) {
        e.printStackTrace();
    }
    //messageBoard.setEditable(false);
    JScrollPane jscrollpaned = new JScrollPane(dyninfoPane);
    jscrollpaned.setWheelScrollingEnabled(true);
    jscrollpaned.setPreferredSize(new Dimension(550, 450));
    jscrollpaned.setBorder(
            BorderFactory.createCompoundBorder(
                    BorderFactory.createEmptyBorder(5, 5, 5, 5),
                    jscrollpaned.getBorder()));
    //lixh_add for automatic moving the scrollbar
    //jscrollpane.setWheelScrollingEnabled(true);
    messageBoard = new JTextArea("", 5, 55) {
    };
    JScrollPane jscrollpane = new JScrollPane(messageBoard);
    messageBox.add(jscrollpaned);
    messageBox.add(jscrollpane);
    // set up the layout of the buttons
    setBorder(BorderFactory.createEmptyBorder(25, 25, 25, 25)); //lixh_add
    setLayout(new BoxLayout(this, BoxLayout.X_AXIS)); //lixh_add
    // add each box to the layout
    add(messageBox);
    add(buttonBoxouter);
    // listen for an action for each button
    authButton.addActionListener(this);
    readButton.addActionListener(this);
    prefButton.addActionListener(this);
    submGuiButton.addActionListener(this);
    mangButton.addActionListener(this);
    usageButton.addActionListener(this);
    licenseButton.addActionListener(this);
    exitButton.addActionListener(this);
    helpButton.addActionListener(this);
    // commented nik nanocadButton.addActionListener(this);
    updateAuthenticatedStatus();
}
/**
 * Builds the "Open Nano CAD GUI" drop-down button holding one menu item
 * per molecular editor (NanoCAD 3D, NanoCAD, Java ANU, MolDen, GDIS,
 * JMol, TubeGen). All items share a single PopupListener and get an
 * ALT+<key> accelerator. Previously each item repeated the same 5-line
 * construction recipe; that is now factored into createEditorMenuItem.
 *
 * @return the fully populated drop-down button (also stored in the
 *         moleditorGuiButton field)
 */
private DropDownButton createMolEdDDB() {
    PopupListener popupListener = new PopupListener();

    moleditorGuiButton = new DropDownButton("Open Nano CAD GUI");
    moleditorGuiButton.getButton().setToolTipText(
            "Open the Nanocad Molecular Editor.");

    nanoMenuItem = createEditorMenuItem("Nano CAD", KeyEvent.VK_5,
            "Opens the Nano CAD Molecular Editor", popupListener);
    // added for nanocad 3d - Narendra Kumar Polani (CCS, UKY)
    nano3dMenuItem = createEditorMenuItem("NanoCAD 3D", KeyEvent.VK_A,
            "Opens the NanoCAD 3D Molecular Editor", popupListener);
    javaMenuItem = createEditorMenuItem("Java ANU", KeyEvent.VK_6,
            "Opens the Java ANU Molecular Editor", popupListener);
    molMenuItem = createEditorMenuItem("Mol Den", KeyEvent.VK_7,
            "Opens the Mol Den Molecular Editor", popupListener);
    gdisMenuItem = createEditorMenuItem("GDIS", KeyEvent.VK_8,
            "Opens the GDIS Molecular Editor", popupListener);
    jmolMenuItem = createEditorMenuItem("JMol", KeyEvent.VK_9,
            "Opens the JMol Molecular Editor", popupListener);
    tubeMenuItem = createEditorMenuItem("Tube Gen", KeyEvent.VK_0,
            "Opens the Tube Gen Molecular Editor", popupListener);

    // Menu order: 3D editor first, then the 2D/other editors.
    moleditorGuiButton.getMenu().add(nano3dMenuItem);
    moleditorGuiButton.getMenu().add(nanoMenuItem);
    moleditorGuiButton.getMenu().add(javaMenuItem);
    moleditorGuiButton.getMenu().add(molMenuItem);
    moleditorGuiButton.getMenu().add(gdisMenuItem);
    moleditorGuiButton.getMenu().add(jmolMenuItem);
    moleditorGuiButton.getMenu().add(tubeMenuItem);

    return moleditorGuiButton;
}

/**
 * Creates one editor menu item with an ALT+key accelerator, an accessible
 * description, and the shared action listener attached.
 */
private ApplicationMenuItem createEditorMenuItem(String label, int key,
        String description, ActionListener listener) {
    ApplicationMenuItem item = new ApplicationMenuItem(label, key);
    item.setAccelerator(KeyStroke.getKeyStroke(key, ActionEvent.ALT_MASK));
    item.getAccessibleContext().setAccessibleDescription(description);
    item.addActionListener(listener);
    return item;
}
// private JButton createJobDDB() {
// PopupListener popupListener = new PopupListener();
//
// submGuiButton = new DropDownButton("Create Job");
// submGuiButton.getButton().setToolTipText(
// "Open the Job/Work Flow Editor.");
// jobMenuItem = new ApplicationMenuItem("Job", KeyEvent.VK_3);
// jobMenuItem.setAccelerator(KeyStroke.getKeyStroke(
// KeyEvent.VK_3, ActionEvent.ALT_MASK));
// jobMenuItem.getAccessibleContext().setAccessibleDescription(
// "Opens the Job Editor");
// jobMenuItem.addActionListener(popupListener);
//
// flowMenuItem = new ApplicationMenuItem("Work Flow", KeyEvent.VK_4);
// flowMenuItem.setAccelerator(KeyStroke.getKeyStroke(
// KeyEvent.VK_4, ActionEvent.ALT_MASK));
// flowMenuItem.getAccessibleContext().setAccessibleDescription(
// "Opens the Work Flow Editor");
// flowMenuItem.addActionListener(popupListener);
//
// submGuiButton.getMenu().add(jobMenuItem);
// submGuiButton.getMenu().add(flowMenuItem);
//
// return submGuiButton;
// }
/**
 * Builds the drop-down button that launches the quantum-chemistry input
 * builders. Two entries — Gaussian (ALT+1) and GAMESS (ALT+2) — both
 * dispatch through a shared PopupListener.
 *
 * @return the populated drop-down button (also stored in the
 *         inputGeneratorGuiButton field)
 */
private DropDownButton createDDB() {
    PopupListener listener = new PopupListener();

    inputGeneratorGuiButton = new DropDownButton("Open Gaussian GUI");
    inputGeneratorGuiButton.getButton().setToolTipText("Open the Gaussian input builder.");

    gaussianMenuItem = new ApplicationMenuItem("Gaussian", KeyEvent.VK_1);
    gaussianMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_1, ActionEvent.ALT_MASK));
    gaussianMenuItem.getAccessibleContext().setAccessibleDescription("Opens the Gaussian GUI");
    gaussianMenuItem.addActionListener(listener);

    gamessMenuItem = new ApplicationMenuItem("GAMESS", KeyEvent.VK_2);
    gamessMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_2, ActionEvent.ALT_MASK));
    gamessMenuItem.getAccessibleContext().setAccessibleDescription("Opens the GAMESS GUI");
    gamessMenuItem.addActionListener(listener);

    inputGeneratorGuiButton.getMenu().add(gaussianMenuItem);
    inputGeneratorGuiButton.getMenu().add(gamessMenuItem);

    return inputGeneratorGuiButton;
}
// Modal progress/cancel dialog shared by the background SwingWorker tasks;
// null whenever no background task is running (see stopWaiting()).
CancelCommandPrompt progressCancelPrompt;

/**
 * Opens the progress/cancel dialog for a background task.
 * The hard-coded -1 appears to request an indeterminate progress display —
 * TODO confirm against CancelCommandPrompt's constructor contract.
 *
 * @param title     dialog title
 * @param labelText message shown next to the progress indicator
 * @param worker    the background task the dialog can cancel
 */
private void startWaiting(String title, String labelText, SwingWorker worker) {
    progressCancelPrompt =
            new CancelCommandPrompt(this,title,labelText,-1,worker);
}
/**
 * Ticks the progress dialog.
 *
 * NOTE(review): the {@code value} argument is ignored — only the no-arg
 * {@code updateStatus()} is invoked.  Confirm whether CancelCommandPrompt
 * offers a value-based overload that should be used here.
 *
 * @param value the progress value (currently unused)
 */
private void updateProgress(int value) {
    // Guard against the race where a worker thread reports progress after
    // stopWaiting() has already cleared the prompt (previously an NPE).
    if (progressCancelPrompt != null) {
        progressCancelPrompt.updateStatus();
    }
}
/**
 * Updates the message shown in the progress dialog.
 *
 * @param message the new status text
 */
private void updateProgress(String message) {
    // Guard against the race where a worker thread reports progress after
    // stopWaiting() has already cleared the prompt (previously an NPE).
    if (progressCancelPrompt != null) {
        progressCancelPrompt.updateStatus(message);
    }
}
/**
 * Updates both the message and the progress tick of the dialog.
 *
 * NOTE(review): as in the int overload, {@code value} is ignored —
 * confirm whether CancelCommandPrompt accepts a numeric progress value.
 *
 * @param message the new status text
 * @param value   the progress value (currently unused)
 */
private void updateProgress(String message, int value) {
    // Guard against the race where a worker thread reports progress after
    // stopWaiting() has already cleared the prompt (previously an NPE).
    if (progressCancelPrompt != null) {
        progressCancelPrompt.updateStatus();
        progressCancelPrompt.updateStatus(message);
    }
}
/** Closes and releases the progress dialog, if one is currently shown. */
private void stopWaiting() {
    if (progressCancelPrompt == null) {
        return;
    }
    progressCancelPrompt.finished();
    progressCancelPrompt = null;
}
/**
 * Shared ActionListener for the drop-down menu items.  Dispatches on the
 * ApplicationMenuItem that fired the event: launches the matching
 * editor/viewer and then re-labels the owning drop-down button so it
 * reflects the most recent selection.
 */
public class PopupListener implements ActionListener {

    public void actionPerformed(ActionEvent event) {
        ApplicationMenuItem item = (ApplicationMenuItem) event.getSource();

        if (item.equals(gaussianMenuItem)) {
            stuffInside.selectedGUI = 1;
            showNewGUI();
            // remember the last-used builder so the button label can follow
            gaussianMenuItem.setLastSelected(true);
            gamessMenuItem.setLastSelected(false);
        } else if (item.equals(gamessMenuItem)) {
            // NOTE(review): selectedGUI is set to 1 here as well (same as
            // Gaussian) — confirm a distinct id isn't needed for GAMESS.
            stuffInside.selectedGUI = 1;
            GamessGUI.main(null);
            gaussianMenuItem.setLastSelected(false);
            gamessMenuItem.setLastSelected(true);
        } else if (item.equals(nano3dMenuItem)) {
            // added for nanocad 3d - Narendra Kumar Polani (CCS, UKY)
            selectedFrontPanel = 1;
            doCallNanoCad3D();
        } else if (item.equals(nanoMenuItem)) {
            //jayeeta added following two lines.
            System.out.println("Launching Nanocad Molecular Editor");
            selectedFrontPanel = 1;
            doCallNanocad();
        } else if (item.equals(javaMenuItem)) {
            doCallJavaANU();
        } else if (item.equals(molMenuItem)) {
            doCallMolDen();
        } else if (item.equals(gdisMenuItem)) {
            doCallGDIS();
        } else if (item.equals(jmolMenuItem)) {
            doCallJMol();
        } else if (item.equals(tubeMenuItem)) {
            doCallTubeGen();
        } else if (item.equals(jobMenuItem)) {
            // Job editor: requires authentication; only wired up for the
            // web-service backend.  The window is built on a SwingWorker
            // with a cancellable progress prompt.
            CheckAuth ca = new CheckAuth();
            if (ca.authorized) {
                if (Settings.WEBSERVICE) {
                    SwingWorker worker = new SwingWorker() {
                        @Override
                        public Object construct() {
                            updateProgress("Loading");
                            SubmitJobsWindow.getInstance();
                            updateProgress("Finished loading");
                            return null;
                        }

                        @Override
                        public void finished() {
                            stopWaiting();
                        }
                    };
                    progressCancelPrompt = new CancelCommandPrompt(buttonBox, "Loading Submit Experiments", "Please wait few seconds", -1, worker);
                    worker.start();
                }
            } else {
                doWarning();
            }
        } else if (item.equals(flowMenuItem)) {
            // (Removed: a large commented-out block that read XBaya proxy
            // settings from an "xbaya_proxy" file with hard-coded fallbacks
            // — dead since Dec 2009.)
            // Launch the XBaya workflow editor via Java Web Start.
            CheckAuth ca = new CheckAuth();
            if (ca.authorized) {
                try {
                    Process p = Runtime.getRuntime().exec("javaws " + Invariants.XBayaJnlpURL);
                    System.out.println("Exit XBaya");
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } else {
                doWarning();
            }
        }

        // Re-label the owning drop-down button to mirror the selection.
        if (item.equals(nanoMenuItem) || item.equals(javaMenuItem)
                || item.equals(molMenuItem) || item.equals(gdisMenuItem)
                || item.equals(jmolMenuItem) || item.equals(tubeMenuItem)) {
            moleditorGuiButton.getButton().setText("Open " + item.getText() + " GUI");
            moleditorGuiButton.getButton().setToolTipText("Open " + item.getText() + " GUI");
        } else if (item.equals(gaussianMenuItem) || item.equals(gamessMenuItem)) {
            inputGeneratorGuiButton.getButton().setText("Open " + item.getText() + " GUI");
            inputGeneratorGuiButton.getButton().setToolTipText("Open " + item.getText() + " GUI");
        } else if (item.equals(jobMenuItem)) {
            submGuiButton.setText("Create Job");
            submGuiButton.setToolTipText("Open Job Editor GUI");
        } else if (item.equals(flowMenuItem)) {
            // intentionally empty — the work-flow label update is disabled
        }
    }
}
/**
 * Opens a fresh Gaussian (G03) input-builder window.  Static state shared
 * with the builder (previous molecule, route, charge/multiplicity) is
 * cleared first so no stale data leaks between editing sessions.
 */
public static void showNewGUI() {
    JFrame.setDefaultLookAndFeelDecorated(true);
    JDialog.setDefaultLookAndFeelDecorated(true);
    // reset state left over from any previous session
    InputFile.tempinput = new String();
    InputfileReader.route = new String();
    showMolEditor.tempmol = new String();
    InputFile.inputfetched = 0;
    InputfileReader.chrgStr = null;
    InputfileReader.mulStr = null;
    mainFrame = new G03Input.G03MenuTree();
    mainFrame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
    mainFrame.pack();
    // size the window relative to the screen, leaving a margin
    Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    mainFrame.setSize(screenSize.width - 200, screenSize.height - 150);
    mainFrame.setResizable(true);
    mainFrame.setVisible(true);
}
/**
 * Top-level button dispatcher for the main control panel: handles the
 * authenticate, announcements, preferences, monitor, license, help, exit
 * and job-submission buttons.  Long-running window construction is pushed
 * onto a SwingWorker behind a cancellable progress prompt.
 */
public void actionPerformed(ActionEvent e) {
    Trace.entry();
    if (e.getSource() == authButton) {
        // Authentication is handled a bit differently now
        // Here when a user selects to reauthenticate, we
        // wipe their entire session clean of preferences,
        // Settings variables, their VO, projects, and
        // their session EPR. We also reset every panel.
        // This is done in LoginPanel.clearLogin().
        if (Settings.authenticated) {
            int result = JOptionPane.showConfirmDialog(
                    this,
                    "Do you really want to disconnect from " +
                    "this \n resource and authenticate to " +
                    "another resource?",
                    "Authentication",
                    JOptionPane.YES_NO_OPTION
            );
            if (result == 0) { // 0 == YES option
                GridChem.appendMessage("Resetting user authentication...");
                LoginDialog.clearLogin();
                GridChem.appendMessage("Complete\n");
                updateAuthenticatedStatus();
                doAuthentication();
            }
        } else {
            updateAuthenticatedStatus();
            doAuthentication();
        }
    } else if (e.getSource() == readButton) {
        // announcements: RSS viewer for the WS client, browser otherwise
        if (Settings.WEBSERVICE) {
            if (rssw == null) {
                doReadAnnouncements();
            } else {
                rssw.frame.setVisible(true);
            }
        } else {
            doLaunchBrowser();
        }
    } else if (e.getSource() == prefButton) {
        CheckAuth ca = new CheckAuth();
        if (ca.authorized) {
            if (Settings.WEBSERVICE) {
                if (pw == null) {
                    doPreferences();
                } else {
                    PreferencesWindow.frame.setVisible(true);
                }
                // (Removed: commented-out pre-WS branch that waited for the
                // preferences.hist file before opening the window.)
            }
        } else {
            doWarning();
        }
    // (Removed: commented-out usageButton branch — usage info now lives in
    // the MonitorVO panel of the WS client.)
    } else if (e.getSource() == mangButton) {
        /* Expose the monitoring and file management utilities
         * contained in the ManageJobs or MonitorVO Panel depending
         * on client version.
         */
        CheckAuth ca = new CheckAuth();
        if (ca.authorized) {
            if (GridChem.user != null) {
                SwingWorker worker = new SwingWorker() {
                    @Override
                    public Object construct() {
                        updateProgress("Loading");
                        doMonitor();
                        updateProgress("Finished loading");
                        return null;
                    }

                    @Override
                    public void finished() {
                        stopWaiting();
                    }
                };
                progressCancelPrompt = new CancelCommandPrompt(buttonBox, "Loading MySEAGrid", "Please wait few seconds", -1, worker);
                worker.start();
            } else {
                JOptionPane.showMessageDialog(
                        this,
                        "Monitoring information not available. \n" +
                        "User did not successfully log into the CCG.",
                        "Authentication Failed",
                        JOptionPane.INFORMATION_MESSAGE
                );
            }
        } else {
            doWarning();
        }
    } else if (e.getSource() == licenseButton) {
        doLicense();
    } else if (e.getSource() == helpButton) {
        doHelp();
    } else if (e.getSource() == exitButton) {
        int result1 = JOptionPane.showConfirmDialog(
                this,
                "Do you really want to close the client? ",
                "Confirmation",
                JOptionPane.YES_NO_OPTION
        );
        if (result1 == 0) { // 0 == YES option
            if (Settings.authenticated) {
                GMS3.logout();
            }
            doShutdown();
        }
    } else if (e.getSource() == submGuiButton) {
        CheckAuth ca = new CheckAuth();
        if (ca.authorized) {
            if (Settings.WEBSERVICE) {
                SwingWorker worker = new SwingWorker() {
                    @Override
                    public Object construct() {
                        updateProgress("Loading");
                        SubmitJobsWindow.getInstance();
                        updateProgress("Finished loading");
                        return null;
                    }

                    @Override
                    public void finished() {
                        stopWaiting();
                    }
                };
                progressCancelPrompt = new CancelCommandPrompt(buttonBox, "Loading Launch Experiments", "Please wait few seconds", -1, worker);
                worker.start();
            }
        } else {
            doWarning();
        }
    }
    // (Removed: commented-out nanocadButton branch — Nanocad is launched
    // from the PopupListener menu items now.)
    Trace.exit();
} // End of public void actionPerformed(ActionEvent e)
/**
 * Re-labels the sign-in/sign-out button.
 *
 * @param text        the new caption, e.g. "Sign In" / "Sign Out"
 * @param tooltiptext the new tooltip (may contain HTML)
 */
public void setAuthButton(String text, String tooltiptext) {
    authButton.setText(text);
    authButton.setToolTipText(tooltiptext);
}
/**
 * Hides every child window this panel may have opened, then terminates the
 * JVM.  Windows are only hidden, not disposed — System.exit() reclaims
 * everything anyway.
 */
public void doShutdown() {
    // if(lp != null) lp.setVisible(false);
    if (acw != null) acw.setVisible(false);
    if (pw != null) pw.setVisible(false);
    if (sjw != null) sjw.setVisible(false);
    if (mw != null) mw.setVisible(false);
    if (monitorWindow != null) monitorWindow.setVisible(false);
    if (sdw != null) sdw.si.setVisible(false);
    if (rssw != null) rssw.setVisible(false);
    if (nanWindow != null) nanWindow.setVisible(false);
    System.exit(0);
}
/**
 * Open the authentication panel where users can authenticate
 * to the GridChem middleware server. This must be the first
 * action the user takes in order to perform accountable
 * actions with the client. Since the LoginPanel is disposed
 * at shutdown and when the user confirms they wish to re-
 * authenticate, we get a new instance of LoginPanel every time
 * this is called.
 */
public void doAuthentication() {
    messageBoard.append("Authentication requested...\n");
    // keep the newest message visible
    messageBoard.setCaretPosition(messageBoard.getDocument().getLength());
    // modal dialog; blocks until the user completes or cancels login
    new LoginDialog(mainFrame, true);
}
/**
 * Open the announcement panel where the user can find
 * new and exciting information on GridChem via RSS from
 * the gridchem website.
 */
public void doReadAnnouncements() {
    // The handle is kept so later clicks re-surface the existing viewer
    // instead of opening a second one (see actionPerformed).
    rssw = new RSSViewer(Invariants.CCGRSSFeed);
}
/**
 * pre-ws client launches a browser to view announcements.
 */
public void doLaunchBrowser() {
    BareBonesBrowserLaunch.openURL("http://www.gridchem.org");
}
/**
 * Shows the user-preferences window, creating it on first use, and logs
 * the action to the message board.
 */
public void doPreferences() {
    if (pw != null) {
        pw.setVisible(true);
    } else {
        pw = new PreferencesWindow();
    }
    messageBoard.append("Checking preferences...\n");
    // keep the newest message visible
    messageBoard.setCaretPosition(messageBoard.getDocument().getLength());
}
// /**
// * Open submit jobs window where user can submit a job
// * based on their authentication method.
// *
// */
// public void doSubmission()
// {
// if(sjw == null) {
// sjw = SubmitJobsWindow.getInstance();
// } else {
// sjw.setVisible(true);
// }
// messageBoard.append("Submit Jobs Window opened...\n");
// messageBoard.setCaretPosition( messageBoard.getDocument().getLength()); //lixh_add
// }
/**
 * Open manage job window where the user can monitor jobs
 * they have submitted with the client and manage the data
 * associated with each job through our file browser and
 * output parsers.
 *
 * NOTE(review): when {@code Settings.authenticatedSSH} is true the
 * SSH-specific window is commented out, so {@code mw} stays null and no
 * window appears — confirm this is intended.
 */
public void doManagement() {
    if (mw == null) {
        if (Settings.authenticatedSSH) {
            // mw = new SSHManageWindow();
        } else {
            mw = new ManageWindow();
        }
    } else {
        mw.setVisible(true);
    }
    messageBoard.append("Manage Jobs window opened...\n");
    messageBoard.setCaretPosition(messageBoard.getDocument().getLength());
}
/**
 * Shows the VO monitoring window (jobs, resources, usage), creating it on
 * first use and refreshing the existing one on later calls.
 */
public void doMonitor() {
    if (monitorWindow != null) {
        monitorWindow.refresh();
        monitorWindow.setVisible(true);
    } else {
        monitorWindow = new MonitorVO();
    }
    GridChem.appendMessage("Monitoring window opened...\n");
}
/**
 * Download remote data for viewing.  Currently a stub that only shows a
 * "Coming soon..." dialog.
 *
 * @deprecated This method is deprecated as of GridChem 0.3.
 */
public void doExplore() {
    messageBoard.append("Explore data...\n");
    JOptionPane.showMessageDialog(null,
            "Coming soon...", "explore data",
            JOptionPane.INFORMATION_MESSAGE);
}
/**
 * Display GridChem public license in dialog box.
 * (A commented-out JOptionPane variant of the same text was removed; the
 * scrollable display below is the live implementation.)
 */
public void doLicense() {
    String message = "GridChem: Portal to the Computational Chemistry Grid!!\n\n" +
            "Developed by: \n\n" +
            "CCS, University of Kentucky\n" +
            "CCT, Louisiana State University\n" +
            "NCSA, University of Illinois at Urbana-Champaign\n" +
            "OSC, Ohio Supercomputer Center\n" +
            "TACC, University of Texas at Austin\n\n" +
            "http://www.gridchem.org/\n\n" +
            "Copyright (c) 2004,University of Illinois at Urbana-Champaign. All rights reserved.\n\n" +
            "Permission is hereby granted, free of charge, to any person obtaining a copy \n" +
            "of this software and associated documentation files (the \"Software\"),to deal with \n" +
            "the Software without restriction, including without limitation the rights to use, \n" +
            "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the \n" +
            "Software, and to permit persons to whom the Software is furnished to do so, \n" +
            "subject to the following conditions:\n\n" +
            "1. Redistributions of source code must retain the above copyright notice, \n" +
            "   this list of conditions and the following disclaimers.\n" +
            "2. Redistributions in binary form must reproduce the above copyright notice, \n" +
            "   this list of conditions and the following disclaimers in the documentation \n" +
            "   and/or other materials provided with the distribution.\n" +
            "3. Neither the names of Chemistry and Computational Biology Group , NCSA, \n" +
            "   University of Illinois at Urbana-Champaign, nor the names of its contributors \n" +
            "   may be used to endorse or promote products derived from this Software without \n" +
            "   specific prior written permission.\n\n" +
            "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n" +
            "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n" +
            "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n" +
            "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n" +
            "CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n" +
            "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n" +
            "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH\n" +
            "THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.\n\n";
    ScrollableDisplay sd = new ScrollableDisplay("GridChem License", message);
    sd.setVisible(true);
}
/**
 * Pops up the "authenticate first" error dialog.  Shown whenever a feature
 * that requires an authenticated session is used before signing in.
 */
public void doWarning() {
    final String message =
            "You must authenticate to use this function";
    JOptionPane.showMessageDialog(null, message, "GridChem",
            JOptionPane.ERROR_MESSAGE);
}
/**
 * Recursively deletes the contents of {@code directoryFile}: each
 * subdirectory is emptied via recursion and then removed, after which the
 * plain files at this level are removed.  The directory itself is NOT
 * deleted — only its children.
 *
 * Fixes over the previous version: guards against
 * {@link File#listFiles()} returning null (non-directory or I/O error,
 * which previously caused a NullPointerException), replaces raw
 * collections with generics, and reports entries that could not be
 * deleted instead of silently ignoring the {@code delete()} result.
 *
 * @param directoryFile the directory whose contents should be removed
 */
public void doDeletion(File directoryFile) {
    final File[] children = directoryFile.listFiles();
    if (children == null) {
        // not a directory, or an I/O error occurred — nothing to delete
        return;
    }
    final java.util.List<File> toBeDeleted = new ArrayList<File>();
    final java.util.List<File> dirsToBeDeleted = new ArrayList<File>();
    for (File f : children) {
        if (f.isDirectory()) {
            dirsToBeDeleted.add(f);
            doDeletion(f); // empty the subdirectory before removing it
        } else {
            toBeDeleted.add(f);
        }
    }
    for (File g : dirsToBeDeleted) {
        System.err.println("doDeletion: deleting file " + g.getName());
        if (!g.delete()) {
            System.err.println("doDeletion: could not delete " + g.getName());
        }
    }
    for (File t : toBeDeleted) {
        System.err.println("doDeletion: deleting file " + t.getName());
        if (!t.delete()) {
            System.err.println("doDeletion: could not delete " + t.getName());
        }
    }
}
/** Opens the help browser, reusing the existing instance when present. */
public void doHelp() {
    if (helpBrowser != null) {
        helpBrowser.showHelp();
    } else {
        helpBrowser = new HelpBrowser();
    }
}
/*Added nikhil*/
/**
 * Launches the bundled MolDen molecular viewer (Windows build).
 *
 * NOTE(review): the previous version first showed a dialog reading
 * "doCallMolGen" — a leftover debug popup whose text did not even match
 * this method's name; it has been removed.  Restore it if it was
 * intentional.  ProcessBuilder is used instead of
 * {@code Runtime.exec(String)} to avoid naive command tokenizing.
 */
public void doCallMolDen() {
    try {
        new ProcessBuilder(
                ".\\etc\\MolDen\\molden_windows_nt_95\\molden.exe").start();
    } catch (Exception err) {
        err.printStackTrace();
    }
}
/** Launches the bundled GDIS crystal/molecule editor (Windows build). */
public void doCallGDIS() {
    //JOptionPane.showMessageDialog(null, "doCallGDIS");
    try {
        Runtime.getRuntime().exec(".\\etc\\GDIS\\gdis\\gdis.exe");
    } catch (Exception err) {
        err.printStackTrace();
    }
}
/** Starts the JMol viewer via Java Web Start. */
public void doCallJMol() {
    try {
        final String[] command = { "javaws", Invariants.JmolJnlpURL };
        Runtime.getRuntime().exec(command);
    } catch (Exception err) {
        err.printStackTrace();
    }
}
/**
 * Launches the bundled TubeGen executable (Windows build).
 * NOTE(review): the leading dialog that just echoes the method name looks
 * like a leftover debug trace — confirm whether it is intentional.
 */
public void doCallTubeGen() {
    JOptionPane.showMessageDialog(null, "doCallTubeGen");
    try {
        Runtime.getRuntime().exec(".\\etc\\TubeGen\\tubegen.exe");
    } catch (Exception err) {
        err.printStackTrace();
    }
}
/** Starts the Jamberoo (ANU) molecular editor via Java Web Start. */
public void doCallJavaANU() {
    try {
        final String[] command = { "javaws", Invariants.JamberooJnlpURL };
        Runtime.getRuntime().exec(command);
    } catch (Exception err) {
        err.printStackTrace();
    }
}
/* End additions */
/**
 * Writes the NanoCAD bootstrap files (.settings and "loadthis") and opens
 * the NanoCAD 3D window.  Any stale tmp.txt — the file used to hand a
 * molecule back to the client — is removed first.
 *
 * Fixes over the previous version: the FileWriters are closed even when a
 * write throws (they were leaked), and the IOException is no longer
 * swallowed silently.
 */
public void doCallNanoCad3D() {
    Trace.entry();
    System.out.println(" Calling NanoCAD 3D");
    String setsfile = ".settings";
    boolean append = false;
    File sets = new File(Settings.defaultDirStr + Settings.fileSeparator
            + setsfile);
    try {
        FileWriter fw = new FileWriter(sets, append);
        try {
            fw.write("Username= " + Settings.name.getText() + "\n");
            fw.write("CGI= " + Invariants.httpsGateway + "\n");
        } finally {
            fw.close(); // close even if a write fails
        }
        FileWriter fw2 = new FileWriter(Settings.defaultDirStr
                + Settings.fileSeparator + "loadthis", append);
        try {
            fw2.write(Settings.defaultDirStr + Settings.fileSeparator
                    + "common" + Settings.fileSeparator + "Molecule"
                    + Settings.fileSeparator + "Inorganic"
                    + Settings.fileSeparator + "water.pdb\n");
        } finally {
            fw2.close();
        }
    } catch (IOException ioe) {
        // best-effort setup, but at least record the failure
        ioe.printStackTrace();
    }
    String tmpfile = "tmp.txt";
    File fa = new File(Env.getApplicationDataDir() + Settings.fileSeparator
            + tmpfile);
    if (fa.exists()) {
        fa.delete();
    }
    // launch nanocad
    if (Settings.VERBOSE) System.out.println("Calling NanoCAD 3D Main");
    nano3DWindow = new Nanocad3D();
    nano3DWindow.setVisible(true);
    nano3DWindow.addWindowListener(this);
    nano3DWindow.addComponentListener(this);
    //Nanocad3D.glcanvas.requestFocusInWindow();
    System.out.println(" Done with NanoCAD 3D");
    Trace.exit();
}
/**
 * Writes the NanoCAD bootstrap files (.settings and "loadthis") and opens
 * the classic 2D Nanocad editor window.
 *
 * Fixes over the previous version: the FileWriters are closed even when a
 * write throws (they were leaked), and the IOException is no longer
 * swallowed silently.
 */
public void doCallNanocad() {
    System.out.println(" Calling Nanocad");
    String setsfile = ".settings";
    boolean append = false;
    File sets = new File(Settings.defaultDirStr + Settings.fileSeparator
            + setsfile);
    try {
        FileWriter fw = new FileWriter(sets, append);
        try {
            fw.write("Username= " + Settings.name.getText() + "\n");
            fw.write("CGI= " + Invariants.httpsGateway + "\n");
        } finally {
            fw.close(); // close even if a write fails
        }
        FileWriter fw2 = new FileWriter(Settings.defaultDirStr
                + Settings.fileSeparator + "loadthis", append);
        try {
            fw2.write(Settings.defaultDirStr + Settings.fileSeparator
                    + "common" + Settings.fileSeparator + "Molecule"
                    + Settings.fileSeparator + "Inorganic"
                    + Settings.fileSeparator + "water.pdb\n");
        } finally {
            fw2.close();
        }
    } catch (IOException ioe) {
        // best-effort setup, but at least record the failure
        ioe.printStackTrace();
    }
    String tmpfile = "tmp.txt";
    File fa = new File(Env.getApplicationDataDir() + Settings.fileSeparator
            + tmpfile);
    if (fa.exists()) {
        fa.delete();
    }
    // launch nanocad
    if (Settings.VERBOSE) System.out.println("Calling nanocadMain");
    nanWindow = new nanocadFrame2();
    nanWindow.addWindowListener(this);
    nanWindow.nano.addComponentListener(this);
    System.out.println(" Done with Nanocad");
}
/**
 * Hands a molecule specification produced by an external editor (Nanocad)
 * to the G03 input builder and updates the builder's notice label.
 *
 * @param i the molecular specification text
 */
public void changeInputText(String i) {
    try {
        //jayeeta added following lines
        showMolEditor.tempmol = i;
        //Set the Label in G03MenuTree
        G03MenuTree.nanocadNotice.setText("Molecular Specification Imported from Nanocad");
    } catch (RuntimeException e) {
        // NOTE(review): presumably guards against the G03 menu tree not
        // being initialized yet — confirm which RuntimeException is
        // actually expected here.
        e.printStackTrace();
    }
}
// public void enableButtons(boolean enable) {
// readButton.setEnabled(enable);
// prefButton.setEnabled(enable);
// submButton.setEnabled(enable);
// saveButton.setEnabled(enable);
// nanocadButton.setEnabled(enable);
// licenseButton.setEnabled(enable);
// helpButton.setEnabled(enable);
// mangButton.setEnabled(enable);
// if (!Settings.WEBSERVICE) {
// usageButton.setEnabled(enable);
// }
// }
/**
 * Enables/disables the session-dependent buttons and re-labels the
 * sign-in/sign-out button to match {@code Settings.authenticated}.
 */
public void updateAuthenticatedStatus() {
    prefButton.setEnabled(Settings.authenticated);
    mangButton.setEnabled(Settings.authenticated);
    submGuiButton.setEnabled(Settings.authenticated);
    // the standalone usage button only exists in the pre-WS client
    if (!Settings.WEBSERVICE) {
        usageButton.setEnabled(Settings.authenticated);
    }
    if (Settings.authenticated) {
        setAuthButton("Sign Out",
                "<html><p>Disconnect from the SEAGrid.</p><html>");
    } else {
        setAuthButton("Sign In",
                "<html><p>Authenticate to the SEAGrid.</p><html>");
    }
}
/* WindowListener interface definition methods.  Only windowClosing (below)
 * carries behavior; the remaining callbacks are intentionally empty. */
public void windowOpened(WindowEvent e) {
}

public void windowClosed(WindowEvent e) {
}

public void windowIconified(WindowEvent e) {
}

public void windowDeiconified(WindowEvent e) {
}

public void windowActivated(WindowEvent e) {
}

public void windowDeactivated(WindowEvent e) {
}
/**
 * WindowListener callback fired when one of the Nanocad editor windows is
 * closing.  Imports any molecule the editor left in tmp.txt into the input
 * builder, then disposes the window that raised the event.
 *
 * Fixes over the previous version: the BufferedReader is closed even when
 * readLine() throws (it was leaked), and the line accumulation uses a
 * StringBuilder instead of O(n^2) string concatenation.
 */
public void windowClosing(WindowEvent e) {
    Trace.entry();
    String name = "";
    // check for temp file and if it exists, load into text box
    System.err.println("editingStuff:load tmp.txt file here!");
    File f = new File(Env.getApplicationDataDir() + Settings.fileSeparator
            + "tmp.txt");
    if (f.exists()) {
        try {
            BufferedReader inStream = new BufferedReader(new FileReader(f));
            try {
                StringBuilder text = new StringBuilder();
                String line;
                while ((line = inStream.readLine()) != null) {
                    if (line.length() > 0) { // skip empty lines
                        text.append(line).append("\n");
                        System.err.println(line);
                    }
                }
                changeInputText(text.toString());
            } finally {
                inStream.close();
            }
        } catch (IOException ioe) {
            System.err.println("IOException in editJobPanel");
        }
    }
    // dispatch on the simple class name of the closing window
    name = e.getWindow().getClass().getName();
    name = name.substring(name.lastIndexOf(".") + 1);
    if (name.equalsIgnoreCase("Nanocad3D")) {
        nano3DWindow.dispose();
        Nanocad3D.glcanvas.setVisible(false);
        nanbool = false;
        optsComponent.selectedFrontPanel = 0;
    } else if (name.equalsIgnoreCase("nanocadFrame2")) {
        nanWindow.dispose();
        if (nanWindow.nano.t != null)
            nanWindow.nano.t.setVisible(false);
        nanbool = false;
        optsComponent.selectedFrontPanel = 0;
    }
    Trace.exit();
}
/* ComponentListener interface definition methods.  Only componentHidden
 * (below) carries behavior; the rest are intentionally empty. */
public void componentMoved(ComponentEvent e) {
}

public void componentResized(ComponentEvent e) {
}

public void componentShown(ComponentEvent e) {
}
/**
 * ComponentListener callback: when the embedded Nanocad component is
 * hidden, import any molecule it wrote to tmp.txt into the input builder
 * and dispose the Nanocad window.
 *
 * Fixes over the previous version: the BufferedReader is closed even when
 * readLine() throws (it was leaked), and line accumulation uses a
 * StringBuilder instead of O(n^2) string concatenation.
 */
public void componentHidden(ComponentEvent e) {
    System.err.println("load temp file here!");
    File f = new File(Env.getApplicationDataDir() + Settings.fileSeparator
            + "tmp.txt");
    if (f.exists()) {
        try {
            BufferedReader inStream = new BufferedReader(new FileReader(f));
            try {
                StringBuilder text = new StringBuilder();
                String line;
                while ((line = inStream.readLine()) != null) {
                    if (line.length() > 0) { // skip empty lines
                        text.append(line).append("\n");
                        System.err.println(line);
                    }
                }
                changeInputText(text.toString());
            } finally {
                inStream.close();
            }
        } catch (IOException ioe) {
            System.err.println("IOException in editJobPanel");
        }
    }
    nanWindow.dispose();
    optsComponent.selectedFrontPanel = 0;
}
}
| |
package net.meisen.general.genmisc.raster.definition.impl.date;
import static org.junit.Assert.assertEquals;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import net.meisen.general.genmisc.collections.Collections;
import net.meisen.general.genmisc.raster.data.IRasterModelData;
import net.meisen.general.genmisc.raster.data.impl.BaseModelData;
import net.meisen.general.genmisc.raster.definition.IRaster;
import net.meisen.general.genmisc.raster.definition.IRasterModel;
import net.meisen.general.genmisc.raster.definition.RasterModelEntryType;
import net.meisen.general.genmisc.raster.definition.impl.BaseRasterModel;
import net.meisen.general.genmisc.raster.definition.impl.BaseRasterModelEntry;
import net.meisen.general.genmisc.raster.function.impl.BucketLabel;
import net.meisen.general.genmisc.raster.function.impl.Const;
import net.meisen.general.genmisc.raster.function.impl.Count;
import net.meisen.general.genmisc.raster.function.impl.Group;
import net.meisen.general.genmisc.raster.function.impl.IntervalSum;
import net.meisen.general.genmisc.raster.function.impl.Value;
import net.meisen.general.genmisc.raster.utilities.DateRasterUtilities;
import net.meisen.general.genmisc.raster.utilities.GeneralUtilities;
import net.meisen.general.genmisc.raster.utilities.RasterUtilities;
import org.junit.Test;
/**
* Tests the implementation of a <code>Raster</code> using the <code>Date</code>
* -<code>RasterLogic</code>
*
* @author pmeisen
*
*/
@SuppressWarnings("ConstantConditions")
public class TestDateRaster {
/**
 * The name of the <code>RasterModel</code> which is created and added to
 * the <code>RasterModel</code> in this testcase
 */
private static final String MODELNAME = "MODEL";
/**
 * The name of the interval-start data-field in the <code>ModelData</code>
 */
private static final String MD_INT_START = "INT_START";
/**
 * The name of the interval-end data-field in the <code>ModelData</code>
 */
private static final String MD_INT_END = "INT_END";
/**
 * The name of the interval-start data-field in the
 * <code>RasterModelData</code>
 */
private static final String MRD_INT_START = "START";
/**
 * The name of the interval-end data-field in the
 * <code>RasterModelData</code>
 */
private static final String MRD_INT_END = "END";
/**
 * Creates a <code>Raster</code> with a specific <code>RasterModel</code>
 *
 * @param granularity
 *            the <code>DateGranularity</code> of the <code>Raster</code>
 *            created
 * @param bucketSize
 *            the bucket-size used
 * @param locale
 *            the <code>Locale</code> used
 * @return the created <code>Raster</code>
 */
private IRaster<Date> createRasterWithModel(
        final DateGranularity granularity, final int bucketSize,
        final Locale locale) {

    // one model mapping the interval start/end fields of the ModelData
    // onto the corresponding RasterModelData fields
    final Object[] startParams = { MD_INT_START };
    final Object[] endParams = { MD_INT_END };
    final IRasterModel intervalModel = RasterUtilities.createRasterModel(
            MRD_INT_START, MRD_INT_END, new Value(), new Value(),
            startParams, endParams);

    final Map<String, IRasterModel> modelsByName = new HashMap<String, IRasterModel>();
    modelsByName.put(MODELNAME, intervalModel);

    // build the date-based raster for the requested granularity
    return DateRasterUtilities.createDateRaster(granularity, bucketSize,
            locale, modelsByName);
}
/**
 * Tests the result of adding nothing to the <code>Raster</code>: every
 * one-minute bucket of the day must exist and count 0.
 */
@SuppressWarnings("ConstantConditions")
@Test
public void testEmptyData() {
    final DateGranularity m = DateGranularity.MINUTES;
    final Locale de = new Locale("de");

    // create the raster
    final IRaster<Date> raster = createRasterWithModel(m, 1, de);

    // add a counting function to the model
    final BaseRasterModel model = (BaseRasterModel) raster
            .getConfiguration().getModel(MODELNAME);
    model.addEntry(new BaseRasterModelEntry("TEST",
            RasterModelEntryType.VALUE, new Count()));

    // test the result — 24h * 60min = 1440 buckets
    // (fixed: assertEquals takes the EXPECTED value first)
    final Collection<IRasterModelData> rasterData = raster
            .getRasterModelData(MODELNAME);
    assertEquals(1440, rasterData.size());
    for (int i = 0; i < rasterData.size(); i++) {
        final IRasterModelData rmd = Collections.get(i, rasterData);
        assertEquals(Integer.valueOf(0), rmd.get("TEST"));
    }
}
/**
 * Tests the result of adding some <code>ModelData</code> to the
 * <code>Raster</code> using the <code>Count</code>-
 * <code>RasterFunction</code>
 */
@Test
public void testCountedData() {
    final DateGranularity m = DateGranularity.MINUTES;
    final Locale de = new Locale("de");

    // create the raster
    final IRaster<Date> raster = createRasterWithModel(m, 1, de);

    // add a counting function to the model
    final BaseRasterModel model = (BaseRasterModel) raster
            .getConfiguration().getModel(MODELNAME);
    model.addEntry(new BaseRasterModelEntry("TEST",
            RasterModelEntryType.VALUE, new Count()));

    // begin the tests
    BaseModelData md;
    Collection<IRasterModelData> rasterData;

    // create data for the whole interval
    md = new BaseModelData();
    md.setValue(MD_INT_START,
            GeneralUtilities.getDate("01.01.2010 00:00:00"));
    md.setValue(MD_INT_END, GeneralUtilities.getDate("01.01.2010 24:00:00"));
    raster.addModelData(md);

    // test the result — every minute bucket now counts 1
    // (fixed: assertEquals takes the EXPECTED value first)
    rasterData = raster.getRasterModelData(MODELNAME);
    assertEquals(1440, rasterData.size());
    for (int i = 0; i < rasterData.size(); i++) {
        final IRasterModelData rmd = Collections.get(i, rasterData);
        assertEquals(Integer.valueOf(1), rmd.get("TEST"));
    }

    // create data for half a day
    md = new BaseModelData();
    md.setValue(MD_INT_START,
            GeneralUtilities.getDate("01.01.2010 00:00:00"));
    md.setValue(MD_INT_END, GeneralUtilities.getDate("01.01.2010 12:00:00"));
    raster.addModelData(md);

    // test the result — first 720 buckets are covered by both intervals
    rasterData = raster.getRasterModelData(MODELNAME);
    assertEquals(1440, rasterData.size());
    for (int i = 0; i < rasterData.size(); i++) {
        final IRasterModelData rmd = Collections.get(i, rasterData);
        if (i < 720) {
            assertEquals(Integer.valueOf(2), rmd.get("TEST"));
        } else {
            assertEquals(Integer.valueOf(1), rmd.get("TEST"));
        }
    }
}
/**
 * Some complex test using the <code>Raster</code> and some
 * <code>RasterFunctions</code>: grouping, counting, interval sums and
 * bucket labelling on a 30-minute raster (48 buckets per day).
 */
@Test
public void testGroupAndFunctions() {
    final DateGranularity m = DateGranularity.MINUTES;
    final Locale de = new Locale("de");
    // create the raster (bucket size 30 minutes)
    final IRaster<Date> raster = createRasterWithModel(m, 30, de);
    // add some functions to the model
    final BaseRasterModel model = (BaseRasterModel) raster
            .getConfiguration().getModel(MODELNAME);
    model.addEntry(new BaseRasterModelEntry("GROUPER0",
            RasterModelEntryType.GROUP, new Const(), "Const"));
    model.addEntry(new BaseRasterModelEntry("GROUPER1",
            RasterModelEntryType.GROUP, new Value(), "GROUP1"));
    model.addEntry(new BaseRasterModelEntry("GROUPER2",
            RasterModelEntryType.GROUP, new Value(), "GROUP2"));
    model.addEntry(new BaseRasterModelEntry("COUNT",
            RasterModelEntryType.VALUE, new Count()));
    model.addEntry(new BaseRasterModelEntry("COUNTNONULLS",
            RasterModelEntryType.VALUE, new Count(), "ISNULL"));
    model.addEntry(new BaseRasterModelEntry("SUM",
            RasterModelEntryType.VALUE, new IntervalSum()));
    model.addEntry(new BaseRasterModelEntry("GROUP",
            RasterModelEntryType.VALUE, new Group(),
            "[GROUPER1] of [GROUPER2] (Const: [GROUPER0])"));
    model.addEntry(new BaseRasterModelEntry("VALUER0",
            RasterModelEntryType.VALUE, new Value(), "VALUE0"));
    model.addEntry(new BaseRasterModelEntry("LABELER",
            RasterModelEntryType.VALUE, new BucketLabel(),
            "%1$tH:%1$tM - %2$tH:%2$tM"));
    BaseModelData modelData;
    Collection<IRasterModelData> rasterModelData;
    // now create different ModelData to be added (45 min, one group)
    modelData = new BaseModelData();
    modelData.setValue("GROUP1", "Planned");
    modelData.setValue("GROUP2", "Cleaner");
    modelData.setValue("ISNULL", null);
    modelData.setValue("VALUE0", "Some Value");
    modelData.setValue(MD_INT_START,
            GeneralUtilities.getDate("20.01.1981 00:00:00"));
    modelData.setValue(MD_INT_END,
            GeneralUtilities.getDate("20.01.1981 00:45:00"));
    raster.addModelData(modelData);
    // add the data and check results; assertEquals takes (expected, actual)
    rasterModelData = raster.getAll();
    assertEquals(48, rasterModelData.size());
    IRasterModelData d;
    d = Collections.get(0, rasterModelData);
    assertEquals(Integer.valueOf(1), d.get("COUNT"));
    assertEquals(Integer.valueOf(0), d.get("COUNTNONULLS"));
    // NOTE(review): new BigDecimal(double) is normally discouraged, but the
    // expected value must equal the raster's result exactly
    // (BigDecimal.equals is scale-sensitive), so it is kept deliberately
    assertEquals(new BigDecimal(30.0), d.get("SUM"));
    assertEquals("Planned of Cleaner (Const: Const)", d.get("GROUP"));
    assertEquals("Some Value", d.get("VALUER0"));
    assertEquals("00:00 - 00:30", d.get("LABELER"));
    d = Collections.get(1, rasterModelData);
    assertEquals(Integer.valueOf(1), d.get("COUNT"));
    assertEquals(Integer.valueOf(0), d.get("COUNTNONULLS"));
    assertEquals(new BigDecimal(15.0), d.get("SUM"));
    assertEquals("Planned of Cleaner (Const: Const)", d.get("GROUP"));
    assertEquals("Some Value", d.get("VALUER0"));
    assertEquals("00:30 - 01:00", d.get("LABELER"));
    // the remaining buckets hold the group defaults with zero counts/sums
    int i = 0;
    for (final IRasterModelData dRest : rasterModelData) {
        if (i > 1) {
            assertEquals(Integer.valueOf(0), dRest.get("COUNT"));
            assertEquals(Integer.valueOf(0), dRest.get("COUNTNONULLS"));
            assertEquals(new BigDecimal(0), dRest.get("SUM"));
            assertEquals("Planned of Cleaner (Const: Const)",
                    dRest.get("GROUP"));
            assertEquals("Some Value", dRest.get("VALUER0"));
        }
        i++;
    }
    // now create different ModelData to be added (second group, 00:18-00:54)
    modelData = new BaseModelData();
    modelData.setValue("GROUP1", "Real");
    modelData.setValue("GROUP2", "Cleaner");
    modelData.setValue("ISNULL", null);
    modelData.setValue(MD_INT_START, GeneralUtilities.getDate("20.01.1981 00:18:00"));
    modelData.setValue(MD_INT_END, GeneralUtilities.getDate("20.01.1981 00:54:00"));
    raster.addModelData(modelData);
    // add the data and check results: two groups -> 2 * 48 = 96 buckets
    rasterModelData = raster.getAll();
    assertEquals(96, rasterModelData.size());
    d = Collections.get(0, rasterModelData);
    assertEquals(Integer.valueOf(1), d.get("COUNT"));
    assertEquals(Integer.valueOf(0), d.get("COUNTNONULLS"));
    assertEquals(new BigDecimal(12.0), d.get("SUM"));
    assertEquals("Real of Cleaner (Const: Const)", d.get("GROUP"));
    assertEquals("00:00 - 00:30", d.get("LABELER"));
    d = Collections.get(1, rasterModelData);
    assertEquals(Integer.valueOf(1), d.get("COUNT"));
    assertEquals(Integer.valueOf(0), d.get("COUNTNONULLS"));
    assertEquals(new BigDecimal(24.0), d.get("SUM"));
    assertEquals("Real of Cleaner (Const: Const)", d.get("GROUP"));
    assertEquals("00:30 - 01:00", d.get("LABELER"));
    i = 0;
    for (final IRasterModelData dRest : rasterModelData) {
        if (i > 1 && i < 48) {
            assertEquals(Integer.valueOf(0), dRest.get("COUNT"));
            assertEquals(Integer.valueOf(0), dRest.get("COUNTNONULLS"));
            assertEquals(new BigDecimal(0), dRest.get("SUM"));
            assertEquals("Real of Cleaner (Const: Const)",
                    dRest.get("GROUP"));
        }
        i++;
    }
}
}
| |
/*****************************************************************************
* Source code information
* -----------------------
* Original author Ian Dickinson, HP Labs Bristol
* Author email ian_dickinson@users.sourceforge.net
* Package Jena
* Created 16 Jan 2001
* Filename $RCSfile: DAML_OIL.java,v $
* Revision $Revision: 1.2 $
* Release status Preview-release $State: Exp $
*
* Last modified on $Date: 2009/10/06 13:04:43 $
* by $Author: ian_dickinson $
*
* (c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* (see footer for full conditions)
*****************************************************************************/
// Package
///////////////
package com.hp.hpl.jena.vocabulary;
// Imports
///////////////
import com.hp.hpl.jena.rdf.model.*;
/**
* <p>
* Defines a vocabulary of RDF constants used by the latest release of DAML+oil
* for class and property names. The contract maintained by the DAML_OIL class
* is that it will be periodically updated to reflect the latest release of DAML+OIL.
* Currently this is the March 2001 release. Older versions of the DAML+OIL
* vocabulary, for compatibility with older ontology documents, are maintained in
* classes named <code>DAML_OIL_YYYY_MM</code>, for example DAML_OIL_2000_12.
* </p>
* <p>
* <b>Note</b> that rudimentary support for multiple versions of DAML namespaces is
* included in this release, by the mechanism of providing methods with the same
* names as the static constants in the {@link DAMLVocabulary} interface. This mechanism
* is still under design review, and may change in future versions of Jena.
* </p>
* <p>
* Note also that the capitalisation of
* the constants herein is designed to follow normal practice in the RDF community, rather
* than normal practice in the Java community. This is consistent with the capitalisation
* of constants in Jena.
* </p>
*
* @author Ian Dickinson, HP Labs (<a href="mailto:ian_dickinson@users.sourceforge.net">email</a>)
* @version CVS info: $Id: DAML_OIL.java,v 1.2 2009/10/06 13:04:43 ian_dickinson Exp $
*/
public class DAML_OIL
implements DAMLVocabulary
{
// Constants
//////////////////////////////////
/** Singleton instance reference */
private static DAML_OIL s_instance = new DAML_OIL();
/** Model used only to mint the vocabulary resources and properties */
private static Model s_model = ModelFactory.createDefaultModel();
/* Resources */
/** DAML namespace resource for the current release */
public static final Resource NAMESPACE_DAML = s_model.createResource( NAMESPACE_DAML_2001_03_URI );
/** DAML namespace resource for daml:collection. Note: strictly
 * daml:collection is only a string, not a resource in the DAML namespace. */
public static final Resource collection = s_model.createResource( NAMESPACE_DAML_2001_03_URI+"collection" );
/** RDF resource for DAML List class. */
public static final Resource List = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "List" );
/** RDF resource for DAML UniqueProperty class */
public static final Resource UniqueProperty = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "UniqueProperty" );
/** RDF resource for DAML TransitiveProperty class */
public static final Resource TransitiveProperty = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "TransitiveProperty" );
/** RDF resource for DAML UnambiguousProperty class */
public static final Resource UnambiguousProperty = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "UnambiguousProperty" );
/** RDF resource for DAML Restriction class */
public static final Resource Restriction = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Restriction" );
/** RDF resource for DAML Ontology class */
public static final Resource Ontology = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Ontology" );
/** RDF resource for the nil (empty) list. */
public static final Resource nil = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "nil" );
/** RDF resource for the top type (i.e. the super-type of all types). */
public static final Resource Thing = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Thing" );
/** RDF resource for the bottom type (i.e. the sub-type of all types; it has no instances). */
public static final Resource Nothing = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Nothing" );
/** Alias for rdfs:Literal in the daml namespace. */
public static final Resource Literal = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Literal" );
/** RDF resource for DAML Class class (a DAML sub-class of rdfs:Class). */
public static final Resource Class = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Class" );
/** RDF resource for DAML Datatype class (a DAML sub-class of rdfs:Class). */
public static final Resource Datatype = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Datatype" );
/** RDF resource for DAML DatatypeProperty class (a DAML sub-class of rdf:Property). */
public static final Resource DatatypeProperty = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "DatatypeProperty" );
/** RDF resource for DAML ObjectProperty class (a DAML sub-class of rdf:Property). */
public static final Resource ObjectProperty = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "ObjectProperty" );
/** Alias for rdfs:Property in the daml namespace. From 2001/03 onwards, use of ObjectProperty or DatatypeProperty is suggested. */
public static final Resource Property = s_model.createResource( NAMESPACE_DAML_2001_03_URI + "Property" );
/* Properties */
/* Note: the Property fields below are assigned in the static initialiser
 * rather than inline; they are therefore not final. */
/** RDF Property for the DAML versionInfo property */
public static Property versionInfo = null;
/** RDF Property for the DAML imports property on Ontologies */
public static Property imports = null;
/** RDF Property for the DAML disjointWith property on Classes */
public static Property disjointWith = null;
/** RDF Property for the DAML disjointUnionOf property on Classes */
public static Property disjointUnionOf = null;
/** RDF Property for the DAML sameClassAs property on Classes */
public static Property sameClassAs = null;
/** RDF Property for the DAML samePropertyAs property on Properties */
public static Property samePropertyAs = null;
/** RDF Property for the oneOf property on DAML class expressions */
public static Property oneOf = null;
/** RDF Property for the intersectionOf property on class expressions */
public static Property intersectionOf = null;
/** RDF Property for the unionOf property on class expressions */
public static Property unionOf = null;
/** RDF Property for the complementOf property on class expressions */
public static Property complementOf = null;
/** RDF Property for the equivalentTo property on DAML values */
public static Property equivalentTo = null;
/** RDF Property for the DAML onProperty property on Restrictions */
public static Property onProperty = null;
/** RDF Property for the DAML toClass property on Restrictions */
public static Property toClass = null;
/** RDF Property for the DAML hasValue property on Restrictions */
public static Property hasValue = null;
/** RDF Property for the DAML hasClass property on Restrictions */
public static Property hasClass = null;
/** RDF Property for the DAML hasClassQ property on Restrictions */
public static Property hasClassQ = null;
/** RDF Property for the DAML cardinality property on Restrictions */
public static Property cardinality = null;
/** RDF Property for the DAML minCardinality property on Restrictions */
public static Property minCardinality = null;
/** RDF Property for the DAML maxCardinality property on Restrictions */
public static Property maxCardinality = null;
/** RDF Property for the DAML cardinalityQ property on Restrictions */
public static Property cardinalityQ = null;
/** RDF Property for the DAML minCardinalityQ property on Restrictions */
public static Property minCardinalityQ = null;
/** RDF Property for the DAML maxCardinalityQ property on Restrictions */
public static Property maxCardinalityQ = null;
/** RDF Property for the DAML inverseOf property on Properties */
public static Property inverseOf = null;
/** RDF Property for the DAML first property on Lists */
public static Property first = null;
/** RDF Property for the DAML rest property on Lists */
public static Property rest = null;
/** RDF Property for the DAML item property on Lists */
public static Property item = null;
/** Alias for rdfs:subPropertyOf in daml namespace */
public static Property subPropertyOf = null;
/** Alias for rdf:type in daml namespace */
public static Property type = null;
/** Alias for rdf:value in daml namespace */
public static Property value = null;
/** Alias for rdfs:subClassOf in daml namespace */
public static Property subClassOf = null;
/** Alias for rdfs:domain in daml namespace */
public static Property domain = null;
/** Alias for rdfs:range in daml namespace */
public static Property range = null;
/** Alias for rdfs:label in daml namespace */
public static Property label = null;
/** Alias for rdfs:comment in daml namespace */
public static Property comment = null;
/** Alias for rdfs:seeAlso in daml namespace */
public static Property seeAlso = null;
/** Alias for rdfs:isDefinedBy in daml namespace */
public static Property isDefinedBy = null;
/** RDF Property for the DAML sameIndividualAs property on instances */
public static Property sameIndividualAs = null;
/** RDF Property for the DAML differentIndividualFrom property on instances */
public static Property differentIndividualFrom = null;
// Static variables
//////////////////////////////////
static {
// properties: minted once in the 2001/03 DAML namespace
versionInfo = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "versionInfo" );
imports = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "imports" );
disjointWith = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "disjointWith" );
disjointUnionOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "disjointUnionOf" );
sameClassAs = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "sameClassAs" );
samePropertyAs = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "samePropertyAs" );
equivalentTo = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "equivalentTo" );
oneOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "oneOf" );
intersectionOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "intersectionOf" );
unionOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "unionOf" );
complementOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "complementOf" );
onProperty = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "onProperty" );
toClass = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "toClass" );
hasValue = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "hasValue" );
hasClass = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "hasClass" );
hasClassQ = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "hasClassQ" );
cardinality = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "cardinality" );
cardinalityQ = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "cardinalityQ" );
minCardinality = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "minCardinality" );
minCardinalityQ = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "minCardinalityQ" );
maxCardinality = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "maxCardinality" );
maxCardinalityQ = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "maxCardinalityQ" );
inverseOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "inverseOf" );
first = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "first" );
rest = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "rest" );
item = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "item" );
subPropertyOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "subPropertyOf" );
type = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "type" );
value = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "value" );
subClassOf = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "subClassOf" );
domain = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "domain" );
range = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "range" );
label = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "label" );
comment = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "comment" );
seeAlso = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "seeAlso" );
isDefinedBy = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "isDefinedBy" );
sameIndividualAs = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "sameIndividualAs" );
differentIndividualFrom = s_model.createProperty( NAMESPACE_DAML_2001_03_URI, "differentIndividualFrom" );
}
// Instance variables
//////////////////////////////////
// Constructors
//////////////////////////////////
/**
 * Constructor is private to ensure that only a single instance is available.
 */
private DAML_OIL() {
}
// External signature methods
//////////////////////////////////
/**
 * Answer the namespace URI for the current vocabulary. Note that this value is used to
 * construct the constants in the vocabulary, assuring that they are in the correct namespace
 * for this release.
 *
 * @return the namespace URI as a string.
 */
public String getNamespace() {
return NAMESPACE_DAML_2001_03_URI;
}
/**
 * Answer the singleton instance of this vocabulary
 *
 * @return The singleton instance
 */
public static DAMLVocabulary getInstance() {
return s_instance;
}
// The following methods provide the implementations for the DAMLVocabulary interface
/** Answer the DAML namespace resource for the current release */
public Resource NAMESPACE_DAML() { return NAMESPACE_DAML; }
/** Answer the RDF resource for DAML List class. */
public Resource List() { return List; }
/** Answer the RDF resource for DAML UniqueProperty class */
public Resource UniqueProperty() { return UniqueProperty; }
/** Answer the RDF resource for DAML TransitiveProperty class */
public Resource TransitiveProperty() { return TransitiveProperty; }
/** Answer the RDF resource for DAML UnambiguousProperty class */
public Resource UnambiguousProperty() { return UnambiguousProperty; }
/** Answer the RDF resource for DAML Restriction class */
public Resource Restriction() { return Restriction; }
/** Answer the RDF resource for DAML Ontology class */
public Resource Ontology() { return Ontology; }
/** Answer the RDF resource for the nil (empty) list. */
public Resource nil() { return nil; }
/** Answer the RDF resource for the top type (i.e. the super-type of all types). */
public Resource Thing() { return Thing; }
/** Answer the RDF resource for the bottom type (i.e. the sub-type of all types; it has no instances). */
public Resource Nothing() { return Nothing; }
/** Answer the Alias for rdfs:Literal in the daml namespace. */
public Resource Literal() { return Literal; }
/** Answer the RDF resource for DAML Class class (a DAML sub-class of rdfs:Class). */
public Resource Class() { return Class; }
/** Answer the RDF resource for DAML Datatype class (a DAML sub-class of rdfs:Class). */
public Resource Datatype() { return Datatype; }
/** Answer the RDF resource for DAML DatatypeProperty class (a DAML sub-class of rdf:Property). */
public Resource DatatypeProperty() { return DatatypeProperty; }
/** Answer the RDF resource for DAML ObjectProperty class (a DAML sub-class of rdf:Property). */
public Resource ObjectProperty() { return ObjectProperty; }
/* Properties */
/** Answer the RDF Property for the DAML versionInfo property */
public Property versionInfo() { return versionInfo; }
/** Answer the RDF Property for the DAML imports property on Ontologies */
public Property imports() { return imports; }
/** Answer the RDF Property for the DAML disjointWith property on Classes */
public Property disjointWith() { return disjointWith; }
/** Answer the RDF Property for the DAML disjointUnionOf property on Classes */
public Property disjointUnionOf() { return disjointUnionOf; }
/** Answer the RDF Property for the DAML sameClassAs property on Classes */
public Property sameClassAs() { return sameClassAs; }
/** Answer the RDF Property for the DAML samePropertyAs property on Properties */
public Property samePropertyAs() { return samePropertyAs; }
/** Answer the RDF Property for the oneOf property on DAML class expressions */
public Property oneOf() { return oneOf; }
/** Answer the RDF Property for the intersectionOf property on class expressions */
public Property intersectionOf() { return intersectionOf; }
/** Answer the RDF Property for the unionOf property on class expressions */
public Property unionOf() { return unionOf; }
/** Answer the RDF Property for the complementOf property on class expressions */
public Property complementOf() { return complementOf; }
/** Answer the RDF Property for the equivalentTo property on DAML values */
public Property equivalentTo() { return equivalentTo; }
/** Answer the RDF Property for the DAML onProperty property on Restrictions */
public Property onProperty() { return onProperty; }
/** Answer the RDF Property for the DAML toClass property on Restrictions */
public Property toClass() { return toClass; }
/** Answer the RDF Property for the DAML hasValue property on Restrictions */
public Property hasValue() { return hasValue; }
/** Answer the RDF Property for the DAML hasClass property on Restrictions */
public Property hasClass() { return hasClass; }
/** Answer the RDF Property for the DAML hasClassQ property on Restrictions */
public Property hasClassQ() { return hasClassQ; }
/** Answer the RDF Property for the DAML cardinality property on Restrictions */
public Property cardinality() { return cardinality; }
/** Answer the RDF Property for the DAML minCardinality property on Restrictions */
public Property minCardinality() { return minCardinality; }
/** Answer the RDF Property for the DAML maxCardinality property on Restrictions */
public Property maxCardinality() { return maxCardinality; }
/** Answer the RDF Property for the DAML cardinalityQ property on Restrictions */
public Property cardinalityQ() { return cardinalityQ; }
/** Answer the RDF Property for the DAML minCardinalityQ property on Restrictions */
public Property minCardinalityQ() { return minCardinalityQ; }
/** Answer the RDF Property for the DAML maxCardinalityQ property on Restrictions */
public Property maxCardinalityQ() { return maxCardinalityQ; }
/** Answer the RDF Property for the DAML inverseOf property on Properties */
public Property inverseOf() { return inverseOf; }
/** Answer the RDF Property for the DAML first property on Lists */
public Property first() { return first; }
/** Answer the RDF Property for the DAML rest property on Lists */
public Property rest() { return rest; }
/** Answer the RDF Property for the DAML item property on Lists */
public Property item() { return item; }
/** Answer the alias for rdfs:subPropertyOf in daml namespace */
public Property subPropertyOf() { return subPropertyOf; }
/** Answer the alias for rdf:type in daml namespace */
public Property type() { return type; }
/** Answer the alias for rdf:value in daml namespace */
public Property value() { return value; }
/** Answer the alias for rdfs:subClassOf in daml namespace */
public Property subClassOf() { return subClassOf; }
/** Answer the alias for rdfs:domain in daml namespace */
public Property domain() { return domain; }
/** Answer the alias for rdfs:range in daml namespace */
public Property range() { return range; }
/** Answer the alias for rdfs:label in daml namespace */
public Property label() { return label; }
/** Answer the alias for rdfs:comment in daml namespace */
public Property comment() { return comment; }
/** Answer the alias for rdfs:seeAlso in daml namespace */
public Property seeAlso() { return seeAlso; }
/** Answer the alias for rdfs:isDefinedBy in daml namespace */
public Property isDefinedBy() { return isDefinedBy; }
/** Answer the RDF Property for the DAML sameIndividualAs property on instances */
public Property sameIndividualAs() { return sameIndividualAs; }
/** Answer the RDF Property for the DAML differentIndividualFrom property on instances */
public Property differentIndividualFrom() { return differentIndividualFrom; }
/** Answer the alias for rdfs:Property in the daml namespace. From 2001/03 onwards, it is preferable to use either DatatypeProperty or ObjectProperty. */
public Resource Property() { return Property; }
}
/*
(c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
| |
package tars.logic;
import static tars.commons.core.Messages.MESSAGE_DUPLICATE_TAG;
import static tars.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static tars.commons.core.Messages.MESSAGE_INVALID_TAG_DISPLAYED_INDEX;
import java.util.ArrayList;
import org.junit.Test;
import tars.logic.commands.RedoCommand;
import tars.logic.commands.TagCommand;
import tars.logic.commands.UndoCommand;
import tars.model.Tars;
import tars.model.tag.ReadOnlyTag;
import tars.model.tag.Tag;
import tars.model.task.ReadOnlyTask;
import tars.model.task.Task;
import tars.ui.formatter.Formatter;
// @@author A0139924W
/**
* Logic command test for tag
*/
public class TagLogicCommandTest extends LogicCommandTest {
@Test
public void execute_tag_invalidPrefix() throws Exception {
    // an unknown prefix ("/gg") must be rejected as an invalid command format
    final String invalidFormatMessage = String.format(
            MESSAGE_INVALID_COMMAND_FORMAT, TagCommand.MESSAGE_USAGE);
    assertCommandBehavior("tag /gg", invalidFormatMessage);
}
@Test
public void execute_tag_invalidFormat() throws Exception {
    // both a blank argument string and free text are invalid formats
    final String invalidFormatMessage = String.format(
            MESSAGE_INVALID_COMMAND_FORMAT, TagCommand.MESSAGE_USAGE);
    assertCommandBehavior("tag ", invalidFormatMessage);
    assertCommandBehavior("tag RANDOM_TEXT", invalidFormatMessage);
}
@Test
public void execute_tag_invalidIndex() throws Exception {
    // repeated expected message hoisted into a local for readability
    final String invalidFormatMessage = String.format(
            MESSAGE_INVALID_COMMAND_FORMAT, TagCommand.MESSAGE_USAGE);
    // EP: negative number
    assertCommandBehavior("tag /e -1 VALIDTASKNAME", invalidFormatMessage);
    assertCommandBehavior("tag /del -1",
            MESSAGE_INVALID_TAG_DISPLAYED_INDEX);
    // EP: zero
    assertCommandBehavior("tag /e 0 VALIDTASKNAME",
            MESSAGE_INVALID_TAG_DISPLAYED_INDEX);
    assertCommandBehavior("tag /del 0",
            MESSAGE_INVALID_TAG_DISPLAYED_INDEX);
    // EP: signed number
    assertCommandBehavior("tag /e +1 VALIDTASKNAME", invalidFormatMessage);
    assertCommandBehavior("tag /e -2 VALIDTASKNAME", invalidFormatMessage);
    assertCommandBehavior("tag /del +1",
            MESSAGE_INVALID_TAG_DISPLAYED_INDEX);
    // ("tag /del -1" is already covered by the negative-number EP above;
    // the original repeated that exact assertion here)
    // EP: invalid number
    assertCommandBehavior("tag /del aaa", invalidFormatMessage);
    assertCommandBehavior("tag /del bbb", invalidFormatMessage);
}
@Test
public void execute_tag_emptyParameters() throws Exception {
    // every empty-parameter variant must report an invalid command format
    // NOTE(review): this test uses dash prefixes ("-e", "-del") whereas the
    // other tests use slash prefixes ("/e", "/del") - confirm this is the
    // intended (invalid) spelling rather than a typo
    final String invalidFormatMessage = String.format(
            MESSAGE_INVALID_COMMAND_FORMAT, TagCommand.MESSAGE_USAGE);
    assertCommandBehavior("tag", invalidFormatMessage);
    assertCommandBehavior("tag ", invalidFormatMessage);
    assertCommandBehavior("tag -e", invalidFormatMessage);
    assertCommandBehavior("tag -e ", invalidFormatMessage);
    assertCommandBehavior("tag -del", invalidFormatMessage);
    assertCommandBehavior("tag -del ", invalidFormatMessage);
}
@Test
public void execute_tagList_emptyListSuccessful() throws Exception {
    // listing the tags of an empty model prints an empty formatted list
    final String expectedList =
            new Formatter().formatTags(model.getUniqueTagList());
    // execute command and verify result
    assertCommandBehavior("tag /ls", expectedList);
}
@Test
public void execute_tagList_filledListSuccessful() throws Exception {
    // setup expectations: one task (and its tags) in both actual and expected
    final TypicalTestDataHelper helper = new TypicalTestDataHelper();
    final Task toBeAdded = helper.meetAdam();
    final Tars expectedTars = new Tars();
    expectedTars.addTask(toBeAdded);
    model.addTask(toBeAdded);
    // execute command and verify result
    final String expectedList =
            new Formatter().formatTags(model.getUniqueTagList());
    assertCommandBehavior("tag /ls", expectedList, expectedTars,
            expectedTars.getTaskList());
}
@Test
public void execute_tagRename_successful() throws Exception {
    // setup expectations
    TypicalTestDataHelper helper = new TypicalTestDataHelper();
    Task toBeAdded = helper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(toBeAdded);
    model.addTask(toBeAdded);
    // rename the first tag ("tag1") to "tag3" in the expected model
    ReadOnlyTag toBeRenamed =
            expectedTars.getUniqueTagList().getInternalList().get(0);
    Tag newTag = new Tag("tag3");
    expectedTars.getUniqueTagList().update(toBeRenamed, newTag);
    expectedTars.renameTasksWithNewTag(toBeRenamed, newTag);
    // execute command and verify result
    // (the original redundantly wrapped this in a second String.format)
    assertCommandBehavior("tag /e 1 tag3",
            String.format(TagCommand.MESSAGE_RENAME_TAG_SUCCESS, "tag1",
                    "tag3"),
            expectedTars, expectedTars.getTaskList());
}
@Test
public void execute_tagRename_duplicate() throws Exception {
    // setup expectations: the task carries tag1 and tag2, so renaming
    // tag1 to the already-present "tag2" must be rejected as a duplicate
    final TypicalTestDataHelper helper = new TypicalTestDataHelper();
    final Task toBeAdded = helper.meetAdam();
    final Tars expectedTars = new Tars();
    expectedTars.addTask(toBeAdded);
    model.addTask(toBeAdded);
    // execute command and verify result: model must remain unchanged
    assertCommandBehavior("tag /e 1 tag2", MESSAGE_DUPLICATE_TAG,
            expectedTars, expectedTars.getTaskList());
}
@Test
public void execute_tagRename_invalidIndex() throws Exception {
    // Populate both the actual model and the expected state with the same task.
    TypicalTestDataHelper dataHelper = new TypicalTestDataHelper();
    Task taskToAdd = dataHelper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(taskToAdd);
    model.addTask(taskToAdd);

    // Indexes past the end of the tag list must be reported as invalid.
    for (String command : new String[] {"tag /e 3 VALIDTAGNAME", "tag /e 4 VALIDTAGNAME"}) {
        assertCommandBehavior(command, MESSAGE_INVALID_TAG_DISPLAYED_INDEX,
                expectedTars, expectedTars.getTaskList());
    }
}
@Test
public void execute_tagRename_invalidTagName() throws Exception {
    // Populate both the actual model and the expected state with the same task.
    TypicalTestDataHelper dataHelper = new TypicalTestDataHelper();
    Task taskToAdd = dataHelper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(taskToAdd);
    model.addTask(taskToAdd);

    // A name violating the tag constraints should surface the constraint message.
    assertCommandBehavior("tag /e 1 INVALID_TAG_NAME",
            Tag.MESSAGE_TAG_CONSTRAINTS, expectedTars,
            expectedTars.getTaskList());
}
@Test
public void execute_tagDel_successful() throws Exception {
    // Populate both the actual model and the expected state with the same task.
    TypicalTestDataHelper dataHelper = new TypicalTestDataHelper();
    Task taskToAdd = dataHelper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(taskToAdd);
    model.addTask(taskToAdd);

    // Mirror the deletion in the expected state: drop the first tag everywhere.
    ReadOnlyTag tagToDelete =
            expectedTars.getUniqueTagList().getInternalList().get(0);
    expectedTars.getUniqueTagList().remove(new Tag(tagToDelete));
    expectedTars.removeTagFromAllTasks(tagToDelete);

    // Execute command and verify result.
    String expectedFeedback =
            String.format(TagCommand.MESSAGE_DELETE_TAG_SUCCESS, tagToDelete);
    assertCommandBehavior("tag /del 1", expectedFeedback,
            expectedTars, expectedTars.getTaskList());
}
@Test
public void execute_tagDel_invalidIndex() throws Exception {
    // Populate both the actual model and the expected state with the same task.
    TypicalTestDataHelper dataHelper = new TypicalTestDataHelper();
    Task taskToAdd = dataHelper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(taskToAdd);
    model.addTask(taskToAdd);

    // Indexes past the end of the tag list must be reported as invalid.
    for (String command : new String[] {"tag /del 3", "tag /del 4"}) {
        assertCommandBehavior(command, MESSAGE_INVALID_TAG_DISPLAYED_INDEX,
                expectedTars, expectedTars.getTaskList());
    }
}
@Test
public void execute_undoAndRedo_tagEditSuccessful() throws Exception {
    // Set up a model containing one task so its tags appear in the unique tag list.
    TypicalTestDataHelper helper = new TypicalTestDataHelper();
    Task toBeAdded = helper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(toBeAdded);
    model.addTask(toBeAdded);

    // Expected state after the rename: "tag1" -> "tag3".
    ReadOnlyTag toBeRenamed =
            expectedTars.getUniqueTagList().getInternalList().get(0);
    Tag newTag = new Tag("tag3");
    expectedTars.getUniqueTagList().update(toBeRenamed, newTag);
    expectedTars.renameTasksWithNewTag(toBeRenamed, newTag);
    // Fix: the original wrapped String.format twice; the outer no-arg call was
    // redundant and would misbehave if the formatted message contained '%'.
    assertCommandBehavior("tag /e 1 tag3",
            String.format(TagCommand.MESSAGE_RENAME_TAG_SUCCESS, "tag1", "tag3"),
            expectedTars, expectedTars.getTaskList());

    // Undo reverts the rename: "tag3" -> "tag1".
    toBeRenamed = expectedTars.getUniqueTagList().getInternalList().get(0);
    newTag = new Tag("tag1");
    expectedTars.getUniqueTagList().update(toBeRenamed, newTag);
    expectedTars.renameTasksWithNewTag(toBeRenamed, newTag);
    // execute undo and verify result
    assertCommandBehavior(UndoCommand.COMMAND_WORD,
            String.format(UndoCommand.MESSAGE_SUCCESS, ""), expectedTars,
            expectedTars.getTaskList());

    // Redo applies the rename again: "tag1" -> "tag3".
    toBeRenamed = expectedTars.getUniqueTagList().getInternalList().get(0);
    newTag = new Tag("tag3");
    expectedTars.getUniqueTagList().update(toBeRenamed, newTag);
    expectedTars.renameTasksWithNewTag(toBeRenamed, newTag);
    // execute redo and verify result
    assertCommandBehavior(RedoCommand.COMMAND_WORD,
            String.format(RedoCommand.MESSAGE_SUCCESS, ""), expectedTars,
            expectedTars.getTaskList());
}
@Test
public void execute_undoAndRedo_tagDelSuccessful() throws Exception {
    // Populate both the actual model and the expected state with the same task.
    TypicalTestDataHelper dataHelper = new TypicalTestDataHelper();
    Task taskToAdd = dataHelper.meetAdam();
    Tars expectedTars = new Tars();
    expectedTars.addTask(taskToAdd);
    model.addTask(taskToAdd);

    // Expected state after the delete: tag removed from the list and from all tasks.
    ReadOnlyTag tagToDelete =
            expectedTars.getUniqueTagList().getInternalList().get(0);
    expectedTars.getUniqueTagList().remove(new Tag(tagToDelete));
    ArrayList<ReadOnlyTask> tasksStrippedOfTag =
            expectedTars.removeTagFromAllTasks(tagToDelete);
    assertCommandBehavior("tag /del 1",
            String.format(TagCommand.MESSAGE_DELETE_TAG_SUCCESS, tagToDelete),
            expectedTars, expectedTars.getTaskList());

    // Undo restores the tag on the list and on every affected task.
    expectedTars.getUniqueTagList().add(new Tag(tagToDelete));
    expectedTars.addTagToAllTasks(tagToDelete, tasksStrippedOfTag);
    assertCommandBehavior(UndoCommand.COMMAND_WORD,
            String.format(UndoCommand.MESSAGE_SUCCESS, ""), expectedTars,
            expectedTars.getTaskList());

    // Redo deletes the tag again.
    expectedTars.getUniqueTagList().remove(new Tag(tagToDelete));
    expectedTars.removeTagFromAllTasks(tagToDelete);
    assertCommandBehavior(RedoCommand.COMMAND_WORD,
            String.format(RedoCommand.MESSAGE_SUCCESS, ""), expectedTars,
            expectedTars.getTaskList());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigParseOptions;
import com.typesafe.config.ConfigSyntax;
import gobblin.configuration.ConfigurationKeys;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
/**
 * Used to load pull files from the file system.
 *
 * <p>Pull files come in two syntaxes, distinguished by file extension: Java properties files and
 * HOCON files. Optionally, at most one *.properties "global" file per directory between
 * {@link #rootDirectory} and the pull file is loaded and used as fallback configuration, with
 * higher directories serving as fallback for lower ones.
 */
@Slf4j
@Getter
public class PullFileLoader {

  public static final String GLOBAL_PROPS_EXTENSION = ".properties";
  public static final PathFilter GLOBAL_PROPS_PATH_FILTER = new ExtensionFilter(GLOBAL_PROPS_EXTENSION);
  public static final Set<String> DEFAULT_JAVA_PROPS_PULL_FILE_EXTENSIONS = Sets.newHashSet("pull", "job");
  public static final Set<String> DEFAULT_HOCON_PULL_FILE_EXTENSIONS = Sets.newHashSet("json", "conf");

  private final Path rootDirectory;
  private final FileSystem fs;
  private final ExtensionFilter javaPropsPullFileFilter;
  private final ExtensionFilter hoconPullFileFilter;

  /**
   * A {@link PathFilter} that accepts {@link Path}s based on a set of valid extensions.
   * Matching is case-insensitive on the file name.
   */
  private static class ExtensionFilter implements PathFilter {
    private final Collection<String> extensions;

    public ExtensionFilter(String extension) {
      this(Lists.newArrayList(extension));
    }

    public ExtensionFilter(Collection<String> extensions) {
      // Normalize every extension to carry a leading dot so "pull" and ".pull" behave the same.
      this.extensions = Lists.newArrayList();
      for (String ext : extensions) {
        this.extensions.add(ext.startsWith(".") ? ext : "." + ext);
      }
    }

    @Override
    public boolean accept(final Path path) {
      Predicate<String> predicate = new Predicate<String>() {
        @Override
        public boolean apply(String input) {
          return path.getName().toLowerCase().endsWith(input);
        }
      };
      return Iterables.any(this.extensions, predicate);
    }
  }

  /**
   * @param rootDirectory root under which global *.properties files are searched.
   * @param fs the {@link FileSystem} used to read pull files.
   * @param javaPropsPullFileExtensions extensions treated as Java properties pull files.
   * @param hoconPullFileExtensions extensions treated as HOCON pull files.
   * @throws IllegalArgumentException if the two extension sets intersect (a file would be ambiguous).
   */
  public PullFileLoader(Path rootDirectory, FileSystem fs, Collection<String> javaPropsPullFileExtensions,
      Collection<String> hoconPullFileExtensions) {
    Set<String> commonExtensions = Sets.intersection(Sets.newHashSet(javaPropsPullFileExtensions),
        Sets.newHashSet(hoconPullFileExtensions));
    Preconditions.checkArgument(commonExtensions.isEmpty(),
        "Java props and HOCON pull file extensions intersect: " + Arrays.toString(commonExtensions.toArray()));
    this.rootDirectory = rootDirectory;
    this.fs = fs;
    this.javaPropsPullFileFilter = new ExtensionFilter(javaPropsPullFileExtensions);
    this.hoconPullFileFilter = new ExtensionFilter(hoconPullFileExtensions);
  }

  /**
   * Load a single pull file.
   * @param path The {@link Path} to the pull file to load, full path
   * @param sysProps A {@link Config} used as fallback.
   * @param loadGlobalProperties if true, will also load at most one *.properties file per directory from the
   *          {@link #rootDirectory} to the pull file {@link Path}.
   * @return The loaded {@link Config}.
   * @throws IOException if the file cannot be read or its extension is not recognized.
   */
  public Config loadPullFile(Path path, Config sysProps, boolean loadGlobalProperties) throws IOException {
    Config fallback = loadGlobalProperties ? loadAncestorGlobalConfigs(path, sysProps) : sysProps;
    if (this.javaPropsPullFileFilter.accept(path)) {
      return loadJavaPropsWithFallback(path, fallback).resolve();
    } else if (this.hoconPullFileFilter.accept(path)) {
      return loadHoconConfigAtPath(path).withFallback(fallback).resolve();
    } else {
      throw new IOException(String.format("Cannot load pull file %s due to unrecognized extension.", path));
    }
  }

  /**
   * Find and load all pull files under a base {@link Path} recursively.
   * @param path base {@link Path} where pull files should be found recursively.
   * @param sysProps A {@link Config} used as fallback.
   * @param loadGlobalProperties if true, will also load at most one *.properties file per directory from the
   *          {@link #rootDirectory} to the pull file {@link Path} for each pull file.
   * @return The loaded {@link Config}s; empty if the ancestor global configs cannot be loaded.
   */
  public Collection<Config> loadPullFilesRecursively(Path path, Config sysProps, boolean loadGlobalProperties) {
    try {
      Config fallback = sysProps;
      if (loadGlobalProperties && PathUtils.isAncestor(this.rootDirectory, path.getParent())) {
        fallback = loadAncestorGlobalConfigs(path.getParent(), fallback);
      }
      return loadPullFilesRecursivelyHelper(path, fallback, loadGlobalProperties);
    } catch (IOException ioe) {
      // Fix: previously this failure was swallowed silently; log it so callers can diagnose
      // why no pull files were returned.
      log.error("Could not load pull files under path: " + path, ioe);
      return Lists.newArrayList();
    }
  }

  /**
   * Recursive worker for {@link #loadPullFilesRecursively}. Failures on individual entries are
   * logged and skipped so one bad pull file does not abort the whole scan.
   */
  private Collection<Config> loadPullFilesRecursivelyHelper(Path path, Config fallback, boolean loadGlobalProperties) {
    List<Config> pullFiles = Lists.newArrayList();
    try {
      if (loadGlobalProperties) {
        // Layer this directory's global *.properties (if any) on top of the inherited fallback.
        fallback = findAndLoadGlobalConfigInDirectory(path, fallback);
      }
      FileStatus[] statuses = this.fs.listStatus(path);
      if (statuses == null) {
        log.error("Path does not exist: " + path);
        return pullFiles;
      }
      for (FileStatus status : statuses) {
        try {
          if (status.isDirectory()) {
            pullFiles.addAll(loadPullFilesRecursivelyHelper(status.getPath(), fallback, loadGlobalProperties));
          } else if (this.javaPropsPullFileFilter.accept(status.getPath())) {
            pullFiles.add(loadJavaPropsWithFallback(status.getPath(), fallback).resolve());
          } else if (this.hoconPullFileFilter.accept(status.getPath())) {
            pullFiles.add(loadHoconConfigAtPath(status.getPath()).withFallback(fallback).resolve());
          }
        } catch (IOException ioe) {
          // Failed to load specific subpath, try with the other subpaths in this directory
          log.error(String.format("Failed to load %s. Skipping.", status.getPath()));
        }
      }
      return pullFiles;
    } catch (IOException ioe) {
      log.error("Could not load properties at path: " + path, ioe);
      return Lists.newArrayList();
    }
  }

  /**
   * Load at most one *.properties files from path and each ancestor of path up to and including {@link #rootDirectory}.
   * Higher directories will serve as fallback for lower directories, and sysProps will serve as fallback for all of them.
   * @throws IOException
   */
  private Config loadAncestorGlobalConfigs(Path path, Config sysProps) throws IOException {
    Config config = sysProps;
    if (!PathUtils.isAncestor(this.rootDirectory, path)) {
      log.warn(String.format("Loaded path %s is not a descendant of root path %s. Cannot load global properties.",
          path, this.rootDirectory));
    } else {
      // Collect path and its ancestors up to the root, then apply them root-first so that
      // deeper directories override shallower ones.
      List<Path> ancestorPaths = Lists.newArrayList();
      while (PathUtils.isAncestor(this.rootDirectory, path)) {
        ancestorPaths.add(path);
        path = path.getParent();
      }
      List<Path> reversedAncestors = Lists.reverse(ancestorPaths);
      for (Path ancestor : reversedAncestors) {
        config = findAndLoadGlobalConfigInDirectory(ancestor, config);
      }
    }
    return config;
  }

  /**
   * Find at most one *.properties file in the input {@link Path} and load it using fallback as fallback.
   * @return The {@link Config} in path with fallback as fallback, or fallback itself if the
   *         directory has no *.properties file or cannot be listed.
   * @throws IOException if more than one *.properties file exists at path.
   */
  private Config findAndLoadGlobalConfigInDirectory(Path path, Config fallback) throws IOException {
    FileStatus[] files = this.fs.listStatus(path, GLOBAL_PROPS_PATH_FILTER);
    if (files == null) {
      log.warn("Could not list files at path " + path);
      // Fix: return the fallback rather than ConfigFactory.empty() so that sysProps and any
      // global properties accumulated from ancestor directories are not silently discarded.
      return fallback;
    }
    if (files.length > 1) {
      throw new IOException("Found more than one global properties file at path " + path);
    }
    return files.length == 1 ? loadJavaPropsWithFallback(files[0].getPath(), fallback) : fallback;
  }

  /**
   * Load a {@link Properties} compatible path using fallback as fallback.
   * @return The {@link Config} in path with fallback as fallback; the pull file's own path is
   *         injected under {@link ConfigurationKeys#JOB_CONFIG_FILE_PATH_KEY}.
   * @throws IOException if the file cannot be read or parsed.
   */
  private Config loadJavaPropsWithFallback(Path propertiesPath, Config fallback) throws IOException {
    PropertiesConfiguration propertiesConfiguration = new PropertiesConfiguration();
    try (InputStreamReader inputStreamReader = new InputStreamReader(this.fs.open(propertiesPath),
        Charsets.UTF_8)) {
      propertiesConfiguration.load(inputStreamReader);
      Config configFromProps =
          ConfigUtils.propertiesToConfig(ConfigurationConverter.getProperties(propertiesConfiguration));
      return ConfigFactory.parseMap(ImmutableMap.of(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY,
          PathUtils.getPathWithoutSchemeAndAuthority(propertiesPath).toString()))
          .withFallback(configFromProps)
          .withFallback(fallback);
    } catch (ConfigurationException ce) {
      // Preserve the parse failure as the cause so callers see the underlying reason.
      throw new IOException(ce);
    }
  }

  /**
   * Load a HOCON pull file at the given path; the pull file's own path is injected under
   * {@link ConfigurationKeys#JOB_CONFIG_FILE_PATH_KEY}.
   * @throws IOException if the file cannot be read.
   */
  private Config loadHoconConfigAtPath(Path path) throws IOException {
    try (InputStream is = fs.open(path);
        Reader reader = new InputStreamReader(is, Charsets.UTF_8)) {
      return ConfigFactory.parseMap(ImmutableMap.of(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY,
          PathUtils.getPathWithoutSchemeAndAuthority(path).toString()))
          .withFallback(ConfigFactory.parseReader(reader, ConfigParseOptions.defaults().setSyntax(ConfigSyntax.CONF)));
    }
  }
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2019 Guardsquare NV
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.optimize.gson;
import proguard.classfile.*;
import proguard.classfile.attribute.*;
import proguard.classfile.attribute.visitor.*;
import proguard.classfile.constant.*;
import proguard.classfile.constant.visitor.ConstantVisitor;
import proguard.classfile.instruction.*;
import proguard.classfile.instruction.visitor.InstructionVisitor;
import proguard.classfile.util.SimplifiedVisitor;
import proguard.evaluation.TracedStack;
import proguard.evaluation.value.InstructionOffsetValue;
import proguard.optimize.evaluation.PartialEvaluator;
import proguard.util.ArrayUtil;
/**
 * This instruction visitor searches for the types that are passed as arguments
 * to a method. The visited instruction is assumed to be the producer that put
 * the types on the stack.
 *
 * It can recognize Class objects and Type objects that were derived from
 * TypeToken.getType().
 *
 * @author Lars Vandenbergh
 */
class TypeArgumentFinder
extends    SimplifiedVisitor
implements InstructionVisitor,
           ConstantVisitor
{
    private final ClassPool        programClassPool;
    private final PartialEvaluator partialEvaluator;

    // Result of the search: the names of the classes used as type arguments,
    // or null if no type argument could be derived.
    String[] typeArgumentClasses;


    /**
     * Creates a new TypeArgumentFinder.
     *
     * @param programClassPool the program class pool used for looking up
     *                         class references.
     * @param partialEvaluator the partial evaluator used to evaluate visited
     *                         code attributes.
     */
    TypeArgumentFinder(ClassPool        programClassPool,
                       PartialEvaluator partialEvaluator)
    {
        this.programClassPool = programClassPool;
        this.partialEvaluator = partialEvaluator;
    }


    // Implementations for InstructionVisitor.

    @Override
    public void visitAnyInstruction(Clazz         clazz,
                                    Method        method,
                                    CodeAttribute codeAttribute,
                                    int           offset,
                                    Instruction   instruction) {}


    @Override
    public void visitVariableInstruction(Clazz               clazz,
                                         Method              method,
                                         CodeAttribute       codeAttribute,
                                         int                 offset,
                                         VariableInstruction variableInstruction)
    {
        if (variableInstruction.canonicalOpcode() == InstructionConstants.OP_ALOAD)
        {
            // Find the operation that stored the loaded Type.
            LastStoreFinder lastStoreFinder = new LastStoreFinder(variableInstruction.variableIndex);
            codeAttribute.instructionsAccept(clazz, method, 0, offset, lastStoreFinder);

            if (lastStoreFinder.lastStore != null)
            {
                // Find out which instruction produced the stored Type.
                TracedStack stackBeforeStore = partialEvaluator.getStackBefore(lastStoreFinder.lastStoreOffset);
                InstructionOffsetValue instructionOffsetValue = stackBeforeStore.getTopProducerValue(0).instructionOffsetValue();

                // Derive the signature of the subclass of TypeToken from which the Type is retrieved.
                TypeTokenSignatureFinder typeTokenFinder = new TypeTokenSignatureFinder();
                for (int offsetIndex = 0; offsetIndex < instructionOffsetValue.instructionOffsetCount(); offsetIndex++)
                {
                    int instructionOffset = instructionOffsetValue.instructionOffset(offsetIndex);
                    codeAttribute.instructionAccept(clazz, method, instructionOffset, typeTokenFinder);
                }

                // Derive the classes from the signature of the TypeToken subclass.
                if (typeTokenFinder.typeTokenSignature != null)
                {
                    typeArgumentClasses = new String[0];
                    Clazz[] referencedClasses = typeTokenFinder.typeTokenSignature.referencedClasses;
                    for (Clazz referencedClass : referencedClasses)
                    {
                        // The TypeToken class itself is part of the signature but is not a
                        // type argument; referenced classes may also be unresolved (null).
                        if (referencedClass != null &&
                            !referencedClass.getName().equals(GsonClassConstants.NAME_TYPE_TOKEN))
                        {
                            typeArgumentClasses = ArrayUtil.add(typeArgumentClasses,
                                                                typeArgumentClasses.length,
                                                                referencedClass.getName());
                        }
                    }
                }
            }
        }
    }


    // Fix: this implementation was missing the @Override annotation.
    @Override
    public void visitConstantInstruction(Clazz               clazz,
                                         Method              method,
                                         CodeAttribute       codeAttribute,
                                         int                 offset,
                                         ConstantInstruction constantInstruction)
    {
        clazz.constantPoolEntryAccept(constantInstruction.constantIndex, this);
    }


    // Implementations for ConstantVisitor.

    @Override
    public void visitAnyConstant(Clazz clazz, Constant constant)
    {
    }


    @Override
    public void visitAnyRefConstant(Clazz clazz, RefConstant refConstant)
    {
        typeArgumentClasses = new String[] { refConstant.getClassName(clazz) };
    }


    @Override
    public void visitClassConstant(Clazz clazz, ClassConstant classConstant)
    {
        typeArgumentClasses = new String[] { classConstant.getName(clazz) };
    }


    /**
     * Finds the last store instruction into a given variable slot that occurs
     * in the visited instruction range.
     */
    private static class LastStoreFinder
    extends    SimplifiedVisitor
    implements InstructionVisitor
    {
        private final int                 variableIndex;
        private       int                 lastStoreOffset;
        private       VariableInstruction lastStore;


        public LastStoreFinder(int variableIndex)
        {
            this.variableIndex = variableIndex;
        }


        // Implementations for InstructionVisitor.

        @Override
        public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction)
        {
        }


        @Override
        public void visitVariableInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, VariableInstruction variableInstruction)
        {
            // Later matches overwrite earlier ones, so the last store wins.
            if (variableInstruction.variableIndex == variableIndex &&
                variableInstruction.canonicalOpcode() == InstructionConstants.OP_ASTORE)
            {
                lastStoreOffset = offset;
                lastStore       = variableInstruction;
            }
        }
    }


    /**
     * Finds the signature attribute of the TypeToken subclass whose getType()
     * method is invoked by the visited instruction.
     */
    private class TypeTokenSignatureFinder
    extends    SimplifiedVisitor
    implements InstructionVisitor,
               ConstantVisitor,
               AttributeVisitor
    {
        private SignatureAttribute typeTokenSignature;


        // Implementations for InstructionVisitor.

        @Override
        public void visitAnyInstruction(Clazz         clazz,
                                        Method        method,
                                        CodeAttribute codeAttribute,
                                        int           offset,
                                        Instruction   instruction)
        {
        }


        @Override
        public void visitConstantInstruction(Clazz               clazz,
                                             Method              method,
                                             CodeAttribute       codeAttribute,
                                             int                 offset,
                                             ConstantInstruction constantInstruction)
        {
            clazz.constantPoolEntryAccept(constantInstruction.constantIndex, this);
        }


        // Implementations for ConstantVisitor.

        @Override
        public void visitAnyConstant(Clazz clazz, Constant constant)
        {
        }


        @Override
        public void visitMethodrefConstant(Clazz             clazz,
                                           MethodrefConstant methodrefConstant)
        {
            // Fix: guard against an unresolved class reference before dereferencing it,
            // consistent with the null check on referenced classes elsewhere in this class.
            if (methodrefConstant.referencedClass != null &&
                methodrefConstant.referencedClass.getName().equals(GsonClassConstants.NAME_TYPE_TOKEN) &&
                methodrefConstant.getName(clazz).equals(GsonClassConstants.METHOD_NAME_GET_TYPE))
            {
                programClassPool.classAccept(methodrefConstant.getClassName(clazz),
                                             new AllAttributeVisitor(this));
            }
        }


        // Implementations for AttributeVisitor.

        @Override
        public void visitAnyAttribute(Clazz clazz, Attribute attribute)
        {
        }


        @Override
        public void visitSignatureAttribute(Clazz              clazz,
                                            SignatureAttribute signatureAttribute)
        {
            typeTokenSignature = signatureAttribute;
        }
    }
}
| |
/*******************************************************************************
* BEGIN COPYRIGHT NOTICE
*
* Copyright [2011] [Rodrigo Lemos]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* END COPYRIGHT NOTICE
******************************************************************************/
package objectexplorer;
import objectexplorer.ObjectExplorer.Feature;
import objectexplorer.ObjectVisitor.Traversal;
import org.junit.Test;
import org.mockito.InOrder;
import java.util.EnumSet;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.Matchers.hasProperty;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ObjectExplorer.exploreObject} using a mocked
 * {@code ObjectVisitor}. Each test builds a small Inode/Folder/File graph,
 * explores it, and verifies the exact visit sequence with Mockito's InOrder.
 *
 * NOTE(review): in testExploreFolderWith1FileWithContents the byte-array
 * elements are verified in reverse index order (2, 1, 0) — presumably this
 * mirrors the explorer's traversal order; confirm against ObjectExplorer.
 */
@SuppressWarnings("unchecked")
public class ObjectExplorerUnitTest {

    @Test
    public void testExploreNull() {
        // A null root is not visited; only the visitor's result() is consulted.
        ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        final Object result = new Object();
        when(visitor.result()).thenReturn(result);
        Object root = null;
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.noneOf(Feature.class)),
                is(sameInstance(result))
        );
        verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }

    @Test
    public void testExplorePlainInode() {
        // A leaf object with no fields yields exactly one visit: the root itself.
        ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        final Object result = new Object();
        when(visitor.visit((Chain) anyObject())).thenReturn(Traversal.EXPLORE);
        when(visitor.result()).thenReturn(result);
        Inode root = new Inode();
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.noneOf(Feature.class)),
                is(sameInstance(result))
        );
        InOrder inOrder = inOrder(visitor);
        inOrder.verify(visitor).visit((Chain) argThat(allOf(
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root))
        )));
        inOrder.verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }

    @Test
    public void testExploreEmptyFolder() {
        // A folder with an empty contents array: root visit, then the array field itself.
        final ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        final Object result = new Object();
        when(visitor.visit((Chain) anyObject())).thenReturn(Traversal.EXPLORE);
        when(visitor.result()).thenReturn(result);
        final Folder root = new Folder();
        root.contents = new Inode[]{};
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.noneOf(Feature.class)),
                is(sameInstance(result))
        );
        final InOrder inOrder = inOrder(visitor);
        inOrder.verify(visitor).visit((Chain) argThat(allOf(
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root.contents))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }

    @Test
    public void testExploreFolderWith1NullSubinode() {
        // With VISIT_NULL enabled, a null array element still produces a visit.
        final ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        final Object result = new Object();
        when(visitor.visit((Chain) anyObject())).thenReturn(Traversal.EXPLORE);
        when(visitor.result()).thenReturn(result);
        final Folder root = new Folder();
        root.contents = new Inode[]{null};
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.of(Feature.VISIT_NULL)),
                is(sameInstance(result))
        );
        final InOrder inOrder = inOrder(visitor);
        inOrder.verify(visitor).visit((Chain) argThat(allOf(
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root.contents))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", nullValue(Inode.class)),
                hasProperty("parent", hasProperty("value", sameInstance(root.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(0)))
        )));
        inOrder.verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }

    @Test
    public void testExploreFolderWith1FileWithoutContents() {
        // With all features on, a File with null contents is descended into:
        // its null byte[] field and its int field are each visited.
        final ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        final Object result = new Object();
        when(visitor.visit((Chain) anyObject())).thenReturn(Traversal.EXPLORE);
        when(visitor.result()).thenReturn(result);
        Folder root = new Folder();
        File file = new File();
        root.contents = new Inode[]{file};
        file.contents = null;
        file.created = 42;
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.allOf(Feature.class)),
                is(sameInstance(result))
        );
        final InOrder inOrder = inOrder(visitor);
        inOrder.verify(visitor).visit((Chain) argThat(allOf(
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root.contents))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(file)),
                hasProperty("parent", hasProperty("value", sameInstance(root.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(0)))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", nullValue(byte[].class)),
                hasProperty("parent", hasProperty("value", sameInstance(file)))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", is(equalTo(file.created))),
                hasProperty("parent", hasProperty("value", sameInstance(file)))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("created"))))
        )));
        inOrder.verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }

    @Test
    public void testExploreFolderWith1FileWithContents() {
        // Full traversal including a populated byte[]; elements are verified
        // in the order 2, 1, 0 (see class-level note).
        final Object result = new Object();
        final ObjectVisitor<Object> visitor = mock(ObjectVisitor.class);
        when(visitor.visit((Chain) anyObject())).thenReturn(Traversal.EXPLORE);
        when(visitor.result()).thenReturn(result);
        Folder root = new Folder();
        File file = new File();
        root.contents = new Inode[]{file};
        file.contents = new byte[]{0x0, 0x1, 0x2};
        file.created = 0x42;
        assertThat(
                ObjectExplorer.exploreObject(root, visitor, EnumSet.allOf(Feature.class)),
                is(sameInstance(result))
        );
        final InOrder inOrder = inOrder(visitor);
        inOrder.verify(visitor).visit((Chain) argThat(allOf(
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(root.contents))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(file)),
                hasProperty("parent", hasProperty("value", sameInstance(root.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(0)))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", is(equalTo(file.created))),
                hasProperty("parent", hasProperty("value", sameInstance(file)))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("created"))))
        )));
        inOrder.verify(visitor).visit((Chain.FieldChain) argThat(allOf(
                is(instanceOf(Chain.FieldChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", sameInstance(file.contents)),
                hasProperty("parent", hasProperty("value", sameInstance(file)))
                // won't work since hamcrest can't access Chain.FieldChain
                // hasProperty("field", hasProperty("name", is(equalTo("contents"))))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", is(equalTo(file.contents[2]))),
                hasProperty("parent", hasProperty("value", sameInstance(file.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(2)))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", is(equalTo(file.contents[1]))),
                hasProperty("parent", hasProperty("value", sameInstance(file.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(1)))
        )));
        inOrder.verify(visitor).visit((Chain.ArrayIndexChain) argThat(allOf(
                is(instanceOf(Chain.ArrayIndexChain.class)),
                hasProperty("root", sameInstance(root)),
                hasProperty("value", is(equalTo(file.contents[0]))),
                hasProperty("parent", hasProperty("value", sameInstance(file.contents)))
                // won't work since hamcrest can't access Chain.ArrayIndexChain
                // hasProperty("arrayIndex", is(equalTo(0)))
        )));
        inOrder.verify(visitor).result();
        verifyNoMoreInteractions(visitor);
    }
}
/**
* *************************************************************************
*/
/**
 * Base type for entries in the test file-system model. Carries no state of its
 * own; exists so {@code Folder} and {@code File} share a common element type
 * for {@code Folder.contents}.
 */
class Inode {
}
/** A directory node in the test model: holds an array of child inodes. */
class Folder extends Inode {
    // Child entries (files or sub-folders); traversed element-by-element by the
    // object explorer under test.
    public Inode[] contents;
}
/** A leaf node in the test model: raw byte contents plus a creation stamp. */
class File extends Inode {
    // Raw data of the file; the explorer visits each byte individually.
    public byte[] contents;
    // Creation stamp; an arbitrary int for test purposes (set to 0x42 in tests).
    public int created;
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.group;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.DefaultInput;
import com.google.gerrit.extensions.restapi.MethodNotAllowedException;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.extensions.restapi.UnprocessableEntityException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.AccountGroupMember;
import com.google.gerrit.reviewdb.client.AccountGroupMemberAudit;
import com.google.gerrit.reviewdb.client.AuthType;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AccountInfo;
import com.google.gerrit.server.account.AccountManager;
import com.google.gerrit.server.account.AccountResolver;
import com.google.gerrit.server.account.AccountsCollection;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.GroupControl;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.group.AddMembers.Input;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.util.List;
import java.util.Map;
public class AddMembers implements RestModifyView<GroupResource, Input> {
public static class Input {
@DefaultInput
String _oneMember;
List<String> members;
public static Input fromMembers(List<String> members) {
Input in = new Input();
in.members = members;
return in;
}
static Input init(Input in) {
if (in == null) {
in = new Input();
}
if (in.members == null) {
in.members = Lists.newArrayListWithCapacity(1);
}
if (!Strings.isNullOrEmpty(in._oneMember)) {
in.members.add(in._oneMember);
}
return in;
}
}
private final AccountManager accountManager;
private final AuthType authType;
private final Provider<AccountsCollection> accounts;
private final AccountResolver accountResolver;
private final AccountCache accountCache;
private final AccountInfo.Loader.Factory infoFactory;
private final ReviewDb db;
@Inject
AddMembers(AccountManager accountManager,
AuthConfig authConfig,
Provider<AccountsCollection> accounts,
AccountResolver accountResolver,
AccountCache accountCache,
AccountInfo.Loader.Factory infoFactory,
ReviewDb db) {
this.accountManager = accountManager;
this.authType = authConfig.getAuthType();
this.accounts = accounts;
this.accountResolver = accountResolver;
this.accountCache = accountCache;
this.infoFactory = infoFactory;
this.db = db;
}
@Override
public List<AccountInfo> apply(GroupResource resource, Input input)
throws AuthException, MethodNotAllowedException,
UnprocessableEntityException, OrmException {
AccountGroup internalGroup = resource.toAccountGroup();
if (internalGroup == null) {
throw new MethodNotAllowedException();
}
input = Input.init(input);
GroupControl control = resource.getControl();
Map<Account.Id, AccountGroupMember> newAccountGroupMembers = Maps.newHashMap();
List<AccountGroupMemberAudit> newAccountGroupMemberAudits = Lists.newLinkedList();
List<AccountInfo> result = Lists.newLinkedList();
Account.Id me = ((IdentifiedUser) control.getCurrentUser()).getAccountId();
AccountInfo.Loader loader = infoFactory.create(true);
for (String nameOrEmail : input.members) {
Account a = findAccount(nameOrEmail);
if (!a.isActive()) {
throw new UnprocessableEntityException(String.format(
"Account Inactive: %s", nameOrEmail));
}
if (!control.canAddMember(a.getId())) {
throw new AuthException("Cannot add member: " + a.getFullName());
}
if (!newAccountGroupMembers.containsKey(a.getId())) {
AccountGroupMember.Key key =
new AccountGroupMember.Key(a.getId(), internalGroup.getId());
AccountGroupMember m = db.accountGroupMembers().get(key);
if (m == null) {
m = new AccountGroupMember(key);
newAccountGroupMembers.put(m.getAccountId(), m);
newAccountGroupMemberAudits.add(new AccountGroupMemberAudit(m, me));
}
}
result.add(loader.get(a.getId()));
}
db.accountGroupMembersAudit().insert(newAccountGroupMemberAudits);
db.accountGroupMembers().insert(newAccountGroupMembers.values());
for (AccountGroupMember m : newAccountGroupMembers.values()) {
accountCache.evict(m.getAccountId());
}
loader.fill();
return result;
}
private Account findAccount(String nameOrEmail) throws AuthException,
UnprocessableEntityException, OrmException {
try {
return accounts.get().parse(nameOrEmail).getAccount();
} catch (UnprocessableEntityException e) {
// might be because the account does not exist or because the account is
// not visible
switch (authType) {
case HTTP_LDAP:
case CLIENT_SSL_CERT_LDAP:
case LDAP:
if (accountResolver.find(nameOrEmail) == null) {
// account does not exist, try to create it
return createAccountByLdap(nameOrEmail);
}
break;
default:
}
throw e;
}
}
private Account createAccountByLdap(String user) {
if (!user.matches(Account.USER_NAME_PATTERN)) {
return null;
}
try {
AuthRequest req = AuthRequest.forUser(user);
req.setSkipAuthentication(true);
return accountCache.get(accountManager.authenticate(req).getAccountId())
.getAccount();
} catch (AccountException e) {
return null;
}
}
static class PutMember implements RestModifyView<GroupResource, PutMember.Input> {
static class Input {
}
private final Provider<AddMembers> put;
private final String id;
PutMember(Provider<AddMembers> put, String id) {
this.put = put;
this.id = id;
}
@Override
public Object apply(GroupResource resource, PutMember.Input input)
throws AuthException, MethodNotAllowedException,
UnprocessableEntityException, OrmException {
AddMembers.Input in = new AddMembers.Input();
in._oneMember = id;
List<AccountInfo> list = put.get().apply(resource, in);
if (list.size() == 1) {
return list.get(0);
}
throw new IllegalStateException();
}
}
static class UpdateMember implements RestModifyView<MemberResource, PutMember.Input> {
static class Input {
}
private final Provider<GetMember> get;
@Inject
UpdateMember(Provider<GetMember> get) {
this.get = get;
}
@Override
public Object apply(MemberResource resource, PutMember.Input input)
throws OrmException {
// Do nothing, the user is already a member.
return get.get().apply(resource);
}
}
}
| |
package ca.uhn.fhir.rest.client;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2017 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.client.api.IHttpClient;
import ca.uhn.fhir.rest.client.api.IRestfulClient;
import ca.uhn.fhir.rest.client.exceptions.FhirClientConnectionException;
import ca.uhn.fhir.rest.client.exceptions.FhirClientInappropriateForServerException;
import ca.uhn.fhir.rest.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.util.FhirTerser;
/**
* Base class for a REST client factory implementation
*/
public abstract class RestfulClientFactory implements IRestfulClientFactory {

    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RestfulClientFactory.class);

    // Timeouts and pool sizes start at interface-provided defaults; each setter
    // below resets the underlying HTTP client so new values take effect.
    private int myConnectionRequestTimeout = DEFAULT_CONNECTION_REQUEST_TIMEOUT;
    private int myConnectTimeout = DEFAULT_CONNECT_TIMEOUT;
    private FhirContext myContext;
    // One invocation-handler factory per annotated client interface, built
    // lazily in newClient() and cached for reuse.
    private Map<Class<? extends IRestfulClient>, ClientInvocationHandlerFactory> myInvocationHandlers = new HashMap<Class<? extends IRestfulClient>, ClientInvocationHandlerFactory>();
    private ServerValidationModeEnum myServerValidationMode = DEFAULT_SERVER_VALIDATION_MODE;
    private int mySocketTimeout = DEFAULT_SOCKET_TIMEOUT;
    // Normalized base URLs that already passed conformance validation (mode ONCE).
    private Set<String> myValidatedServerBaseUrls = Collections.synchronizedSet(new HashSet<String>());
    private String myProxyUsername;
    private String myProxyPassword;
    private int myPoolMaxTotal = DEFAULT_POOL_MAX;
    private int myPoolMaxPerRoute = DEFAULT_POOL_MAX_PER_ROUTE;

    /**
     * Constructor
     */
    public RestfulClientFactory() {
    }

    /**
     * Constructor
     *
     * @param theFhirContext
     *            The context
     */
    public RestfulClientFactory(FhirContext theFhirContext) {
        myContext = theFhirContext;
    }

    @Override
    public int getConnectionRequestTimeout() {
        return myConnectionRequestTimeout;
    }

    @Override
    public int getConnectTimeout() {
        return myConnectTimeout;
    }

    /**
     * Returns the username used to authenticate with the HTTP proxy.
     *
     * @return the proxy username, or {@code null} if none was set
     */
    protected String getProxyUsername() {
        return myProxyUsername;
    }

    /**
     * Returns the password used to authenticate with the HTTP proxy.
     *
     * @return the proxy password, or {@code null} if none was set
     */
    protected String getProxyPassword() {
        return myProxyPassword;
    }

    @Override
    public void setProxyCredentials(String theUsername, String thePassword) {
        myProxyUsername = theUsername;
        myProxyPassword = thePassword;
    }

    @Override
    public ServerValidationModeEnum getServerValidationMode() {
        return myServerValidationMode;
    }

    @Override
    public int getSocketTimeout() {
        return mySocketTimeout;
    }

    @Override
    public int getPoolMaxTotal() {
        return myPoolMaxTotal;
    }

    @Override
    public int getPoolMaxPerRoute() {
        return myPoolMaxPerRoute;
    }

    /**
     * Creates a dynamic proxy implementing the given client interface, backed
     * by the supplied invocation handler.
     */
    @SuppressWarnings("unchecked")
    private <T extends IRestfulClient> T instantiateProxy(Class<T> theClientType, InvocationHandler theInvocationHandler) {
        T proxy = (T) Proxy.newProxyInstance(theClientType.getClassLoader(), new Class[] { theClientType }, theInvocationHandler);
        return proxy;
    }

    /**
     * Instantiates a new client instance
     *
     * @param theClientType
     *            The client type, which is an interface type to be instantiated
     * @param theServerBase
     *            The URL of the base for the restful FHIR server to connect to
     * @return A newly created client
     * @throws ConfigurationException
     *             If the interface type is not an interface
     */
    @Override
    public synchronized <T extends IRestfulClient> T newClient(Class<T> theClientType, String theServerBase) {
        validateConfigured();
        if (!theClientType.isInterface()) {
            throw new ConfigurationException(theClientType.getCanonicalName() + " is not an interface");
        }
        // Method bindings for a client interface are computed once and cached;
        // only a fresh per-client invocation handler is created on each call.
        ClientInvocationHandlerFactory invocationHandler = myInvocationHandlers.get(theClientType);
        if (invocationHandler == null) {
            IHttpClient httpClient = getHttpClient(theServerBase);
            invocationHandler = new ClientInvocationHandlerFactory(httpClient, myContext, theServerBase, theClientType);
            for (Method nextMethod : theClientType.getMethods()) {
                BaseMethodBinding<?> binding = BaseMethodBinding.bindMethod(nextMethod, myContext, null);
                invocationHandler.addBinding(nextMethod, binding);
            }
            myInvocationHandlers.put(theClientType, invocationHandler);
        }
        T proxy = instantiateProxy(theClientType, invocationHandler.newInvocationHandler(this));
        return proxy;
    }

    /**
     * Called automatically before the first use of this factory to ensure that
     * the configuration is sane. Subclasses may override, but should also call
     * <code>super.validateConfigured()</code>
     */
    protected void validateConfigured() {
        if (getFhirContext() == null) {
            throw new IllegalStateException(getClass().getSimpleName() + " does not have FhirContext defined. This must be set via " + getClass().getSimpleName() + "#setFhirContext(FhirContext)");
        }
    }

    @Override
    public synchronized IGenericClient newGenericClient(String theServerBase) {
        validateConfigured();
        IHttpClient httpClient = getHttpClient(theServerBase);
        return new GenericClient(myContext, httpClient, theServerBase, this);
    }

    /**
     * Runs server-base validation according to the configured
     * {@link ServerValidationModeEnum}: never, or once per distinct base URL.
     */
    @Override
    public void validateServerBaseIfConfiguredToDoSo(String theServerBase, IHttpClient theHttpClient, BaseClient theClient) {
        String serverBase = normalizeBaseUrlForMap(theServerBase);
        switch (getServerValidationMode()) {
        case NEVER:
            break;
        case ONCE:
            // validateServerBase() records the URL on success, so later calls
            // for the same normalized base are skipped.
            if (!myValidatedServerBaseUrls.contains(serverBase)) {
                validateServerBase(serverBase, theHttpClient, theClient);
            }
            break;
        }
    }

    // Normalizes a base URL to always end with "/" so cache lookups against
    // myValidatedServerBaseUrls are consistent regardless of caller style.
    private String normalizeBaseUrlForMap(String theServerBase) {
        String serverBase = theServerBase;
        if (!serverBase.endsWith("/")) {
            serverBase = serverBase + "/";
        }
        return serverBase;
    }

    @Override
    public synchronized void setConnectionRequestTimeout(int theConnectionRequestTimeout) {
        myConnectionRequestTimeout = theConnectionRequestTimeout;
        resetHttpClient();
    }

    @Override
    public synchronized void setConnectTimeout(int theConnectTimeout) {
        myConnectTimeout = theConnectTimeout;
        resetHttpClient();
    }

    /**
     * Sets the context associated with this client factory. Must not be called more than once.
     */
    public void setFhirContext(FhirContext theContext) {
        if (myContext != null && myContext != theContext) {
            throw new IllegalStateException("RestfulClientFactory instance is already associated with one FhirContext. RestfulClientFactory instances can not be shared.");
        }
        myContext = theContext;
    }

    /**
     * Return the fhir context
     *
     * @return the fhir context
     */
    public FhirContext getFhirContext() {
        return myContext;
    }

    @Override
    public void setServerValidationMode(ServerValidationModeEnum theServerValidationMode) {
        Validate.notNull(theServerValidationMode, "theServerValidationMode may not be null");
        myServerValidationMode = theServerValidationMode;
    }

    @Override
    public synchronized void setSocketTimeout(int theSocketTimeout) {
        mySocketTimeout = theSocketTimeout;
        resetHttpClient();
    }

    @Override
    public synchronized void setPoolMaxTotal(int thePoolMaxTotal) {
        myPoolMaxTotal = thePoolMaxTotal;
        resetHttpClient();
    }

    @Override
    public synchronized void setPoolMaxPerRoute(int thePoolMaxPerRoute) {
        myPoolMaxPerRoute = thePoolMaxPerRoute;
        resetHttpClient();
    }

    /**
     * Fetches the server's conformance/CapabilityStatement resource and checks
     * that the server's declared FHIR version is compatible with this
     * context's version. On success, records the base URL so mode ONCE skips
     * future validations.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void validateServerBase(String theServerBase, IHttpClient theHttpClient, BaseClient theClient) {
        // Build a throwaway client that mirrors the caller's encoding and
        // interceptors but skips conformance validation itself (avoids recursion).
        GenericClient client = new GenericClient(myContext, theHttpClient, theServerBase, this);
        client.setEncoding(theClient.getEncoding());
        for (IClientInterceptor interceptor : theClient.getInterceptors()) {
            client.registerInterceptor(interceptor);
        }
        client.setDontValidateConformance(true);
        IBaseResource conformance;
        try {
            // DSTU3 renamed "Conformance" to "CapabilityStatement"; fall back
            // to the legacy name when the context predates the rename.
            String capabilityStatementResourceName = "CapabilityStatement";
            if (myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) {
                capabilityStatementResourceName = "Conformance";
            }
            @SuppressWarnings("rawtypes")
            Class implementingClass;
            try {
                implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass();
            } catch (DataFormatException e) {
                // Context doesn't know the new name: retry with the legacy one.
                if (!myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) {
                    capabilityStatementResourceName = "Conformance";
                    implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass();
                } else {
                    throw e;
                }
            }
            try {
                conformance = (IBaseResource) client.fetchConformance().ofType(implementingClass).execute();
            } catch (FhirClientConnectionException e) {
                // A DSTU3+ client talking to an older server may fail to parse
                // the response; retry once with the legacy resource type.
                if (!myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3) && e.getCause() instanceof DataFormatException) {
                    capabilityStatementResourceName = "Conformance";
                    implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass();
                    conformance = (IBaseResource) client.fetchConformance().ofType(implementingClass).execute();
                } else {
                    throw e;
                }
            }
        } catch (FhirClientConnectionException e) {
            String msg = myContext.getLocalizer().getMessage(RestfulClientFactory.class, "failedToRetrieveConformance", theServerBase + Constants.URL_TOKEN_METADATA);
            throw new FhirClientConnectionException(msg, e);
        }
        FhirTerser t = myContext.newTerser();
        String serverFhirVersionString = null;
        Object value = t.getSingleValueOrNull(conformance, "fhirVersion");
        if (value instanceof IPrimitiveType) {
            serverFhirVersionString = IPrimitiveType.class.cast(value).getValueAsString();
        }
        FhirVersionEnum serverFhirVersionEnum = null;
        if (StringUtils.isBlank(serverFhirVersionString)) {
            // we'll be lenient and accept this
        } else {
            // serverFhirVersionString is non-null here: isBlank() above filters
            // out null/empty values. (Replaces a stale FIXME about null access.)
            if (serverFhirVersionString.startsWith("0.80") || serverFhirVersionString.startsWith("0.0.8")) {
                serverFhirVersionEnum = FhirVersionEnum.DSTU1;
            } else if (serverFhirVersionString.startsWith("0.4")) {
                serverFhirVersionEnum = FhirVersionEnum.DSTU2;
            } else if (serverFhirVersionString.startsWith("0.5")) {
                serverFhirVersionEnum = FhirVersionEnum.DSTU2;
            } else {
                // we'll be lenient and accept this
                ourLog.debug("Server conformance statement indicates unknown FHIR version: {}", serverFhirVersionString);
            }
        }
        if (serverFhirVersionEnum != null) {
            FhirVersionEnum contextFhirVersion = myContext.getVersion().getVersion();
            if (!contextFhirVersion.isEquivalentTo(serverFhirVersionEnum)) {
                throw new FhirClientInappropriateForServerException(myContext.getLocalizer().getMessage(RestfulClientFactory.class, "wrongVersionInConformance", theServerBase + Constants.URL_TOKEN_METADATA, serverFhirVersionString, serverFhirVersionEnum, contextFhirVersion));
            }
        }
        // Remember this base URL so validation mode ONCE can skip it next time.
        myValidatedServerBaseUrls.add(normalizeBaseUrlForMap(theServerBase));
    }

    /**
     * @deprecated Use {@link #getServerValidationMode()} instead.
     */
    @Deprecated // override deprecated method
    @Override
    public ServerValidationModeEnum getServerValidationModeEnum() {
        return getServerValidationMode();
    }

    /**
     * @deprecated Use {@link #setServerValidationMode(ServerValidationModeEnum)} instead.
     */
    @Deprecated // override deprecated method
    @Override
    public void setServerValidationModeEnum(ServerValidationModeEnum theServerValidationMode) {
        setServerValidationMode(theServerValidationMode);
    }

    /**
     * Get the http client for the given server base
     *
     * @param theServerBase the server base
     * @return the http client
     */
    protected abstract IHttpClient getHttpClient(String theServerBase);

    /**
     * Reset the http client. This method is used when parameters have been set and a
     * new http client needs to be created
     */
    protected abstract void resetHttpClient();
}
| |
//
// Diese Datei wurde mit der JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.7 generiert
// Siehe <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// nderungen an dieser Datei gehen bei einer Neukompilierung des Quellschemas verloren.
// Generiert: 2016.04.19 um 09:36:45 AM CEST
//
package de.immobilienscout24.rest.schema.offer.realestates._1;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import de.immobilienscout24.rest.schema.common._1.Adapter1;
import de.immobilienscout24.rest.schema.common._1.CourtageInfo;
import de.immobilienscout24.rest.schema.common._1.HouseTypeBuildingType;
import de.immobilienscout24.rest.schema.common._1.HouseTypeConstructionMethodType;
import de.immobilienscout24.rest.schema.common._1.HouseTypeEnergyStandardType;
import de.immobilienscout24.rest.schema.common._1.HouseTypeStageOfCompletionType;
import de.immobilienscout24.rest.schema.common._1.Price;
/**
 * Properties for the real-estate type "Typenhaeuser" (catalogue/type houses).
 *
 * <p>Java class for the HouseType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained
 * within this class.
 *
 * <pre>
 * &lt;complexType name="HouseType">
 *   &lt;complexContent>
 *     &lt;extension base="{http://rest.immobilienscout24.de/schema/offer/realestates/1.0}RealEstate">
 *       &lt;sequence>
 *         &lt;group ref="{http://rest.immobilienscout24.de/schema/common/1.0}ExtendedHouseTypeGroup"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "HouseType", propOrder = {
    "price",
    "livingSpace",
    "totalArea",
    "baseArea",
    "numberOfRooms",
    "courtage",
    "constructionMethod",
    "buildingType",
    "stageOfCompletionType",
    "energyStandardType",
    "uValue",
    "typeInformationNote",
    "modelInformationNote",
    "contructionPriceInformationNote",
    "floorInformationNote",
    "roofInformationNote"
})
public class HouseType
    extends RealEstate
{

    @XmlElement(required = true)
    protected Price price;
    protected double livingSpace;
    protected Double totalArea;
    protected Double baseArea;
    protected Double numberOfRooms;
    protected CourtageInfo courtage;
    protected HouseTypeConstructionMethodType constructionMethod;
    @XmlElement(required = true)
    protected HouseTypeBuildingType buildingType;
    protected HouseTypeStageOfCompletionType stageOfCompletionType;
    protected HouseTypeEnergyStandardType energyStandardType;
    protected Double uValue;
    // Free-text notes are run through Adapter1 (string normalization adapter
    // from the common schema) during (un)marshalling.
    @XmlJavaTypeAdapter(Adapter1.class)
    protected String typeInformationNote;
    @XmlJavaTypeAdapter(Adapter1.class)
    protected String modelInformationNote;
    @XmlJavaTypeAdapter(Adapter1.class)
    protected String contructionPriceInformationNote;
    @XmlJavaTypeAdapter(Adapter1.class)
    protected String floorInformationNote;
    @XmlJavaTypeAdapter(Adapter1.class)
    protected String roofInformationNote;

    /**
     * Gets the value of the price property.
     *
     * @return
     *     possible object is
     *     {@link Price }
     */
    public Price getPrice() {
        return price;
    }

    /**
     * Sets the value of the price property.
     *
     * @param value
     *     allowed object is
     *     {@link Price }
     */
    public void setPrice(Price value) {
        this.price = value;
    }

    /**
     * Gets the value of the livingSpace property.
     */
    public double getLivingSpace() {
        return livingSpace;
    }

    /**
     * Sets the value of the livingSpace property.
     */
    public void setLivingSpace(double value) {
        this.livingSpace = value;
    }

    /**
     * Gets the value of the totalArea property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     */
    public Double getTotalArea() {
        return totalArea;
    }

    /**
     * Sets the value of the totalArea property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     */
    public void setTotalArea(Double value) {
        this.totalArea = value;
    }

    /**
     * Gets the value of the baseArea property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     */
    public Double getBaseArea() {
        return baseArea;
    }

    /**
     * Sets the value of the baseArea property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     */
    public void setBaseArea(Double value) {
        this.baseArea = value;
    }

    /**
     * Gets the value of the numberOfRooms property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     */
    public Double getNumberOfRooms() {
        return numberOfRooms;
    }

    /**
     * Sets the value of the numberOfRooms property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     */
    public void setNumberOfRooms(Double value) {
        this.numberOfRooms = value;
    }

    /**
     * Gets the value of the courtage property.
     *
     * @return
     *     possible object is
     *     {@link CourtageInfo }
     */
    public CourtageInfo getCourtage() {
        return courtage;
    }

    /**
     * Sets the value of the courtage property.
     *
     * @param value
     *     allowed object is
     *     {@link CourtageInfo }
     */
    public void setCourtage(CourtageInfo value) {
        this.courtage = value;
    }

    /**
     * Gets the value of the constructionMethod property.
     *
     * @return
     *     possible object is
     *     {@link HouseTypeConstructionMethodType }
     */
    public HouseTypeConstructionMethodType getConstructionMethod() {
        return constructionMethod;
    }

    /**
     * Sets the value of the constructionMethod property.
     *
     * @param value
     *     allowed object is
     *     {@link HouseTypeConstructionMethodType }
     */
    public void setConstructionMethod(HouseTypeConstructionMethodType value) {
        this.constructionMethod = value;
    }

    /**
     * Gets the value of the buildingType property.
     *
     * @return
     *     possible object is
     *     {@link HouseTypeBuildingType }
     */
    public HouseTypeBuildingType getBuildingType() {
        return buildingType;
    }

    /**
     * Sets the value of the buildingType property.
     *
     * @param value
     *     allowed object is
     *     {@link HouseTypeBuildingType }
     */
    public void setBuildingType(HouseTypeBuildingType value) {
        this.buildingType = value;
    }

    /**
     * Gets the value of the stageOfCompletionType property.
     *
     * @return
     *     possible object is
     *     {@link HouseTypeStageOfCompletionType }
     */
    public HouseTypeStageOfCompletionType getStageOfCompletionType() {
        return stageOfCompletionType;
    }

    /**
     * Sets the value of the stageOfCompletionType property.
     *
     * @param value
     *     allowed object is
     *     {@link HouseTypeStageOfCompletionType }
     */
    public void setStageOfCompletionType(HouseTypeStageOfCompletionType value) {
        this.stageOfCompletionType = value;
    }

    /**
     * Gets the value of the energyStandardType property.
     *
     * @return
     *     possible object is
     *     {@link HouseTypeEnergyStandardType }
     */
    public HouseTypeEnergyStandardType getEnergyStandardType() {
        return energyStandardType;
    }

    /**
     * Sets the value of the energyStandardType property.
     *
     * @param value
     *     allowed object is
     *     {@link HouseTypeEnergyStandardType }
     */
    public void setEnergyStandardType(HouseTypeEnergyStandardType value) {
        this.energyStandardType = value;
    }

    /**
     * Gets the value of the uValue property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     */
    public Double getUValue() {
        return uValue;
    }

    /**
     * Sets the value of the uValue property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     */
    public void setUValue(Double value) {
        this.uValue = value;
    }

    /**
     * Gets the value of the typeInformationNote property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getTypeInformationNote() {
        return typeInformationNote;
    }

    /**
     * Sets the value of the typeInformationNote property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setTypeInformationNote(String value) {
        this.typeInformationNote = value;
    }

    /**
     * Gets the value of the modelInformationNote property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getModelInformationNote() {
        return modelInformationNote;
    }

    /**
     * Sets the value of the modelInformationNote property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setModelInformationNote(String value) {
        this.modelInformationNote = value;
    }

    /**
     * Gets the value of the contructionPriceInformationNote property.
     * (Property name keeps the schema's "contruction" spelling.)
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getContructionPriceInformationNote() {
        return contructionPriceInformationNote;
    }

    /**
     * Sets the value of the contructionPriceInformationNote property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setContructionPriceInformationNote(String value) {
        this.contructionPriceInformationNote = value;
    }

    /**
     * Gets the value of the floorInformationNote property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getFloorInformationNote() {
        return floorInformationNote;
    }

    /**
     * Sets the value of the floorInformationNote property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setFloorInformationNote(String value) {
        this.floorInformationNote = value;
    }

    /**
     * Gets the value of the roofInformationNote property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getRoofInformationNote() {
        return roofInformationNote;
    }

    /**
     * Sets the value of the roofInformationNote property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setRoofInformationNote(String value) {
        this.roofInformationNote = value;
    }

}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.loader.data;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Semaphore;
/**
* {@link RandomAccessData} implementation backed by a {@link RandomAccessFile}.
*
* @author Phillip Webb
*/
public class RandomAccessDataFile implements RandomAccessData {
    // Default maximum number of concurrent reads on the underlying file.
    private static final int DEFAULT_CONCURRENT_READS = 4;

    // The underlying file on disk.
    private final File file;

    // Pool of open RandomAccessFile handles, shared by all subsections.
    private final FilePool filePool;

    // Offset of this (sub)section within the underlying file.
    private final long offset;

    // Length of this (sub)section in bytes.
    private final long length;
    /**
     * Create a new {@link RandomAccessDataFile} backed by the specified file,
     * using the default limit of {@value #DEFAULT_CONCURRENT_READS} concurrent
     * reads.
     * @param file the underlying file
     * @throws IllegalArgumentException if the file is null or does not exist
     * @see #RandomAccessDataFile(File, int)
     */
    public RandomAccessDataFile(File file) {
        this(file, DEFAULT_CONCURRENT_READS);
    }
    /**
     * Create a new {@link RandomAccessDataFile} backed by the specified file.
     * The instance represents the whole file (offset 0, length = file length).
     * @param file the underlying file
     * @param concurrentReads the maximum number of concurrent reads allowed on the
     * underlying file before blocking
     * @throws IllegalArgumentException if the file is null or does not exist
     * @see #RandomAccessDataFile(File)
     */
    public RandomAccessDataFile(File file, int concurrentReads) {
        // Null check must precede the exists() check to avoid an NPE.
        if (file == null) {
            throw new IllegalArgumentException("File must not be null");
        }
        if (!file.exists()) {
            throw new IllegalArgumentException("File must exist");
        }
        this.file = file;
        this.filePool = new FilePool(concurrentReads);
        this.offset = 0L;
        this.length = file.length();
    }
/**
* Private constructor used to create a {@link #getSubsection(long, long) subsection}.
* @param file the underlying file
* @param pool the underlying pool
* @param offset the offset of the section
* @param length the length of the section
*/
private RandomAccessDataFile(File file, FilePool pool, long offset, long length) {
this.file = file;
this.filePool = pool;
this.offset = offset;
this.length = length;
}
/**
* Returns the underlying File.
* @return the underlying file
*/
public File getFile() {
return this.file;
}
@Override
public InputStream getInputStream(ResourceAccess access) throws IOException {
return new DataInputStream(access);
}
@Override
public RandomAccessData getSubsection(long offset, long length) {
if (offset < 0 || length < 0 || offset + length > this.length) {
throw new IndexOutOfBoundsException();
}
return new RandomAccessDataFile(this.file, this.filePool, this.offset + offset,
length);
}
@Override
public long getSize() {
return this.length;
}
public void close() throws IOException {
this.filePool.close();
}
/**
* {@link RandomAccessDataInputStream} implementation for the
* {@link RandomAccessDataFile}.
*/
private class DataInputStream extends InputStream {
private RandomAccessFile file;
private int position;
DataInputStream(ResourceAccess access) throws IOException {
if (access == ResourceAccess.ONCE) {
this.file = new RandomAccessFile(RandomAccessDataFile.this.file, "r");
this.file.seek(RandomAccessDataFile.this.offset);
}
}
@Override
public int read() throws IOException {
return doRead(null, 0, 1);
}
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b == null ? 0 : b.length);
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (b == null) {
throw new NullPointerException("Bytes must not be null");
}
return doRead(b, off, len);
}
/**
* Perform the actual read.
* @param b the bytes to read or {@code null} when reading a single byte
* @param off the offset of the byte array
* @param len the length of data to read
* @return the number of bytes read into {@code b} or the actual read byte if
* {@code b} is {@code null}. Returns -1 when the end of the stream is reached
* @throws IOException in case of I/O errors
*/
public int doRead(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return 0;
}
int cappedLen = cap(len);
if (cappedLen <= 0) {
return -1;
}
RandomAccessFile file = this.file;
if (file == null) {
file = RandomAccessDataFile.this.filePool.acquire();
file.seek(RandomAccessDataFile.this.offset + this.position);
}
try {
if (b == null) {
int rtn = file.read();
moveOn(rtn == -1 ? 0 : 1);
return rtn;
}
else {
return (int) moveOn(file.read(b, off, cappedLen));
}
}
finally {
if (this.file == null) {
RandomAccessDataFile.this.filePool.release(file);
}
}
}
@Override
public long skip(long n) throws IOException {
return (n <= 0 ? 0 : moveOn(cap(n)));
}
@Override
public void close() throws IOException {
if (this.file != null) {
this.file.close();
}
}
/**
* Cap the specified value such that it cannot exceed the number of bytes
* remaining.
* @param n the value to cap
* @return the capped value
*/
private int cap(long n) {
return (int) Math.min(RandomAccessDataFile.this.length - this.position, n);
}
/**
* Move the stream position forwards the specified amount.
* @param amount the amount to move
* @return the amount moved
*/
private long moveOn(int amount) {
this.position += amount;
return amount;
}
}
/**
* Manage a pool that can be used to perform concurrent reads on the underlying
* {@link RandomAccessFile}.
*/
private class FilePool {
private final int size;
private final Semaphore available;
private final Queue<RandomAccessFile> files;
FilePool(int size) {
this.size = size;
this.available = new Semaphore(size);
this.files = new ConcurrentLinkedQueue<RandomAccessFile>();
}
public RandomAccessFile acquire() throws IOException {
this.available.acquireUninterruptibly();
RandomAccessFile file = this.files.poll();
return (file == null
? new RandomAccessFile(RandomAccessDataFile.this.file, "r") : file);
}
public void release(RandomAccessFile file) {
this.files.add(file);
this.available.release();
}
public void close() throws IOException {
this.available.acquireUninterruptibly(this.size);
try {
RandomAccessFile file = this.files.poll();
while (file != null) {
file.close();
file = this.files.poll();
}
}
finally {
this.available.release(this.size);
}
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.Domain;
import com.facebook.presto.spi.SortedRangeSet;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.expressionOrNullSymbols;
import static com.facebook.presto.sql.ExpressionUtils.extractConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.stripNonDeterministicConjuncts;
import static com.facebook.presto.sql.planner.EqualityInference.createEqualityInference;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
/**
* Computes the effective predicate at the top of the specified PlanNode
* <p/>
* Note: non-deterministic predicates can not be pulled up (so they will be ignored)
*/
public class EffectivePredicateExtractor
        extends PlanVisitor<Void, Expression>
{
    /**
     * Compute the effective predicate guaranteed to hold over the output rows
     * of the given node, by visiting its subtree.
     */
    public static Expression extract(PlanNode node)
    {
        return node.accept(new EffectivePredicateExtractor(), null);
    }

    // Fallback for node types with no specific handling: TRUE is the only
    // predicate that is always safe to claim.
    @Override
    protected Expression visitPlan(PlanNode node, Void context)
    {
        return BooleanLiteral.TRUE_LITERAL;
    }

    @Override
    public Expression visitAggregation(AggregationNode node, Void context)
    {
        Expression underlyingPredicate = node.getSource().accept(this, context);
        // Only conjuncts expressible purely over the group-by symbols remain
        // valid above the aggregation.
        return pullExpressionThroughSymbols(underlyingPredicate, node.getGroupBy());
    }

    @Override
    public Expression visitFilter(FilterNode node, Void context)
    {
        Expression underlyingPredicate = node.getSource().accept(this, context);
        Expression predicate = node.getPredicate();
        // Remove non-deterministic conjuncts (they cannot be pulled up safely)
        predicate = stripNonDeterministicConjuncts(predicate);
        return combineConjuncts(predicate, underlyingPredicate);
    }

    // Matches identity projections, i.e. entries of the form
    // symbol -> reference-to-that-same-symbol.
    private static Predicate<Map.Entry<Symbol, Expression>> symbolMatchesExpression()
    {
        return new Predicate<Map.Entry<Symbol, Expression>>()
        {
            @Override
            public boolean apply(Map.Entry<Symbol, Expression> entry)
            {
                return entry.getValue().equals(new QualifiedNameReference(entry.getKey().toQualifiedName()));
            }
        };
    }

    @Override
    public Expression visitProject(ProjectNode node, Void context)
    {
        // TODO: add simple algebraic solver for projection translation (right now only considers identity projections)
        Expression underlyingPredicate = node.getSource().accept(this, context);
        // Each non-identity projection contributes an equality between the
        // output symbol and the projected expression.
        Iterable<Expression> projectionEqualities = transform(filter(node.getOutputMap().entrySet(), not(symbolMatchesExpression())),
                new Function<Map.Entry<Symbol, Expression>, Expression>()
                {
                    @Override
                    public Expression apply(Map.Entry<Symbol, Expression> entry)
                    {
                        QualifiedNameReference reference = new QualifiedNameReference(entry.getKey().toQualifiedName());
                        Expression expression = entry.getValue();
                        return new ComparisonExpression(ComparisonExpression.Type.EQUAL, reference, expression);
                    }
                });
        // Combine the equalities with the source predicate, then keep only
        // what can be expressed over this node's output symbols.
        return pullExpressionThroughSymbols(combineConjuncts(
                ImmutableList.<Expression>builder()
                        .addAll(projectionEqualities)
                        .add(underlyingPredicate)
                        .build()),
                node.getOutputSymbols());
    }

    // TopN, Limit, DistinctLimit, Sort and Window do not change which
    // predicates hold over their outputs, so the source predicate passes
    // through unchanged.
    @Override
    public Expression visitTopN(TopNNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitLimit(LimitNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitDistinctLimit(DistinctLimitNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitTableScan(TableScanNode node, Void context)
    {
        if (!node.getGeneratedPartitions().isPresent()) {
            return BooleanLiteral.TRUE_LITERAL;
        }
        // The effective predicate can be computed from the intersection of the aggregate partition TupleDomain summary (generated from Partitions)
        // and the TupleDomain that was initially used to generate those Partitions. We do this because we need to select the more restrictive of the two.
        // Note: the TupleDomain used to generate the partitions may contain columns/predicates that are unknown to the partition TupleDomain summary,
        // but those are guaranteed to be part of a FilterNode directly above this table scan, so it's ok to include.
        TupleDomain tupleDomain = node.getPartitionsDomainSummary().intersect(node.getGeneratedPartitions().get().getTupleDomainInput());
        // A TupleDomain that has too many disjunctions will produce an Expression that will be very expensive to evaluate at runtime.
        // For the time being, we will just summarize the TupleDomain by the span over each of its columns (which is ok since we only need to generate
        // an effective predicate here).
        // In the future, we can do further optimizations here that will simplify the TupleDomain, but still improve the specificity compared to just a simple span (e.g. range clustering).
        tupleDomain = spanTupleDomain(tupleDomain);
        Expression partitionPredicate = DomainTranslator.toPredicate(tupleDomain, ImmutableBiMap.copyOf(node.getAssignments()).inverse());
        return pullExpressionThroughSymbols(partitionPredicate, node.getOutputSymbols());
    }

    // Widen each column's domain to a single span so that the resulting
    // predicate stays cheap to evaluate (see note in visitTableScan).
    private static TupleDomain spanTupleDomain(TupleDomain tupleDomain)
    {
        if (tupleDomain.isNone()) {
            return tupleDomain;
        }
        Map<ColumnHandle, Domain> spannedDomains = Maps.transformValues(tupleDomain.getDomains(), new Function<Domain, Domain>()
        {
            @Override
            public Domain apply(Domain domain)
            {
                // Retain nullability, but collapse each SortedRangeSet into a single span
                return Domain.create(getSortedRangeSpan(domain.getRanges()), domain.isNullAllowed());
            }
        });
        return TupleDomain.withColumnDomains(spannedDomains);
    }

    private static SortedRangeSet getSortedRangeSpan(SortedRangeSet rangeSet)
    {
        return rangeSet.isNone() ? SortedRangeSet.none(rangeSet.getType()) : SortedRangeSet.of(rangeSet.getSpan());
    }

    @Override
    public Expression visitSort(SortNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitWindow(WindowNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitUnion(UnionNode node, Void context)
    {
        // A conjunct holds over a UNION's output only if it holds over every
        // input branch (after rewriting into output-symbol terms).
        Expression firstUnderlyingPredicate = node.getSources().get(0).accept(this, context);
        // Rewrite in terms of output symbols
        Expression firstOutputPredicate = ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(node.outputSymbolMap(0)), firstUnderlyingPredicate);
        Set<Expression> conjuncts = ImmutableSet.copyOf(extractConjuncts(firstOutputPredicate));
        // Find the intersection of all predicates
        for (int i = 1; i < node.getSources().size(); i++) {
            Expression underlyingPredicate = node.getSources().get(i).accept(this, context);
            // Rewrite in terms of output symbols
            Expression outputPredicate = ExpressionTreeRewriter.rewriteWith(new ExpressionSymbolInliner(node.outputSymbolMap(i)), underlyingPredicate);
            // TODO: use a more precise way to determine overlapping conjuncts (e.g. commutative predicates)
            conjuncts = Sets.intersection(conjuncts, ImmutableSet.copyOf(extractConjuncts(outputPredicate)));
        }
        return combineConjuncts(conjuncts);
    }

    @Override
    public Expression visitJoin(JoinNode node, Void context)
    {
        Expression leftPredicate = node.getLeft().accept(this, context);
        Expression rightPredicate = node.getRight().accept(this, context);
        // Equi-join criteria become equality conjuncts over the joined symbols.
        List<Expression> joinConjuncts = new ArrayList<>();
        for (JoinNode.EquiJoinClause clause : node.getCriteria()) {
            joinConjuncts.add(new ComparisonExpression(ComparisonExpression.Type.EQUAL,
                    new QualifiedNameReference(clause.getLeft().toQualifiedName()),
                    new QualifiedNameReference(clause.getRight().toQualifiedName())));
        }
        switch (node.getType()) {
            case INNER:
            case CROSS:
                // Inner/cross joins preserve both input predicates plus the
                // join criteria.
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(leftPredicate)
                        .add(rightPredicate)
                        .addAll(joinConjuncts)
                        .build());
            case LEFT:
                // Outer (left) side conjuncts hold unconditionally; inner
                // (right) side conjuncts hold unless the right symbols were
                // null-padded by the outer join.
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(leftPredicate)
                        .addAll(transform(extractConjuncts(rightPredicate), expressionOrNullSymbols(in(node.getRight().getOutputSymbols()))))
                        .addAll(transform(joinConjuncts, expressionOrNullSymbols(in(node.getRight().getOutputSymbols()))))
                        .build());
            case RIGHT:
                // Mirror of the LEFT case: left-side conjuncts hold unless
                // the left symbols were null-padded.
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(rightPredicate)
                        .addAll(transform(extractConjuncts(leftPredicate), expressionOrNullSymbols(in(node.getLeft().getOutputSymbols()))))
                        .addAll(transform(joinConjuncts, expressionOrNullSymbols(in(node.getLeft().getOutputSymbols()))))
                        .build());
            default:
                throw new UnsupportedOperationException("Unknown join type: " + node.getType());
        }
    }

    @Override
    public Expression visitSemiJoin(SemiJoinNode node, Void context)
    {
        // Filtering source does not change the effective predicate over the output symbols
        return node.getSource().accept(this, context);
    }

    // Keep only conjuncts that can be rewritten purely over the given symbols,
    // using inferred equalities, and add the equalities scoped to those symbols.
    private static Expression pullExpressionThroughSymbols(Expression expression, Collection<Symbol> symbols)
    {
        EqualityInference equalityInference = createEqualityInference(expression);
        ImmutableList.Builder<Expression> effectiveConjuncts = ImmutableList.builder();
        for (Expression conjunct : EqualityInference.nonInferrableConjuncts(expression)) {
            Expression rewritten = equalityInference.rewriteExpression(conjunct, in(symbols));
            if (rewritten != null) {
                effectiveConjuncts.add(rewritten);
            }
        }
        effectiveConjuncts.addAll(equalityInference.generateEqualitiesPartitionedBy(in(symbols)).getScopeEqualities());
        return combineConjuncts(effectiveConjuncts.build());
    }
}
| |
package nu.annat.simplesql.sqliteandroid;
import android.database.Cursor;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;
import nu.annat.simplesql.HelperColumnMapping;
import nu.annat.simplesql.HelperColumnMapping.ColumnMap;
import nu.annat.simplesql.HelperResultSet;
/**
* Sql lite implementation of the HelperResultSet.
* This converts the sqlite and adroid specific calls to be used in a common way by the framework.
*/
/**
 * SQLite implementation of the HelperResultSet.
 * This converts the SQLite and Android specific calls to be used in a common way
 * by the framework.
 */
public class SqliteResultSet implements HelperResultSet {

    private final Cursor cursor;

    // Lazily built column-name/index mapping; created on first use.
    HelperColumnMapping mapping = null;

    // Tracks whether the most recently read column was SQL NULL.
    private boolean wasNull = false;

    /**
     * Create a result set wrapping the given Android cursor.
     * @param cursor the cursor to read from; {@link #close()} closes it
     */
    public SqliteResultSet(Cursor cursor) {
        this.cursor = cursor;
    }

    public Cursor getCursor() {
        return cursor;
    }

    @Override
    public boolean next() {
        return cursor.moveToNext();
    }

    @Override
    public void close() {
        cursor.close();
    }

    /**
     * Build (once) and return the column name/index mapping for the cursor.
     */
    @Override
    public HelperColumnMapping getColumnMapping() throws SQLException {
        if (mapping == null) {
            mapping = new HelperColumnMapping();
            for (int i = 0; i < cursor.getColumnCount(); i++) {
                ColumnMap map = new ColumnMap();
                map.index = i;
                map.name = cursor.getColumnName(i);
                mapping.add(map);
            }
        }
        return mapping;
    }

    @Override
    public int getIndexOf(String columnName) {
        return cursor.getColumnIndex(columnName);
    }

    // Android cursors are zero-indexed (unlike JDBC's one-based columns).
    @Override
    public int getFirstColumn() {
        return 0;
    }

    @Override
    public int getColumnCount() {
        return cursor.getColumnCount();
    }

    @Override
    public String getString(int i) {
        if (cursor.isNull(i)) {
            wasNull = true;
            return null;
        } else {
            wasNull = false;
            return cursor.getString(i);
        }
    }

    @Override
    public Integer getIntObject(int i) {
        if (cursor.isNull(i)) {
            wasNull = true;
            return null;
        } else {
            wasNull = false;
            return Integer.valueOf(cursor.getInt(i));
        }
    }

    @Override
    public int getInt(int i) {
        return getInt(i, 0);
    }

    // NOTE(review): the primitive getters read first and fall back on an
    // exception + isNull() check rather than checking isNull() up front;
    // presumably this tolerates cursor implementations that throw on NULL
    // columns — confirm before simplifying.
    @Override
    public int getInt(int i, int whenNull) {
        wasNull = false;
        try {
            return cursor.getInt(i);
        } catch (Exception e) {
            if (cursor.isNull(i)) {
                wasNull = true;
                return whenNull;
            } else {
                throw e;
            }
        }
    }

    @Override
    public Long getLongObject(int i) {
        if (cursor.isNull(i)) {
            wasNull = true;
            return null;
        } else {
            wasNull = false;
            return Long.valueOf(cursor.getLong(i));
        }
    }

    @Override
    public long getLong(int i) {
        return getLong(i, 0);
    }

    @Override
    public long getLong(int i, long whenNull) {
        wasNull = false;
        try {
            return cursor.getLong(i);
        } catch (Exception e) {
            if (cursor.isNull(i)) {
                wasNull = true;
                return whenNull;
            } else {
                throw e;
            }
        }
    }

    @Override
    public Timestamp getTimestamp(int i) {
        throw new RuntimeException("Not implemented");
    }

    @Override
    public Double getDoubleObj(int i) {
        if (cursor.isNull(i)) {
            wasNull = true;
            return null;
        } else {
            wasNull = false;
            return Double.valueOf(cursor.getDouble(i));
        }
    }

    @Override
    public double getDouble(int i) {
        return getDouble(i, 0);
    }

    @Override
    public double getDouble(int i, double whenNull) {
        wasNull = false;
        try {
            return cursor.getDouble(i);
        } catch (Exception e) {
            if (cursor.isNull(i)) {
                wasNull = true;
                return whenNull;
            } else {
                throw e;
            }
        }
    }

    @Override
    public Float getFloatObj(int i) {
        if (cursor.isNull(i)) {
            wasNull = true;
            return null;
        } else {
            wasNull = false;
            return Float.valueOf(cursor.getFloat(i));
        }
    }

    @Override
    public float getFloat(int i) {
        return getFloat(i, 0);
    }

    @Override
    public float getFloat(int i, float whenNull) {
        wasNull = false;
        try {
            return cursor.getFloat(i);
        } catch (Exception e) {
            if (cursor.isNull(i)) {
                wasNull = true;
                return whenNull;
            } else {
                throw e;
            }
        }
    }

    @Override
    public Boolean getBooleanObj(int i) {
        throw new RuntimeException("Not implemented");
    }

    // Booleans are stored as text; any value starting with "t" is true.
    // A NULL column makes getString(i) return null, triggering the NPE path
    // and the whenNull fallback below.
    @Override
    public boolean getBoolean(int i, boolean whenNull) {
        wasNull = false;
        try {
            return cursor.getString(i).startsWith("t");
        } catch (Exception e) {
            if (cursor.isNull(i)) {
                wasNull = true;
                return whenNull;
            } else {
                throw e;
            }
        }
    }

    @Override
    public Date getDate(int i) {
        throw new RuntimeException("Not implemented");
    }

    /**
     * @return whether the most recent column read produced SQL NULL
     */
    @Override
    public boolean wasNull() {
        return wasNull;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2;
import java.io.IOException;
import java.net.InetSocketAddress;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskResponse;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl;
import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl;
import org.junit.Test;
public class TestRPCFactories {

    @Test
    public void test() {
        testPbServerFactory();
        testPbClientFactory();
    }

    /**
     * Verify that a protobuf RPC server can be created and started for
     * {@link MRClientProtocol}.
     */
    private void testPbServerFactory() {
        InetSocketAddress addr = new InetSocketAddress(0);
        Configuration conf = new Configuration();
        MRClientProtocol instance = new MRClientProtocolTestImpl();
        Server server = null;
        try {
            server =
                    RpcServerFactoryPBImpl.get().getServer(
                            MRClientProtocol.class, instance, addr, conf, null, 1);
            server.start();
        } catch (YarnRuntimeException e) {
            e.printStackTrace();
            Assert.fail("Failed to create server");
        } finally {
            // Guard against NPE when server creation itself failed; otherwise
            // the NPE from server.stop() would mask the assertion failure.
            if (server != null) {
                server.stop();
            }
        }
    }

    /**
     * Verify that a protobuf RPC client can be created against a running
     * {@link MRClientProtocol} server.
     */
    private void testPbClientFactory() {
        InetSocketAddress addr = new InetSocketAddress(0);
        System.err.println(addr.getHostName() + addr.getPort());
        Configuration conf = new Configuration();
        MRClientProtocol instance = new MRClientProtocolTestImpl();
        Server server = null;
        try {
            server =
                    RpcServerFactoryPBImpl.get().getServer(
                            MRClientProtocol.class, instance, addr, conf, null, 1);
            server.start();
            System.err.println(server.getListenerAddress());
            System.err.println(NetUtils.getConnectAddress(server));
            MRClientProtocol client = null;
            try {
                client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf);
            } catch (YarnRuntimeException e) {
                e.printStackTrace();
                Assert.fail("Failed to create client");
            }
        } catch (YarnRuntimeException e) {
            e.printStackTrace();
            Assert.fail("Failed to create server");
        } finally {
            // Guard against NPE when server creation itself failed.
            if (server != null) {
                server.stop();
            }
        }
    }

    /**
     * Stub {@link MRClientProtocol} implementation; every protocol method
     * returns {@code null}. Only used to stand up the RPC endpoints above.
     */
    public class MRClientProtocolTestImpl implements MRClientProtocol {
        @Override
        public InetSocketAddress getConnectAddress() {
            return null;
        }

        @Override
        public GetJobReportResponse getJobReport(GetJobReportRequest request)
                throws IOException {
            return null;
        }

        @Override
        public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
                throws IOException {
            return null;
        }

        @Override
        public GetTaskAttemptReportResponse getTaskAttemptReport(
                GetTaskAttemptReportRequest request) throws IOException {
            return null;
        }

        @Override
        public GetCountersResponse getCounters(GetCountersRequest request)
                throws IOException {
            return null;
        }

        @Override
        public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
                GetTaskAttemptCompletionEventsRequest request)
                throws IOException {
            return null;
        }

        @Override
        public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
                throws IOException {
            return null;
        }

        @Override
        public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
                throws IOException {
            return null;
        }

        @Override
        public KillJobResponse killJob(KillJobRequest request)
                throws IOException {
            return null;
        }

        @Override
        public KillTaskResponse killTask(KillTaskRequest request)
                throws IOException {
            return null;
        }

        @Override
        public KillTaskAttemptResponse killTaskAttempt(
                KillTaskAttemptRequest request) throws IOException {
            return null;
        }

        @Override
        public FailTaskAttemptResponse failTaskAttempt(
                FailTaskAttemptRequest request) throws IOException {
            return null;
        }

        @Override
        public GetDelegationTokenResponse getDelegationToken(
                GetDelegationTokenRequest request) throws IOException {
            return null;
        }

        @Override
        public RenewDelegationTokenResponse renewDelegationToken(
                RenewDelegationTokenRequest request) throws IOException {
            return null;
        }

        @Override
        public CancelDelegationTokenResponse cancelDelegationToken(
                CancelDelegationTokenRequest request) throws IOException {
            return null;
        }
    }
}
| |
package com.fsck.k9.message.html;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
public class HttpUriParserTest {
private final HttpUriParser parser = new HttpUriParser();
@Test
public void emptyUriIgnored() {
assertInvalidUri("http://");
}
@Test
public void emptyAuthorityIgnored() {
assertInvalidUri("http:///");
}
@Test
public void simpleDomain() {
assertValidUri("http://www.google.com");
}
@Test
public void simpleDomainWithHttps() {
assertValidUri("https://www.google.com");
}
@Test
public void simpleRtspUri() {
assertValidUri("rtsp://example.com/media.mp4");
}
@Test
public void invalidDomainIgnored() {
assertInvalidUri("http://-www.google.com");
}
@Test
public void domainWithTrailingSlash() {
assertValidUri("http://www.google.com/");
}
@Test
public void domainWithUserInfo() {
assertValidUri("http://test@google.com/");
}
@Test
public void domainWithFullUserInfo() {
assertValidUri("http://test:secret@google.com/");
}
@Test
public void domainWithoutWww() {
assertValidUri("http://google.com/");
}
@Test
public void query() {
assertValidUri("http://google.com/give/me/?q=mode&c=information");
}
@Test
public void fragment() {
assertValidUri("http://google.com/give/me#only-the-best");
}
@Test
public void queryAndFragment() {
assertValidUri("http://google.com/give/me/?q=mode&c=information#only-the-best");
}
@Test
public void ipv4Address() {
assertValidUri("http://127.0.0.1");
}
@Test
public void ipv4AddressWithTrailingSlash() {
assertValidUri("http://127.0.0.1/");
}
@Test
public void ipv4AddressWithEmptyPort() {
assertValidUri("http://127.0.0.1:");
}
@Test
public void ipv4AddressWithPort() {
assertValidUri("http://127.0.0.1:524/");
}
@Test
public void ipv6Address() {
assertValidUri("http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]");
}
@Test
public void ipv6AddressWithPort() {
assertValidUri("http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80");
}
@Test
public void ipv6AddressWithTrailingSlash() {
assertValidUri("http://[1080:0:0:0:8:800:200C:417A]/");
}
@Test
public void ipv6AddressWithEndCompression() {
assertValidUri("http://[3ffe:2a00:100:7031::1]");
}
@Test
public void ipv6AddressWithBeginCompression() {
assertValidUri("http://[1080::8:800:200C:417A]/");
}
@Test
public void ipv6AddressWithCompressionPort() {
assertValidUri("http://[::FFFF:129.144.52.38]:80/");
}
@Test
public void ipv6AddressWithPrependedCompression() {
assertValidUri("http://[::192.9.5.5]/");
}
@Test
public void ipv6AddressWithTrailingIp4AndPort() {
assertValidUri("http://[::192.9.5.5]:80/");
}
@Test
public void ipv6WithoutClosingSquareBracketIgnored() {
assertInvalidUri("http://[1080:0:0:0:8:80:200C:417A/");
}
@Test
public void ipv6InvalidClosingSquareBracketIgnored() {
assertInvalidUri("http://[1080:0:0:0:8:800:270C:417A/]");
}
@Test
public void domainWithTrailingSpace() {
String text = "http://google.com/ ";
UriMatch uriMatch = parser.parseUri(text, 0);
assertUriMatch("http://google.com/", uriMatch);
}
@Test
public void domainWithTrailingNewline() {
String text = "http://google.com/\n";
UriMatch uriMatch = parser.parseUri(text, 0);
assertUriMatch("http://google.com/", uriMatch);
}
@Test
public void domainWithTrailingAngleBracket() {
String text = "<http://google.com/>";
UriMatch uriMatch = parser.parseUri(text, 1);
assertUriMatch("http://google.com/", uriMatch, 1);
}
@Test
public void uriInMiddleAfterInput() {
String prefix = "prefix ";
String uri = "http://google.com/";
String text = prefix + uri;
UriMatch uriMatch = parser.parseUri(text, prefix.length());
assertUriMatch("http://google.com/", uriMatch, prefix.length());
}
@Test
public void uriInMiddleOfInput() {
String prefix = "prefix ";
String uri = "http://google.com/";
String postfix = " postfix";
String text = prefix + uri + postfix;
UriMatch uriMatch = parser.parseUri(text, prefix.length());
assertUriMatch("http://google.com/", uriMatch, prefix.length());
}
private void assertValidUri(String uri) {
UriMatch uriMatch = parser.parseUri(uri, 0);
assertUriMatch(uri, uriMatch);
}
private void assertUriMatch(String uri, UriMatch uriMatch) {
assertUriMatch(uri, uriMatch, 0);
}
private void assertUriMatch(String uri, UriMatch uriMatch, int offset) {
assertNotNull(uriMatch);
Assert.assertEquals(offset, uriMatch.getStartIndex());
Assert.assertEquals(uri.length() + offset, uriMatch.getEndIndex());
Assert.assertEquals(uri, uriMatch.getUri().toString());
}
/** Asserts that parsing {@code uri} from offset 0 yields no match at all. */
private void assertInvalidUri(String uri) {
    assertNull(parser.parseUri(uri, 0));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
import org.apache.hadoop.mapreduce.ClusterMetrics;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.QueueAclsInfo;
import org.apache.hadoop.mapreduce.QueueInfo;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.TaskReport;
import org.apache.hadoop.mapreduce.TaskTrackerInfo;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.mapreduce.v2.LogParams;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.DelegationToken;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.ProtoUtils;
import com.google.common.annotations.VisibleForTesting;
/**
 * This class enables the current JobClient (0.22 hadoop) to run on YARN.
 * <p>
 * It adapts the classic {@link ClientProtocol} job-client API onto YARN:
 * cluster-level queries are delegated to a {@link ResourceMgrDelegate}
 * (ResourceManager), while per-job queries go through a {@link ClientCache}
 * of MR client proxies (MR AM or history server).
 */
@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
public class YARNRunner implements ClientProtocol {

  private static final Log LOG = LogFactory.getLog(YARNRunner.class);

  // Factory used to build YARN protocol record instances (Resource, contexts, ...).
  private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
  // Talks to the ResourceManager: app submission, cluster metrics, queues, etc.
  private ResourceMgrDelegate resMgrDelegate;
  // Caches per-job MRClientProtocol proxies used for job-level queries.
  private ClientCache clientCache;
  private Configuration conf;
  // File context of the default file system; used to localize job resources.
  private final FileContext defaultFileContext;

  /* usually is false unless the jobclient get delegation token is
   * called. This is a hack wherein we do return a token from RM
   * on getDelegationtoken but due to the restricted api on jobclient
   * we just add a job history DT token when submitting a job.
   */
  private static final boolean DEFAULT_HS_DELEGATION_TOKEN_REQUIRED =
      false;

  /**
   * Yarn runner incapsulates the client interface of
   * yarn
   * @param conf the configuration object for the client
   */
  public YARNRunner(Configuration conf) {
    this(conf, new ResourceMgrDelegate(new YarnConfiguration(conf)));
  }

  /**
   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting
   * {@link ResourceMgrDelegate}. Enables mocking and testing.
   * @param conf the configuration object for the client
   * @param resMgrDelegate the resourcemanager client handle.
   */
  public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate) {
    this(conf, resMgrDelegate, new ClientCache(conf, resMgrDelegate));
  }

  /**
   * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)}
   * but allowing injecting {@link ClientCache}. Enable mocking and testing.
   * @param conf the configuration object
   * @param resMgrDelegate the resource manager delegate
   * @param clientCache the client cache object.
   */
  public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate,
      ClientCache clientCache) {
    this.conf = conf;
    try {
      this.resMgrDelegate = resMgrDelegate;
      this.clientCache = clientCache;
      // May throw if the configured default FS type is unsupported.
      this.defaultFileContext = FileContext.getFileContext(this.conf);
    } catch (UnsupportedFileSystemException ufe) {
      throw new RuntimeException("Error in instantiating YarnClient", ufe);
    }
  }

  @Private
  /**
   * Used for testing mostly.
   * @param resMgrDelegate the resource manager delegate to set to.
   */
  public void setResourceMgrDelegate(ResourceMgrDelegate resMgrDelegate) {
    this.resMgrDelegate = resMgrDelegate;
  }

  /** Not supported on YARN; token renewal/cancellation goes through Token itself. */
  @Override
  public void cancelDelegationToken(Token<DelegationTokenIdentifier> arg0)
      throws IOException, InterruptedException {
    throw new UnsupportedOperationException("Use Token.renew instead");
  }

  /** Delegates to the ResourceManager. */
  @Override
  public TaskTrackerInfo[] getActiveTrackers() throws IOException,
      InterruptedException {
    return resMgrDelegate.getActiveTrackers();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public JobStatus[] getAllJobs() throws IOException, InterruptedException {
    return resMgrDelegate.getAllJobs();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public TaskTrackerInfo[] getBlacklistedTrackers() throws IOException,
      InterruptedException {
    return resMgrDelegate.getBlacklistedTrackers();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public ClusterMetrics getClusterMetrics() throws IOException,
      InterruptedException {
    return resMgrDelegate.getClusterMetrics();
  }

  /**
   * Fetches an MR delegation token from the JobHistoryServer, with this
   * client's master principal as the renewer.
   */
  @VisibleForTesting
  Token<?> getDelegationTokenFromHS(MRClientProtocol hsProxy)
      throws IOException, InterruptedException {
    GetDelegationTokenRequest request = recordFactory
        .newRecordInstance(GetDelegationTokenRequest.class);
    request.setRenewer(Master.getMasterPrincipal(conf));
    DelegationToken mrDelegationToken = hsProxy.getDelegationToken(request)
        .getDelegationToken();
    return ProtoUtils.convertFromProtoFormat(mrDelegationToken,
        hsProxy.getConnectAddress());
  }

  @Override
  public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
      throws IOException, InterruptedException {
    // The token is only used for serialization. So the type information
    // mismatch should be fine.
    return resMgrDelegate.getDelegationToken(renewer);
  }

  /** Delegates to the ResourceManager. */
  @Override
  public String getFilesystemName() throws IOException, InterruptedException {
    return resMgrDelegate.getFilesystemName();
  }

  /** Delegates to the ResourceManager (a new YARN application id backs the job id). */
  @Override
  public JobID getNewJobID() throws IOException, InterruptedException {
    return resMgrDelegate.getNewJobID();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public QueueInfo getQueue(String queueName) throws IOException,
      InterruptedException {
    return resMgrDelegate.getQueue(queueName);
  }

  /** Delegates to the ResourceManager. */
  @Override
  public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException,
      InterruptedException {
    return resMgrDelegate.getQueueAclsForCurrentUser();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public QueueInfo[] getQueues() throws IOException, InterruptedException {
    return resMgrDelegate.getQueues();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public QueueInfo[] getRootQueues() throws IOException, InterruptedException {
    return resMgrDelegate.getRootQueues();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public QueueInfo[] getChildQueues(String parent) throws IOException,
      InterruptedException {
    return resMgrDelegate.getChildQueues(parent);
  }

  /** Delegates to the ResourceManager. */
  @Override
  public String getStagingAreaDir() throws IOException, InterruptedException {
    return resMgrDelegate.getStagingAreaDir();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public String getSystemDir() throws IOException, InterruptedException {
    return resMgrDelegate.getSystemDir();
  }

  /** Delegates to the ResourceManager. */
  @Override
  public long getTaskTrackerExpiryInterval() throws IOException,
      InterruptedException {
    return resMgrDelegate.getTaskTrackerExpiryInterval();
  }

  /**
   * Submits the job to YARN: optionally adds a history-server delegation
   * token, persists the credentials to the job submit dir, builds the AM
   * submission context and hands it to the ResourceManager. Fails fast if
   * the application immediately reports FAILED or KILLED.
   */
  @Override
  public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
      throws IOException, InterruptedException {
    /* check if we have a hsproxy, if not, no need */
    MRClientProtocol hsProxy = clientCache.getInitializedHSProxy();
    if (hsProxy != null) {
      // JobClient will set this flag if getDelegationToken is called, if so, get
      // the delegation tokens for the HistoryServer also.
      if (conf.getBoolean(JobClient.HS_DELEGATION_TOKEN_REQUIRED,
          DEFAULT_HS_DELEGATION_TOKEN_REQUIRED)) {
        Token hsDT = getDelegationTokenFromHS(hsProxy);
        ts.addToken(hsDT.getService(), hsDT);
      }
    }

    // Upload only in security mode: TODO
    Path applicationTokensFile =
        new Path(jobSubmitDir, MRJobConfig.APPLICATION_TOKENS_FILE);
    try {
      ts.writeTokenStorageFile(applicationTokensFile, conf);
    } catch (IOException e) {
      throw new YarnException(e);
    }

    // Construct necessary information to start the MR AM
    ApplicationSubmissionContext appContext =
        createApplicationSubmissionContext(conf, jobSubmitDir, ts);

    // Submit to ResourceManager
    ApplicationId applicationId = resMgrDelegate.submitApplication(appContext);

    ApplicationReport appMaster = resMgrDelegate
        .getApplicationReport(applicationId);
    String diagnostics =
        (appMaster == null ?
            "application report is null" : appMaster.getDiagnostics());
    if (appMaster == null || appMaster.getYarnApplicationState() == YarnApplicationState.FAILED
        || appMaster.getYarnApplicationState() == YarnApplicationState.KILLED) {
      throw new IOException("Failed to run job : " +
          diagnostics);
    }
    return clientCache.getClient(jobId).getJobStatus(jobId);
  }

  /**
   * Builds a {@link LocalResource} record for path {@code p}, resolved
   * against the default file system, with application-level visibility.
   */
  private LocalResource createApplicationResource(FileContext fs, Path p, LocalResourceType type)
      throws IOException {
    LocalResource rsrc = recordFactory.newRecordInstance(LocalResource.class);
    FileStatus rsrcStat = fs.getFileStatus(p);
    rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs
        .getDefaultFileSystem().resolvePath(rsrcStat.getPath())));
    rsrc.setSize(rsrcStat.getLen());
    rsrc.setTimestamp(rsrcStat.getModificationTime());
    rsrc.setType(type);
    rsrc.setVisibility(LocalResourceVisibility.APPLICATION);
    return rsrc;
  }

  /**
   * Assembles the full {@link ApplicationSubmissionContext} for the MR AM:
   * resource capability, local resources (job conf, job jar, splits, tokens),
   * launch command, environment, distributed cache entries and ACLs.
   */
  public ApplicationSubmissionContext createApplicationSubmissionContext(
      Configuration jobConf,
      String jobSubmitDir, Credentials ts) throws IOException {
    ApplicationId applicationId = resMgrDelegate.getApplicationId();

    // Setup resource requirements
    Resource capability = recordFactory.newRecordInstance(Resource.class);
    capability.setMemory(conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
        MRJobConfig.DEFAULT_MR_AM_VMEM_MB));
    LOG.debug("AppMaster capability = " + capability);

    // Setup LocalResources
    Map<String, LocalResource> localResources =
        new HashMap<String, LocalResource>();

    Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE);

    URL yarnUrlForJobSubmitDir = ConverterUtils
        .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem()
            .resolvePath(
                defaultFileContext.makeQualified(new Path(jobSubmitDir))));
    LOG.debug("Creating setup context, jobSubmitDir url is "
        + yarnUrlForJobSubmitDir);

    localResources.put(MRJobConfig.JOB_CONF_FILE,
        createApplicationResource(defaultFileContext,
            jobConfPath, LocalResourceType.FILE));
    if (jobConf.get(MRJobConfig.JAR) != null) {
      Path jobJarPath = new Path(jobConf.get(MRJobConfig.JAR));
      LocalResource rc = createApplicationResource(defaultFileContext,
          jobJarPath,
          LocalResourceType.PATTERN);
      // PATTERN resources are partially unpacked; this regex selects what to unpack.
      String pattern = conf.getPattern(JobContext.JAR_UNPACK_PATTERN,
          JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern();
      rc.setPattern(pattern);
      localResources.put(MRJobConfig.JOB_JAR, rc);
    } else {
      // Job jar may be null. For e.g, for pipes, the job jar is the hadoop
      // mapreduce jar itself which is already on the classpath.
      LOG.info("Job jar is not present. "
          + "Not adding any jar to the list of resources.");
    }

    // TODO gross hack
    for (String s : new String[] {
        MRJobConfig.JOB_SPLIT,
        MRJobConfig.JOB_SPLIT_METAINFO,
        MRJobConfig.APPLICATION_TOKENS_FILE }) {
      localResources.put(
          MRJobConfig.JOB_SUBMIT_DIR + "/" + s,
          createApplicationResource(defaultFileContext,
              new Path(jobSubmitDir, s), LocalResourceType.FILE));
    }

    // Setup security tokens
    DataOutputBuffer dob = new DataOutputBuffer();
    ts.writeTokenStorageToStream(dob);
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

    // Setup the command to run the AM
    List<String> vargs = new ArrayList<String>(8);
    vargs.add(Environment.JAVA_HOME.$() + "/bin/java");

    // TODO: why do we use 'conf' some places and 'jobConf' others?
    long logSize = TaskLog.getTaskLogLength(new JobConf(conf));
    String logLevel = jobConf.get(
        MRJobConfig.MR_AM_LOG_LEVEL, MRJobConfig.DEFAULT_MR_AM_LOG_LEVEL);
    MRApps.addLog4jSystemProperties(logLevel, logSize, vargs);

    // Check for Java Lib Path usage in MAP and REDUCE configs
    warnForJavaLibPath(conf.get(MRJobConfig.MAP_JAVA_OPTS,""), "map",
        MRJobConfig.MAP_JAVA_OPTS, MRJobConfig.MAP_ENV);
    warnForJavaLibPath(conf.get(MRJobConfig.MAPRED_MAP_ADMIN_JAVA_OPTS,""), "map",
        MRJobConfig.MAPRED_MAP_ADMIN_JAVA_OPTS, MRJobConfig.MAPRED_ADMIN_USER_ENV);
    warnForJavaLibPath(conf.get(MRJobConfig.REDUCE_JAVA_OPTS,""), "reduce",
        MRJobConfig.REDUCE_JAVA_OPTS, MRJobConfig.REDUCE_ENV);
    warnForJavaLibPath(conf.get(MRJobConfig.MAPRED_REDUCE_ADMIN_JAVA_OPTS,""), "reduce",
        MRJobConfig.MAPRED_REDUCE_ADMIN_JAVA_OPTS, MRJobConfig.MAPRED_ADMIN_USER_ENV);

    // Add AM admin command opts before user command opts
    // so that it can be overridden by user
    String mrAppMasterAdminOptions = conf.get(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS,
        MRJobConfig.DEFAULT_MR_AM_ADMIN_COMMAND_OPTS);
    warnForJavaLibPath(mrAppMasterAdminOptions, "app master",
        MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, MRJobConfig.MR_AM_ADMIN_USER_ENV);
    vargs.add(mrAppMasterAdminOptions);

    // Add AM user command opts
    String mrAppMasterUserOptions = conf.get(MRJobConfig.MR_AM_COMMAND_OPTS,
        MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS);
    warnForJavaLibPath(mrAppMasterUserOptions, "app master",
        MRJobConfig.MR_AM_COMMAND_OPTS, MRJobConfig.MR_AM_ENV);
    vargs.add(mrAppMasterUserOptions);

    vargs.add(MRJobConfig.APPLICATION_MASTER_CLASS);
    // Redirect AM stdout/stderr into the YARN container log directory.
    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
        Path.SEPARATOR + ApplicationConstants.STDOUT);
    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
        Path.SEPARATOR + ApplicationConstants.STDERR);

    Vector<String> vargsFinal = new Vector<String>(8);
    // Final command
    StringBuilder mergedCommand = new StringBuilder();
    for (CharSequence str : vargs) {
      mergedCommand.append(str).append(" ");
    }
    vargsFinal.add(mergedCommand.toString());

    LOG.debug("Command to launch container for ApplicationMaster is : "
        + mergedCommand);

    // Setup the CLASSPATH in environment
    // i.e. add { Hadoop jars, job jar, CWD } to classpath.
    Map<String, String> environment = new HashMap<String, String>();
    MRApps.setClasspath(environment, conf);

    // Setup the environment variables for Admin first
    MRApps.setEnvFromInputString(environment,
        conf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV));
    // Setup the environment variables (LD_LIBRARY_PATH, etc)
    MRApps.setEnvFromInputString(environment,
        conf.get(MRJobConfig.MR_AM_ENV));

    // Parse distributed cache
    MRApps.setupDistributedCache(jobConf, localResources);

    Map<ApplicationAccessType, String> acls
        = new HashMap<ApplicationAccessType, String>(2);
    acls.put(ApplicationAccessType.VIEW_APP, jobConf.get(
        MRJobConfig.JOB_ACL_VIEW_JOB, MRJobConfig.DEFAULT_JOB_ACL_VIEW_JOB));
    acls.put(ApplicationAccessType.MODIFY_APP, jobConf.get(
        MRJobConfig.JOB_ACL_MODIFY_JOB,
        MRJobConfig.DEFAULT_JOB_ACL_MODIFY_JOB));

    // Setup ContainerLaunchContext for AM container
    ContainerLaunchContext amContainer = BuilderUtils
        .newContainerLaunchContext(null, UserGroupInformation
            .getCurrentUser().getShortUserName(), capability, localResources,
            environment, vargsFinal, null, securityTokens, acls);

    // Set up the ApplicationSubmissionContext
    ApplicationSubmissionContext appContext =
        recordFactory.newRecordInstance(ApplicationSubmissionContext.class);
    appContext.setApplicationId(applicationId);                // ApplicationId
    appContext.setUser(                                        // User name
        UserGroupInformation.getCurrentUser().getShortUserName());
    appContext.setQueue(                                       // Queue name
        jobConf.get(JobContext.QUEUE_NAME,
        YarnConfiguration.DEFAULT_QUEUE_NAME));
    appContext.setApplicationName(                             // Job name
        jobConf.get(JobContext.JOB_NAME,
        YarnConfiguration.DEFAULT_APPLICATION_NAME));
    appContext.setCancelTokensWhenComplete(
        conf.getBoolean(MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN, true));
    appContext.setAMContainerSpec(amContainer);                // AM Container

    return appContext;
  }

  /** Delegates to the ResourceManager. */
  @Override
  public void setJobPriority(JobID arg0, String arg1) throws IOException,
      InterruptedException {
    resMgrDelegate.setJobPriority(arg0, arg1);
  }

  /** Delegates to the ResourceManager. */
  @Override
  public long getProtocolVersion(String arg0, long arg1) throws IOException {
    return resMgrDelegate.getProtocolVersion(arg0, arg1);
  }

  /** Not supported on YARN; token renewal/cancellation goes through Token itself. */
  @Override
  public long renewDelegationToken(Token<DelegationTokenIdentifier> arg0)
      throws IOException, InterruptedException {
    throw new UnsupportedOperationException("Use Token.renew instead");
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public Counters getJobCounters(JobID arg0) throws IOException,
      InterruptedException {
    return clientCache.getClient(arg0).getJobCounters(arg0);
  }

  /** Returns the configured job-history "done" directory prefix. */
  @Override
  public String getJobHistoryDir() throws IOException, InterruptedException {
    return JobHistoryUtils.getConfiguredHistoryServerDoneDirPrefix(conf);
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public JobStatus getJobStatus(JobID jobID) throws IOException,
      InterruptedException {
    JobStatus status = clientCache.getClient(jobID).getJobStatus(jobID);
    return status;
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
      int arg2) throws IOException, InterruptedException {
    return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public String[] getTaskDiagnostics(TaskAttemptID arg0) throws IOException,
      InterruptedException {
    return clientCache.getClient(arg0.getJobID()).getTaskDiagnostics(arg0);
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public TaskReport[] getTaskReports(JobID jobID, TaskType taskType)
      throws IOException, InterruptedException {
    return clientCache.getClient(jobID)
        .getTaskReports(jobID, taskType);
  }

  /**
   * Kills the job: a non-running job is killed directly via the RM;
   * otherwise the AM is asked to kill it and we poll for up to ~10s,
   * falling back to an RM-level application kill if the job never
   * reaches the KILLED state.
   */
  @Override
  public void killJob(JobID arg0) throws IOException, InterruptedException {
    /* check if the status is not running, if not send kill to RM */
    JobStatus status = clientCache.getClient(arg0).getJobStatus(arg0);
    if (status.getState() != JobStatus.State.RUNNING) {
      resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId());
      return;
    }

    try {
      /* send a kill to the AM */
      clientCache.getClient(arg0).killJob(arg0);
      long currentTimeMillis = System.currentTimeMillis();
      long timeKillIssued = currentTimeMillis;
      // Poll once a second, for at most 10 seconds, for the job to reach KILLED.
      while ((currentTimeMillis < timeKillIssued + 10000L) && (status.getState()
          != JobStatus.State.KILLED)) {
        try {
          Thread.sleep(1000L);
        } catch(InterruptedException ie) {
          /** interrupted, just break */
          break;
        }
        currentTimeMillis = System.currentTimeMillis();
        status = clientCache.getClient(arg0).getJobStatus(arg0);
      }
    } catch(IOException io) {
      // Best effort: the AM may already be gone; fall through to the RM kill below.
      LOG.debug("Error when checking for application status", io);
    }
    if (status.getState() != JobStatus.State.KILLED) {
      resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId());
    }
  }

  /** Task-level kill; goes through the cached per-job MR client proxy. */
  @Override
  public boolean killTask(TaskAttemptID arg0, boolean arg1) throws IOException,
      InterruptedException {
    return clientCache.getClient(arg0.getJobID()).killTask(arg0, arg1);
  }

  /** Queue ACLs are not enforced here; everyone is treated as an admin. */
  @Override
  public AccessControlList getQueueAdmins(String arg0) throws IOException {
    return new AccessControlList("*");
  }

  /** The RM stands in for the classic JobTracker and is always reported RUNNING. */
  @Override
  public JobTrackerStatus getJobTrackerStatus() throws IOException,
      InterruptedException {
    return JobTrackerStatus.RUNNING;
  }

  @Override
  public ProtocolSignature getProtocolSignature(String protocol,
      long clientVersion, int clientMethodsHash) throws IOException {
    return ProtocolSignature.getProtocolSignature(this, protocol, clientVersion,
        clientMethodsHash);
  }

  /** Job-level query; goes through the cached per-job MR client proxy. */
  @Override
  public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
      throws IOException {
    return clientCache.getClient(jobID).getLogFilePath(jobID, taskAttemptID);
  }

  /**
   * Logs a warning when {@code opts} sets {@code -Djava.library.path},
   * which can break hadoop native library loading; users should set
   * LD_LIBRARY_PATH via the corresponding env config instead.
   */
  private static void warnForJavaLibPath(String opts, String component,
      String javaConf, String envConf) {
    if (opts != null && opts.contains("-Djava.library.path")) {
      LOG.warn("Usage of -Djava.library.path in " + javaConf + " can cause " +
          "programs to no longer function if hadoop native libraries " +
          "are used. These values should be set as part of the " +
          "LD_LIBRARY_PATH in the " + component + " JVM env using " +
          envConf + " config settings.");
    }
  }
}
| |
/*
* Copyright 2015 Caplogic AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mappedbus;
import io.mappedbus.MappedBusConstants.Commit;
import io.mappedbus.MappedBusConstants.Length;
import io.mappedbus.MappedBusConstants.Rollback;
import io.mappedbus.MappedBusConstants.Structure;
import java.io.EOFException;
import java.io.IOException;
/**
 * Class for reading messages from the bus.
 * <p>
 * Messages can either be message based or byte array based.
 * <p>
 * The typical usage is as follows:
 * <pre>
 * {@code
 * // Construct a reader
 * MappedBusReader reader = new MappedBusReader("/tmp/test", 100000L, 32);
 * reader.open();
 *
 * // A: read messages as objects
 * while (true) {
 *    if (reader.next()) {
 *       int type = reader.readType();
 *       if (type == 0) {
 *          reader.readMessage(priceUpdate)
 *       }
 *    }
 * }
 *
 * // B: read messages as byte arrays
 * while (true) {
 *    if (reader.next()) {
 *       int length = reader.read(buffer, 0);
 *    }
 * }
 *
 * // Close the reader
 * reader.close();
 * }
 * </pre>
 */
public class MappedBusReader {

	// Number of consecutive polls of an uncommitted record before the
	// wall-clock timeout timer is started.
	protected static final long MAX_TIMEOUT_COUNT = 100;

	private final String fileName;

	private final long fileSize;

	private final int recordSize;

	private MemoryMappedFile mem;

	// Absolute position in the file of the next record to examine;
	// starts at the beginning of the data area.
	private long limit = Structure.Data;

	// The writer's limit value observed at open() time; used by hasRecovered().
	private long initialLimit;

	// Milliseconds to wait for a record's commit flag before rolling it back.
	private int maxTimeout = 2000;

	protected long timerStart;

	protected long timeoutCounter;

	// True when readType() has already been called for the current record,
	// so readMessage() must not advance past the header again.
	private boolean typeRead;

	/**
	 * Constructs a new reader.
	 *
	 * @param fileName the name of the memory mapped file
	 * @param fileSize the maximum size of the file
	 * @param recordSize the maximum size of a record (excluding status flags and meta data)
	 */
	public MappedBusReader(String fileName, long fileSize, int recordSize) {
		this.fileName = fileName;
		this.fileSize = fileSize;
		this.recordSize = recordSize;
	}

	/**
	 * Opens the reader.
	 *
	 * @throws IOException if there was a problem opening the file
	 */
	public void open() throws IOException {
		try {
			mem = new MemoryMappedFile(fileName, fileSize);
		} catch(Exception e) {
			throw new IOException("Unable to open the file: " + fileName, e);
		}
		// Snapshot the writer's current limit so hasRecovered() can tell when
		// all records that existed at open time have been consumed.
		initialLimit = mem.getLongVolatile(Structure.Limit);
	}

	/**
	 * Sets the time for a reader to wait for a record to be committed.
	 *
	 * When the timeout occurs the reader will mark the record as "rolled back" and
	 * the record is ignored.
	 *
	 * @param timeout the timeout in milliseconds
	 */
	public void setTimeout(int timeout) {
		this.maxTimeout = timeout;
	}

	/**
	 * Steps forward to the next record if there's one available.
	 *
	 * The method has a timeout for how long it will wait for the commit field to be set. When the timeout is
	 * reached it will set the roll back field and skip over the record.
	 *
	 * @return true, if there's a new record available, otherwise false
	 * @throws EOFException in case the end of the file was reached
	 */
	public boolean next() throws EOFException {
		if (limit >= fileSize) {
			throw new EOFException("End of file was reached");
		}
		// No record published beyond our current position yet.
		if (mem.getLongVolatile(Structure.Limit) <= limit) {
			return false;
		}
		byte commit = mem.getByteVolatile(limit);
		byte rollback = mem.getByteVolatile(limit + Length.Commit);
		if (rollback == Rollback.Set) {
			// Record was abandoned (by its writer or by a timed-out reader); skip it.
			limit += Length.RecordHeader + recordSize;
			timeoutCounter = 0;
			timerStart = 0;
			return false;
		}
		if (commit == Commit.Set) {
			timeoutCounter = 0;
			timerStart = 0;
			return true;
		}
		// Record exists but is not committed yet: count polls, and after
		// MAX_TIMEOUT_COUNT polls start a wall-clock timer. Once maxTimeout
		// elapses, mark the record rolled back and skip it.
		timeoutCounter++;
		if (timeoutCounter >= MAX_TIMEOUT_COUNT) {
			if (timerStart == 0) {
				timerStart = System.currentTimeMillis();
			} else {
				if (System.currentTimeMillis() - timerStart >= maxTimeout) {
					mem.putByteVolatile(limit + Length.Commit, Rollback.Set);
					limit += Length.RecordHeader + recordSize;
					timeoutCounter = 0;
					timerStart = 0;
					return false;
				}
			}
		}
		return false;
	}

	/**
	 * Reads the message type.
	 *
	 * Advances past the record's status flags and meta data; must be followed
	 * by {@link #readMessage(MappedBusMessage)} for the same record.
	 *
	 * @return the message type
	 */
	public int readType() {
		typeRead = true;
		limit += Length.StatusFlags;
		int type = mem.getInt(limit);
		limit += Length.Metadata;
		return type;
	}

	/**
	 * Reads the next message.
	 *
	 * @param message the message object to populate
	 * @return the message object
	 */
	public MappedBusMessage readMessage(MappedBusMessage message) {
		// Skip the header here unless readType() already did so for this record.
		if (!typeRead) {
			readType();
		}
		typeRead = false;
		message.read(mem, limit);
		limit += recordSize;
		return message;
	}

	/**
	 * Reads the next buffer of data.
	 *
	 * @param dst the input buffer
	 * @param offset the offset in the buffer of the first byte to read data into
	 * @return the length of the record that was read
	 */
	public int readBuffer(byte[] dst, int offset) {
		limit += Length.StatusFlags;
		int length = mem.getInt(limit);
		limit += Length.Metadata;
		mem.getBytes(limit, dst, offset, length);
		// Always advance by the full record slot, regardless of payload length.
		limit += recordSize;
		return length;
	}

	/**
	 * Indicates whether all records available when the reader was created have been read.
	 *
	 * @return true, if all records available from the start was read, otherwise false
	 */
	public boolean hasRecovered() {
		return limit >= initialLimit;
	}

	/**
	 * Closes the reader.
	 *
	 * @throws IOException if there was an error closing the file
	 */
	public void close() throws IOException {
		try {
			mem.unmap();
		} catch(Exception e) {
			throw new IOException("Unable to close the file", e);
		}
	}
}
| |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.thirdeye.detection.algorithm;
import com.linkedin.thirdeye.dataframe.DataFrame;
import com.linkedin.thirdeye.dataframe.DoubleSeries;
import com.linkedin.thirdeye.dataframe.StringSeries;
import com.linkedin.thirdeye.dataframe.util.MetricSlice;
import com.linkedin.thirdeye.datalayer.dto.DetectionConfigDTO;
import com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO;
import com.linkedin.thirdeye.detection.DataProvider;
import com.linkedin.thirdeye.detection.MockDataProvider;
import com.linkedin.thirdeye.detection.MockPipeline;
import com.linkedin.thirdeye.detection.MockPipelineLoader;
import com.linkedin.thirdeye.detection.MockPipelineOutput;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.linkedin.thirdeye.dataframe.util.DataFrameUtils.*;
public class DimensionWrapperTest {
// exploration
private static final String PROP_METRIC_URN = "metricUrn";
private static final String PROP_DIMENSIONS = "dimensions";
private static final String PROP_MIN_VALUE = "minValue";
private static final String PROP_MIN_CONTRIBUTION = "minContribution";
private static final String PROP_K = "k";
private static final String PROP_LOOKBACK = "lookback";
// prototyping
private static final String PROP_NESTED = "nested";
private static final String PROP_NESTED_METRIC_URNS = "nestedMetricUrns";
private static final String PROP_NESTED_METRIC_URN_KEY = "nestedMetricUrnKey";
private static final String PROP_CLASS_NAME = "className";
// values
private static final Long PROP_ID_VALUE = 1000L;
private static final String PROP_NAME_VALUE = "myName";
private static final String PROP_CLASS_NAME_VALUE = "MyClassName";
private static final Collection<String> PROP_NESTED_METRIC_URN_VALUES = Collections.singleton("thirdeye:metric:2");
private static final String PROP_NESTED_METRIC_URN_KEY_VALUE = "myMetricUrn";
private DataProvider provider;
private DimensionWrapper wrapper;
private List<MockPipeline> runs;
private List<MockPipelineOutput> outputs;
private DetectionConfigDTO config;
private Map<String, Object> properties;
private Map<String, Object> nestedProperties;
private Map<MetricSlice, DataFrame> aggregates;
@BeforeMethod
public void beforeMethod() {
this.aggregates = new HashMap<>();
this.aggregates.put(MetricSlice.from(1, 10, 15),
new DataFrame()
.addSeries("a", StringSeries.buildFrom("1", "1", "1", "1", "1", "2", "2", "2", "2", "2"))
.addSeries("b", StringSeries.buildFrom("1", "2", "1", "2", "3", "1", "2", "1", "2", "3"))
.addSeries(COL_VALUE, DoubleSeries.buildFrom(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
this.runs = new ArrayList<>();
this.outputs = new ArrayList<>();
this.provider = new MockDataProvider()
.setAggregates(this.aggregates)
.setLoader(new MockPipelineLoader(this.runs, this.outputs));
this.nestedProperties = new HashMap<>();
this.nestedProperties.put(PROP_CLASS_NAME, PROP_CLASS_NAME_VALUE);
this.nestedProperties.put("key", "value");
this.properties = new HashMap<>();
this.properties.put(PROP_METRIC_URN, "thirdeye:metric:1");
this.properties.put(PROP_DIMENSIONS, Arrays.asList("a", "b"));
this.properties.put(PROP_NESTED_METRIC_URN_KEY, PROP_NESTED_METRIC_URN_KEY_VALUE);
this.properties.put(PROP_NESTED_METRIC_URNS, PROP_NESTED_METRIC_URN_VALUES);
this.properties.put(PROP_NESTED, Collections.singletonList(this.nestedProperties));
this.properties.put(PROP_LOOKBACK, 0);
this.config = new DetectionConfigDTO();
this.config.setId(PROP_ID_VALUE);
this.config.setName(PROP_NAME_VALUE);
this.config.setProperties(this.properties);
}
@Test
public void testLookBack() throws Exception {
this.properties.put(PROP_LOOKBACK, "5");
this.properties.put(PROP_DIMENSIONS, Collections.singleton("b"));
this.wrapper = new DimensionWrapper(this.provider, this.config, 14, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 3);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:b%3D1", 14, 15));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:2:b%3D2", 14, 15));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:2:b%3D3", 14, 15));
}
@Test
public void testSingleDimension() throws Exception {
this.properties.put(PROP_DIMENSIONS, Collections.singleton("b"));
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 3);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:b%3D1"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:2:b%3D2"));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:2:b%3D3"));
}
@Test
public void testMultiDimension() throws Exception {
this.properties.put(PROP_DIMENSIONS, Arrays.asList("a", "b"));
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 6);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:a%3D1:b%3D1"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:2:a%3D1:b%3D2"));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:2:a%3D1:b%3D3"));
assertEquals(this.runs.get(3), makePipeline("thirdeye:metric:2:a%3D2:b%3D1"));
assertEquals(this.runs.get(4), makePipeline("thirdeye:metric:2:a%3D2:b%3D2"));
assertEquals(this.runs.get(5), makePipeline("thirdeye:metric:2:a%3D2:b%3D3"));
}
@Test
public void testMinValue() throws Exception {
this.properties.put(PROP_DIMENSIONS, Collections.singleton("b"));
this.properties.put(PROP_MIN_VALUE, 16.0d);
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 2);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:b%3D1"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:2:b%3D2"));
}
@Test
public void testMinContribution() throws Exception {
this.properties.put(PROP_DIMENSIONS, Collections.singleton("b"));
this.properties.put(PROP_MIN_CONTRIBUTION, 0.40d);
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 1);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:b%3D2"));
}
@Test
public void testTopK() throws Exception {
this.properties.put(PROP_DIMENSIONS, Arrays.asList("a", "b"));
this.properties.put(PROP_K, 4);
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 4);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:2:a%3D2:b%3D2"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:2:a%3D2:b%3D1"));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:2:a%3D2:b%3D3"));
assertEquals(this.runs.get(3), makePipeline("thirdeye:metric:2:a%3D1:b%3D2"));
}
@Test
public void testNestedMetricsOnly() throws Exception {
this.properties.remove(PROP_METRIC_URN);
this.properties.remove(PROP_DIMENSIONS);
this.properties.put(PROP_NESTED_METRIC_URNS, Arrays.asList("thirdeye:metric:10", "thirdeye:metric:11", "thirdeye:metric:12"));
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 3);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:10"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:11"));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:12"));
}
@Test
public void testNestedMetricsAndDimensions() throws Exception {
this.properties.put(PROP_DIMENSIONS, Collections.singleton("b"));
this.properties.put(PROP_MIN_VALUE, 16.0d);
this.properties.put(PROP_NESTED_METRIC_URNS, Arrays.asList("thirdeye:metric:10", "thirdeye:metric:11"));
this.wrapper = new DimensionWrapper(this.provider, this.config, 10, 15);
this.wrapper.run();
Assert.assertEquals(this.runs.size(), 4);
assertEquals(this.runs.get(0), makePipeline("thirdeye:metric:10:b%3D1"));
assertEquals(this.runs.get(1), makePipeline("thirdeye:metric:10:b%3D2"));
assertEquals(this.runs.get(2), makePipeline("thirdeye:metric:11:b%3D1"));
assertEquals(this.runs.get(3), makePipeline("thirdeye:metric:11:b%3D2"));
}
private DetectionConfigDTO makeConfig(String metricUrn) {
Map<String, Object> properties = new HashMap<>(this.nestedProperties);
properties.put(PROP_NESTED_METRIC_URN_KEY_VALUE, metricUrn);
DetectionConfigDTO config = new DetectionConfigDTO();
config.setId(this.config.getId());
config.setName(this.config.getName());
config.setProperties(properties);
return config;
}
private MockPipeline makePipeline(String metricUrn) {
return makePipeline(metricUrn, 10, 15);
}
private MockPipeline makePipeline(String metricUrn, long startTime, long endTime) {
return new MockPipeline(this.provider, makeConfig(metricUrn), startTime, endTime,
new MockPipelineOutput(Collections.<MergedAnomalyResultDTO>emptyList(), -1));
}
private static void assertEquals(MockPipeline a, MockPipeline b) {
Assert.assertEquals(a, b);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertTrue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestAddColumnFamilyProcedure {
  private static final Log LOG = LogFactory.getLog(TestAddColumnFamilyProcedure.class);

  // Shared mini-cluster for every test in this class; started once in setupCluster().
  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  private static void setupConf(Configuration conf) {
    // A single procedure-executor thread keeps procedure scheduling deterministic,
    // which the recovery/rollback tests below rely on.
    conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
  }
  @BeforeClass
  public static void setupCluster() throws Exception {
    setupConf(UTIL.getConfiguration());
    UTIL.startMiniCluster(1);
  }
  @AfterClass
  public static void cleanupTest() throws Exception {
    try {
      UTIL.shutdownMiniCluster();
    } catch (Exception e) {
      // A shutdown failure should not fail the suite; log and move on.
      LOG.warn("failure shutting down cluster", e);
    }
  }
  @Before
  public void setup() throws Exception {
    // Ensure the kill/toggle fault injection is disabled before each test.
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false);
  }
  @After
  public void tearDown() throws Exception {
    // Disable fault injection again (recovery tests turn it on), then drop all
    // tables so each test starts from a clean cluster.
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false);
    for (HTableDescriptor htd: UTIL.getHBaseAdmin().listTables()) {
      LOG.info("Tear down, remove table=" + htd.getTableName());
      UTIL.deleteTable(htd.getTableName());
    }
  }

  /** Adds a column family to an enabled table, then another to the same table while disabled. */
  @Test(timeout = 60000)
  public void testAddColumnFamily() throws Exception {
    final TableName tableName = TableName.valueOf("testAddColumnFamily");
    final String cf1 = "cf1";
    final String cf2 = "cf2";
    final HColumnDescriptor columnDescriptor1 = new HColumnDescriptor(cf1);
    final HColumnDescriptor columnDescriptor2 = new HColumnDescriptor(cf2);
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();

    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f3");

    // Test 1: Add a column family online
    long procId1 =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor1));
    // Wait the completion
    ProcedureTestingUtility.waitProcedure(procExec, procId1);
    ProcedureTestingUtility.assertProcNotFailed(procExec, procId1);
    MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(),
      tableName, cf1);

    // Test 2: Add a column family offline
    UTIL.getHBaseAdmin().disableTable(tableName);
    long procId2 =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor2));
    // Wait the completion
    ProcedureTestingUtility.waitProcedure(procExec, procId2);
    ProcedureTestingUtility.assertProcNotFailed(procExec, procId2);
    MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(),
      tableName, cf2);
  }

  /** Adding an already-existing column family must fail with InvalidFamilyOperationException, online and offline. */
  @Test(timeout=60000)
  public void testAddSameColumnFamilyTwice() throws Exception {
    final TableName tableName = TableName.valueOf("testAddColumnFamilyTwice");
    final String cf2 = "cf2";
    final HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf2);
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();

    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1");

    // add the column family
    long procId1 =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));
    // Wait the completion
    ProcedureTestingUtility.waitProcedure(procExec, procId1);
    ProcedureTestingUtility.assertProcNotFailed(procExec, procId1);
    MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(),
      tableName, cf2);

    // add the column family that exists
    long procId2 =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));
    // Wait the completion
    ProcedureTestingUtility.waitProcedure(procExec, procId2);

    // Second add should fail with InvalidFamilyOperationException
    ProcedureResult result = procExec.getResult(procId2);
    assertTrue(result.isFailed());
    LOG.debug("Add failed with exception: " + result.getException());
    assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);

    // Do the same add the existing column family - this time offline
    UTIL.getHBaseAdmin().disableTable(tableName);
    long procId3 =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));
    // Wait the completion
    ProcedureTestingUtility.waitProcedure(procExec, procId3);
    // Second add should fail with InvalidFamilyOperationException
    result = procExec.getResult(procId3);
    assertTrue(result.isFailed());
    LOG.debug("Add failed with exception: " + result.getException());
    assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
  }

  /** Kills the executor before each store update and verifies the procedure completes on a disabled table. */
  @Test(timeout = 60000)
  public void testRecoveryAndDoubleExecutionOffline() throws Exception {
    final TableName tableName = TableName.valueOf("testRecoveryAndDoubleExecutionOffline");
    final String cf4 = "cf4";
    final HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf4);
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();

    // create the table
    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", "f2", "f3");
    UTIL.getHBaseAdmin().disableTable(tableName);
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);

    // Start the AddColumnFamily procedure && kill the executor
    long procId =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));

    // Restart the executor and execute the step twice
    int numberOfSteps = AddColumnFamilyState.values().length;
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId, numberOfSteps,
      AddColumnFamilyState.values());

    MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(),
      tableName, cf4);
  }

  /** Same recovery scenario as above, but against an enabled (online) table. */
  @Test(timeout = 60000)
  public void testRecoveryAndDoubleExecutionOnline() throws Exception {
    final TableName tableName = TableName.valueOf("testRecoveryAndDoubleExecutionOnline");
    final String cf5 = "cf5";
    final HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf5);
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();

    // create the table
    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", "f2", "f3");
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);

    // Start the AddColumnFamily procedure && kill the executor
    long procId =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));

    // Restart the executor and execute the step twice
    int numberOfSteps = AddColumnFamilyState.values().length;
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId, numberOfSteps,
      AddColumnFamilyState.values());

    MasterProcedureTestingUtility.validateColumnFamilyAddition(UTIL.getHBaseCluster().getMaster(),
      tableName, cf5);
  }

  /** Aborts the procedure mid-way and verifies rollback removes the half-added family. */
  @Test(timeout = 60000)
  public void testRollbackAndDoubleExecution() throws Exception {
    final TableName tableName = TableName.valueOf("testRollbackAndDoubleExecution");
    final String cf6 = "cf6";
    final HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf6);
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();

    // create the table
    MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f1", "f2");
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);

    // Start the AddColumnFamily procedure && kill the executor
    long procId =
        procExec.submitProcedure(new AddColumnFamilyProcedure(procExec.getEnvironment(), tableName,
          columnDescriptor));

    int numberOfSteps = AddColumnFamilyState.values().length - 2; // failing in the middle of proc
    MasterProcedureTestingUtility.testRollbackAndDoubleExecution(procExec, procId, numberOfSteps,
      AddColumnFamilyState.values());

    // After rollback the family must be gone.
    MasterProcedureTestingUtility.validateColumnFamilyDeletion(UTIL.getHBaseCluster().getMaster(),
      tableName, cf6);
  }

  private ProcedureExecutor<MasterProcedureEnv> getMasterProcedureExecutor() {
    return UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.cassandra.io.util.File;
import org.apache.commons.io.FileUtils;
import org.junit.BeforeClass;
import org.slf4j.LoggerFactory;
import static org.apache.cassandra.utils.FBUtilities.preventIllegalAccessWarnings;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Helper class for running a tool and doing in-process checks: verifies that an
 * offline tool did not start unexpected threads and did not pull server-side
 * classes (schema, keyspace, transport) into the JVM.
 */
public abstract class OfflineToolUtils
{
    static
    {
        preventIllegalAccessWarnings();
    }

    // Snapshot of the JVM's threads taken before any tool runs (see setupTester()).
    private static List<ThreadInfo> initialThreads;

    // Regex patterns of thread names that MAY legitimately appear after a tool
    // that loads schema has executed.
    static final String[] OPTIONAL_THREADS_WITH_SCHEMA = {
    "ScheduledTasks:[1-9]",
    "ScheduledFastTasks:[1-9]",
    "OptionalTasks:[1-9]",
    "Reference-Reaper",
    "LocalPool-Cleaner(-networking|-chunk-cache)",
    "CacheCleanupExecutor:[1-9]",
    "CompactionExecutor:[1-9]",
    "ValidationExecutor:[1-9]",
    "NonPeriodicTasks:[1-9]",
    "Sampler:[1-9]",
    "SecondaryIndexManagement:[1-9]",
    "Strong-Reference-Leak-Detector:[1-9]",
    "Background_Reporter:[1-9]",
    "EXPIRING-MAP-REAPER:[1-9]",
    "ObjectCleanerThread",
    "process reaper", // spawned by the jvm when executing external processes
                      // and may still be active when we check
    };

    /**
     * Asserts that no threads beyond the initial snapshot are alive, except
     * those matching one of the given optional name patterns.
     *
     * @param optionalThreadNames regex patterns of tolerated thread names; may be null
     */
    public void assertNoUnexpectedThreadsStarted(String[] optionalThreadNames)
    {
        ThreadMXBean threads = ManagementFactory.getThreadMXBean();
        Set<String> initial = initialThreads
                              .stream()
                              .map(ThreadInfo::getThreadName)
                              .collect(Collectors.toSet());
        // getThreadInfo may return null entries for threads that died in between.
        Set<String> current = Arrays.stream(threads.getThreadInfo(threads.getAllThreadIds()))
                                    .filter(Objects::nonNull)
                                    .map(ThreadInfo::getThreadName)
                                    .collect(Collectors.toSet());
        List<Pattern> optional = optionalThreadNames != null
                                 ? Arrays.stream(optionalThreadNames).map(Pattern::compile).collect(Collectors.toList())
                                 : Collections.emptyList();
        current.removeAll(initial);
        // Anything left after removing initial and optional threads is unexpected.
        Set<String> remain = current.stream()
                                    .filter(threadName -> optional.stream().noneMatch(pattern -> pattern.matcher(threadName).matches()))
                                    .collect(Collectors.toSet());
        if (!remain.isEmpty())
            System.err.println("Unexpected thread names: " + remain);
        assertTrue("Wrong thread status, active threads unaccounted for: " + remain, remain.isEmpty());
    }
    public void assertSchemaNotLoaded()
    {
        assertClassNotLoaded("org.apache.cassandra.schema.Schema");
    }
    public void assertSchemaLoaded()
    {
        assertClassLoaded("org.apache.cassandra.schema.Schema");
    }
    public void assertKeyspaceNotLoaded()
    {
        assertClassNotLoaded("org.apache.cassandra.db.Keyspace");
    }
    public void assertKeyspaceLoaded()
    {
        assertClassLoaded("org.apache.cassandra.db.Keyspace");
    }
    public void assertServerNotLoaded()
    {
        assertClassNotLoaded("org.apache.cassandra.transport.Server");
    }
    public void assertSystemKSNotLoaded()
    {
        assertClassNotLoaded("org.apache.cassandra.db.SystemKeyspace");
    }
    public void assertCLSMNotLoaded()
    {
        assertClassNotLoaded("org.apache.cassandra.db.commitlog.CommitLogSegmentManager");
    }
    public void assertClassLoaded(String clazz)
    {
        assertClassLoadedStatus(clazz, true);
    }
    public void assertClassNotLoaded(String clazz)
    {
        assertClassLoadedStatus(clazz, false);
    }

    /**
     * Walks the context class-loader chain and checks (via the protected
     * {@code ClassLoader.findLoadedClass}, reached reflectively) whether
     * {@code clazz} has been loaded without triggering its loading.
     *
     * If {@code expected} is true, passes as soon as one loader has it and
     * fails after the loop if none did; if false, fails at the first loader
     * that has it.
     */
    private void assertClassLoadedStatus(String clazz, boolean expected)
    {
        for (ClassLoader cl = Thread.currentThread().getContextClassLoader(); cl != null; cl = cl.getParent())
        {
            try
            {
                // findLoadedClass is protected; reflection is the only way to
                // query load status without forcing the class to load.
                Method mFindLoadedClass = ClassLoader.class.getDeclaredMethod("findLoadedClass", String.class);
                mFindLoadedClass.setAccessible(true);
                boolean loaded = mFindLoadedClass.invoke(cl, clazz) != null;
                if (expected)
                {
                    if (loaded)
                        return;
                }
                else
                    assertFalse(clazz + " has been loaded", loaded);
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }
        if (expected)
            fail(clazz + " has not been loaded");
    }
    @BeforeClass
    public static void setupTester()
    {
        System.setProperty("cassandra.partitioner", "org.apache.cassandra.dht.Murmur3Partitioner");

        // may start an async appender
        LoggerFactory.getLogger(OfflineToolUtils.class);

        // Record the baseline thread set before any tool executes.
        ThreadMXBean threads = ManagementFactory.getThreadMXBean();
        initialThreads = Arrays.asList(threads.getThreadInfo(threads.getAllThreadIds()));
    }

    /**
     * Returns the absolute path of one "-Data.db" sstable file for the given
     * keyspace/table.
     * NOTE(review): assumes at least one matching file exists; index [0] will
     * throw if the directory is empty — confirm fixtures always provide one.
     */
    public static String findOneSSTable(String ks, String cf) throws IOException
    {
        File cfDir = sstableDir(ks, cf);
        File[] sstableFiles = cfDir.tryList((file) -> file.isFile() && file.name().endsWith("-Data.db"));
        return sstableFiles[0].absolutePath();
    }
    public static String sstableDirName(String ks, String cf) throws IOException
    {
        return sstableDir(ks, cf).absolutePath();
    }

    /**
     * Locates the table directory (named {@code cf} or {@code cf-<id>}) inside
     * the copied test data. Assumes exactly one match; see findOneSSTable note.
     */
    public static File sstableDir(String ks, String cf) throws IOException
    {
        File dataDir = copySSTables();
        File ksDir = new File(dataDir, ks);
        File[] cfDirs = ksDir.tryList((dir, name) -> cf.equals(name) || name.startsWith(cf + '-'));
        return cfDirs[0];
    }

    /** Copies the legacy sstable fixtures into the test data directory and returns it. */
    public static File copySSTables() throws IOException
    {
        File dataDir = new File("build/test/cassandra/data");
        File srcDir = new File("test/data/legacy-sstables/ma");
        FileUtils.copyDirectory(new File(srcDir, "legacy_tables").toJavaIOFile(), new File(dataDir, "legacy_sstables").toJavaIOFile());
        return dataDir;
    }

    /** Standard post-test environment check for tools that are expected to load schema. */
    protected void assertCorrectEnvPostTest()
    {
        assertNoUnexpectedThreadsStarted(OPTIONAL_THREADS_WITH_SCHEMA);
        assertSchemaLoaded();
        assertServerNotLoaded();
    }
}
| |
/* $Id$
*
* @license
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.almende.dsol.example.datacenters;
import io.coala.dsol.util.DsolUtil;
import io.coala.log.LogUtil;
import java.rmi.RemoteException;
import java.util.Map;
import java.util.NavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import nl.tudelft.simulation.dsol.SimRuntimeException;
import nl.tudelft.simulation.dsol.experiment.TimeUnitInterface;
import nl.tudelft.simulation.dsol.formalisms.eventscheduling.SimEvent;
import nl.tudelft.simulation.event.EventInterface;
import nl.tudelft.simulation.event.EventListenerInterface;
import nl.tudelft.simulation.jstats.distributions.DistConstant;
import nl.tudelft.simulation.jstats.distributions.DistContinuous;
import nl.tudelft.simulation.jstats.streams.StreamInterface;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.Interval;
/**
* Datacenter
*/
public class Datacenter extends AbstractFederationModelComponent implements
Comparable<Datacenter> {
/** */
private static final long serialVersionUID = 1L;
/** */
private static final Logger LOG = LogUtil.getLogger(Datacenter.class);
/** */
public static int ID_COUNT = 0;
/** DC Energy Emissions (kg) per kWh */
private final double emissionsPerKWh;// 1.66d, 5.0, 8.33
/** DC Energy Costs (EUR) per kWh */
private final double cashPerKWh;// = 0.1
/** service power (Watt) = energy (J) over time (s) */
private final DistContinuous consumptionKW;
/** service cash flow (EUR) per hour */
private final DistContinuous hourlyCashflow;
/** DC cash flow (EUR) */
private final Accumulator cashflow;
/** DC emissions (kg CO2) */
private final Accumulator emissionsKgCO2;
/** DC power consumption (KWh) */
private final Accumulator consumptionKWh;
/** DC current service count */
private final Indicator currentServiceCount;
/** DC total service count */
private final Indicator totalServiceCount;
/** DC total service count */
public final Indicator exchangedServiceCount;
/** DC total service count */
public final Indicator workloadHours;
/** DC total service count */
public final Indicator adaptionHours;
/** */
public final Indicator emissionFactor;
/** */
public final Indicator consumptionFactor;
/** */
protected final NavigableMap<DateTime, Interval> outsourceSchedule = new ConcurrentSkipListMap<>();
/** */
protected final NavigableMap<DateTime, Interval> insourceSchedule = new ConcurrentSkipListMap<>();
/**
* @param model
* @throws RemoteException
* @throws SimRuntimeException
*/
public Datacenter(final FederationModel model) throws Exception {
super(model,"DC" + ID_COUNT++);
final String indicatorName = getName();
this.cashflow = new Accumulator(0d, model, "Cash (flow)",
"Money (EUR)", indicatorName, 0d, TimeUnitInterface.HOUR);
this.emissionsKgCO2 = new Accumulator(0d, model,
"Carbon Emissions (rate)", "CO2 (kg/kW)", indicatorName, 0d,
TimeUnitInterface.HOUR);
// idle (kW) 20 servers*100 watt(idle)=2 kw (idle * consumption)
this.consumptionKWh = new Accumulator(0d, model,
"Energy Consumption (rate)", "Energy (kW)", indicatorName, 2d,
TimeUnitInterface.HOUR);
this.currentServiceCount = new Indicator(model, "Current Services",
"Services (#)", indicatorName, 0d);
this.totalServiceCount = new Indicator(model, "Total Services",
"Services (#)", indicatorName, 0d);
this.exchangedServiceCount = new Indicator(model, "Exchanges (#)",
"Exchanges (#)", indicatorName, 0d);
this.workloadHours = new Indicator(model, "Workload Duration",
"Hours (#)", indicatorName, 0d);
this.adaptionHours = new Indicator(model, "Adaption Duration",
"Hours (#)", indicatorName, 0d);
this.emissionFactor = new Indicator(model, "Power Mix Variation",
"Carbon Emission factor", indicatorName, 1d);
this.consumptionFactor = new Indicator(model, "Power Adaption",
"Energy Consumption factor", indicatorName, 1d);
final StreamInterface stream = model.getSimulator().getReplication()
.getStream(FederationModel.RNG_ID);
this.cashPerKWh = 0.1d;// new DistConstant(stream, .1d);
this.emissionsPerKWh = 5.0d;// new DistConstant(stream, 5.0d);
this.consumptionKW = new DistConstant(stream, .1d);
this.hourlyCashflow = new DistConstant(stream, .1d);
// add demand and listen for new demand events
final DemandGenerator demand = new DemandGenerator(model);
demand.addListener(new EventListenerInterface() {
@Override
public void notify(final EventInterface event)
throws RemoteException {
try {
newService((Interval) event.getContent());
} catch (final SimRuntimeException e) {
LOG.error("Problem scheduling new service", e);
}
}
}, DemandGenerator.NEW_DEMAND);
}
/* @see FederationModelComponent#getName() */
@Override
public String toString() {
return getName();
}
public Accumulator getCashflow() {
return this.cashflow;
}
public Accumulator getEmissionsKgCO2() {
return this.emissionsKgCO2;
}
public Accumulator getConsumptionKWh() {
return this.consumptionKWh;
}
public Indicator getCurrentServiceCount() {
return this.currentServiceCount;
}
public Indicator getTotalServiceCount() {
return this.totalServiceCount;
}
private boolean isSavingsMode(final DateTime date) {
final Map.Entry<DateTime, Interval> lastBefore = this.outsourceSchedule
.floorEntry(date);
if (lastBefore != null && lastBefore.getValue().contains(date))
return true;
final Map.Entry<DateTime, Interval> firstAfter = this.outsourceSchedule
.ceilingEntry(date);
if (firstAfter != null && firstAfter.getValue().contains(date))
return true;
return false;
}
private boolean isExtraMode(final DateTime date) {
final Map.Entry<DateTime, Interval> lastBefore = this.insourceSchedule
.floorEntry(date);
if (lastBefore != null && lastBefore.getValue().contains(date))
return true;
final Map.Entry<DateTime, Interval> firstAfter = this.insourceSchedule
.ceilingEntry(date);
if (firstAfter != null && firstAfter.getValue().contains(date))
return true;
return false;
}
protected void newService(final Interval interval) throws RemoteException,
SimRuntimeException {
if (getModel().hasRemoteAllocation()) {
if (isSavingsMode(interval.getStart())) {
if (getModel().hasBroker())
for (Datacenter dc : getModel().getDatacenters()) {
if (dc == this)
continue;
if (dc.isExtraMode(interval.getStart())) {
newService(interval, .1, .1);
dc.newService(interval, 1.0, .9);
// LOG.trace(getName() + " Outsourced to EXTRA dc "
// + interval);
return;
}
}
for (Datacenter dc : getModel().getDatacenters()) {
if (dc == this)
continue;
if (!dc.isSavingsMode(interval.getStart())) {
newService(interval, .1, .1);
dc.newService(interval, 1.0, .9);
return;
}
}
LOG.trace(getName() + " Could not remotely allocate service "
+ interval);
}
// else
// LOG.trace(getName()
// + " Running locally during EXTRA mode, service: "
// + interval);
}
// else
// LOG.trace(getName()
// + " Running locally during REGULAR mode, service: "
// + interval);
newService(interval, 1.0, 1.0);
newService(interval, 0.0, 0.0);
}
protected void newService(final Interval interval,
final double consumptionFactor, final double cashFlowFactor)
throws RemoteException, SimRuntimeException {
final Service service = new Service(getModel(), this, interval,
this.consumptionKW.draw() * consumptionFactor,
this.hourlyCashflow.draw() * cashFlowFactor);
final Interval cropped = DsolUtil.crop(interval, getTreatment());
this.workloadHours.addValue(DsolUtil.toTimeUnit(
TimeUnitInterface.HOUR, cropped.toDurationMillis(),
TimeUnitInterface.MILLISECOND));
// listen for service start and stop events
service.addListener(new EventListenerInterface() {
@Override
public void notify(final EventInterface event) {
if (consumptionFactor == 0.0)
return;
getCurrentServiceCount().addValue(1);
getTotalServiceCount().addValue(1);
if (consumptionFactor < 1.0)
exchangedServiceCount.addValue(1);
}
}, Service.SERVICE_STARTED);
service.addListener(new EventListenerInterface() {
@Override
public void notify(final EventInterface event) {
getCurrentServiceCount().addValue(-1);
}
}, Service.SERVICE_COMPLETED);
service.addListener(new EventListenerInterface() {
@Override
public void notify(final EventInterface event) {
final Number kW = (Number) event.getContent();
getConsumptionKWh().addRate(
kW.doubleValue()
* Datacenter.this.consumptionFactor.getValue()
.doubleValue());
getEmissionsKgCO2().addRate(
kW.doubleValue()
* Datacenter.this.consumptionFactor.getValue()
.doubleValue()
* Datacenter.this.emissionsPerKWh
* Datacenter.this.emissionFactor.getValue()
.doubleValue());
getCashflow().addRate(
kW.doubleValue() * Datacenter.this.cashPerKWh);
}
}, Service.CONSUMPTION_CHANGED);
service.addListener(new EventListenerInterface() {
@Override
public void notify(final EventInterface event) {
getCashflow().addRate((Number) event.getContent());
}
}, Service.CASHFLOW_CHANGED);
}
protected void newAdaption(final Interval interval,
final double consumptionFactor) throws RemoteException,
SimRuntimeException {
if (consumptionFactor > 1.0)
this.outsourceSchedule.put(interval.getStart(), interval);
else if (consumptionFactor < 1.0)
this.insourceSchedule.put(interval.getStart(), interval);
final double startTime = simTime(interval.getStart());
final double endTime = simTime(interval.getEnd());
this.adaptionHours.addValue(DsolUtil.toTimeUnit(
TimeUnitInterface.HOUR, interval.toDuration().getMillis(),
TimeUnitInterface.MILLISECOND));
getSimulator().scheduleEvent(
new SimEvent(startTime, this, this,
APPLY_CONSUMPTION_FACTOR_METHOD_ID, new Object[] {
consumptionFactor, false }));
getSimulator().scheduleEvent(
new SimEvent(endTime, this, this,
APPLY_CONSUMPTION_FACTOR_METHOD_ID, new Object[] {
1.0 / consumptionFactor, true }));
}
protected void newPowermix(final Interval interval,
final double emissionFactor) throws RemoteException,
SimRuntimeException {
final double startTime = simTime(interval.getStart());
final double endTime = simTime(interval.getEnd());
getSimulator().scheduleEvent(
new SimEvent(startTime, this, this,
APPLY_EMISSION_FACTOR_METHOD_ID, new Object[] {
emissionFactor, false }));
getSimulator().scheduleEvent(
new SimEvent(endTime, this, this,
APPLY_EMISSION_FACTOR_METHOD_ID, new Object[] {
1.0 / emissionFactor, true }));
}
// Boxed 1.0 used to reset a factor back to neutral.
private static final Number ONE = 1.0d;
// Reflective method id referenced by SimEvents scheduled in newAdaption;
// must match this method's name exactly.
private static final String APPLY_CONSUMPTION_FACTOR_METHOD_ID = "applyConsumptionFactor";

/**
 * Multiplies the current consumption rate by {@code consumptionFactor} and
 * records the factor now in effect. Invoked reflectively by the simulator.
 *
 * @param consumptionFactor multiplicative factor to apply to the rate
 * @param reset             when {@code true} the stored factor is reset to 1;
 *                          the rate is still multiplied, so passing the
 *                          reciprocal with {@code reset=true} undoes a
 *                          previously applied factor
 */
protected void applyConsumptionFactor(final double consumptionFactor,
        final boolean reset) {
    getConsumptionKWh().multiplyRate(consumptionFactor);
    this.consumptionFactor.setValue(reset ? ONE : consumptionFactor);
    // TODO live (re)migration of some active workload?
}
// Reflective method id referenced by SimEvents scheduled in newPowermix;
// must match this method's name exactly.
private static final String APPLY_EMISSION_FACTOR_METHOD_ID = "applyEmissionFactor";

/**
 * Multiplies the current emission rate by {@code emissionFactor} and records
 * the factor now in effect. Invoked reflectively by the simulator.
 *
 * @param emissionFactor multiplicative factor to apply to the rate
 * @param reset          when {@code true} the stored factor is reset to 1;
 *                       the rate is still multiplied, so passing the
 *                       reciprocal with {@code reset=true} undoes a
 *                       previously applied factor
 */
protected void applyEmissionFactor(final double emissionFactor,
        final boolean reset) {
    getEmissionsKgCO2().multiplyRate(emissionFactor);
    this.emissionFactor.setValue(reset ? ONE : emissionFactor);
}
/**
 * Orders datacenters alphabetically by name.
 * NOTE(review): assumes names are unique; two datacenters with the same name
 * would compare as equal — confirm against how instances are keyed.
 */
@Override
public int compareTo(final Datacenter o) {
    return getName().compareTo(o.getName());
}
}
| |
import java.awt.Point;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import org.jnativehook.GlobalScreen;
import org.jnativehook.NativeHookException;
import org.jnativehook.keyboard.NativeKeyEvent;
import org.jnativehook.keyboard.NativeKeyListener;
import org.jnativehook.mouse.NativeMouseEvent;
import org.jnativehook.mouse.NativeMouseMotionListener;
import argo.jdom.JsonNode;
import argo.jdom.JsonNodeFactories;
import argo.jdom.JsonRootNode;
import argo.jdom.JsonStringNode;
/**
 * Records global mouse movements while the left Control key is held down and
 * POSTs the captured points as JSON ({@code {"data": [[x,y], ...]}}) to a
 * local recognition service at {@code http://localhost:8000}, then reports
 * the recognized symbol on the console and in a dialog.
 *
 * <p>Usage: {@code java PostClass [durationMillis]} — the optional argument
 * overrides {@link #DURATION} and must be at least 100.
 */
public class PostClass {

    /** True while the left Control key is held and points are being recorded. */
    private static boolean enabled;

    /**
     * Duration parameter in milliseconds, overridable via {@code args[0]}.
     * NOTE(review): only validated and printed — the capture window below is a
     * fixed 20-second sleep; confirm whether DURATION was meant to drive it.
     */
    private static int DURATION = 1000;

    public static void main(String[] args) throws InterruptedException, NativeHookException, IOException {
        if (args.length > 0) {
            DURATION = Integer.parseInt(args[0]);
            System.out.println("Applied duration = " + DURATION);
            if (DURATION < 100) {
                System.out.println("Duration too small");
                return;
            }
        }
        final Queue<Point> ps = new ConcurrentLinkedQueue<>();
        // Get the logger for "org.jnativehook" and set the level to off.
        Logger logger = Logger.getLogger(GlobalScreen.class.getPackage().getName());
        logger.setLevel(Level.OFF);
        // Change the level for all handlers attached to the default logger.
        Handler[] handlers = Logger.getLogger("").getHandlers();
        for (int i = 0; i < handlers.length; i++) {
            handlers[i].setLevel(Level.OFF);
        }
        GlobalScreen.registerNativeHook();
        GlobalScreen.addNativeKeyListener(new NativeKeyListener() {
            @Override
            public void nativeKeyTyped(NativeKeyEvent arg0) {
                // not used
            }

            @Override
            public void nativeKeyReleased(NativeKeyEvent arg0) {
                // Releasing left Control ends a capture: stop recording and
                // post whatever was collected.
                if (arg0.getKeyCode() == NativeKeyEvent.VC_CONTROL_L) {
                    enabled = false;
                    try {
                        doIt(ps);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }

            @Override
            public void nativeKeyPressed(NativeKeyEvent arg0) {
                if (enabled) {
                    return; // already recording; ignore key auto-repeat
                }
                if (arg0.getKeyCode() == NativeKeyEvent.VC_CONTROL_L) {
                    enabled = true;
                }
            }
        });
        GlobalScreen.addNativeMouseMotionListener(new NativeMouseMotionListener() {
            @Override
            public void nativeMouseMoved(NativeMouseEvent arg0) {
                if (enabled) {
                    ps.add(new Point(arg0.getX(), arg0.getY()));
                }
            }

            @Override
            public void nativeMouseDragged(NativeMouseEvent arg0) {
                // not used
            }
        });
        // Capture window: hooks stay registered for 20 seconds.
        Thread.sleep(20000);
        GlobalScreen.unregisterNativeHook();
    }

    /**
     * Drains the captured points from {@code ps} and POSTs them wrapped in a
     * {@code {"data": [...]}} JSON object.
     */
    private static void doIt(final Queue<Point> ps) throws IOException {
        final int size = ps.size();
        List<JsonNode> points = new ArrayList<>(size);
        System.out.println(size);
        for (int i = 0; i < size; i++) {
            Point p = ps.poll();
            points.add(pointToNode(p));
        }
        post(JsonNodeFactories.object(
                JsonNodeFactories.field(
                        "data", JsonNodeFactories.array(points))));
    }

    /** Converts a point to a two-element JSON array {@code [x, y]}. */
    private static JsonNode pointToNode(Point p) {
        return JsonNodeFactories.array(
                JsonNodeFactories.number(p.x),
                JsonNodeFactories.number(p.y));
    }

    /**
     * POSTs {@code content} to the local recognition service and interprets
     * the {@code result} object of the JSON response. Each field maps a
     * symbol name to a hit count; the direction pseudo-symbols "horizontal",
     * "vertical" and "random" are ignored. A symbol is reported only when it
     * clearly dominates (more than half of all hits and at least 10 ahead of
     * the runner-up, or the single candidate with more than 4 hits).
     */
    private static void post(JsonNode content) throws IOException {
        byte[] out = JSONUtility.jsonToString(content.getRootNode()).getBytes(Charset.forName("UTF-8"));
        int length = out.length;
        URL url = new URL("http://localhost:8000");
        URLConnection con = url.openConnection();
        HttpURLConnection http = (HttpURLConnection) con;
        http.setRequestMethod("POST"); // PUT is another valid option
        http.setDoOutput(true);
        http.setFixedLengthStreamingMode(length);
        http.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
        http.connect();
        try (OutputStream os = http.getOutputStream()) {
            os.write(out);
            os.flush();
        }
        // Forces the exchange to complete; the code itself is not inspected.
        int responseCode = http.getResponseCode();
        String result = null;
        try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
            String inputLine;
            StringBuilder response = new StringBuilder();
            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
            result = response.toString();
        }
        JsonRootNode parsed = JSONUtility.jsonFromString(result);
        JsonNode toAnalyze = parsed.getNode("result");
        Map<JsonStringNode, JsonNode> fields = toAnalyze.getFields();
        List<ID> converted = new ArrayList<>(5);
        int totalCount = 0;
        for (Entry<JsonStringNode, JsonNode> entry : fields.entrySet()) {
            String name = entry.getKey().getStringValue();
            // Skip pseudo-symbols BEFORE parsing so a non-numeric value on an
            // ignored field cannot abort processing of the whole response.
            if (name.equals("horizontal") || name.equals("vertical") || name.equals("random")) {
                continue;
            }
            int number = Integer.parseInt(entry.getValue().getNumberValue());
            totalCount += number;
            converted.add(new ID(name, number));
        }
        if (converted.size() == 0) {
            System.out.println(".");
        } else if (converted.size() == 1) {
            System.out.println(result);
            if (converted.get(0).count > 4) {
                System.out.println("======> " + converted.get(0).name);
                JOptionPane.showMessageDialog(null, "Detected symbol " + converted.get(0).name);
            } else {
                System.out.println(".");
            }
        } else {
            // Sort descending by count. Integer.compare avoids the overflow
            // risk of the previous "o1.count - o2.count" subtraction idiom.
            Collections.sort(converted, new Comparator<ID>() {
                @Override
                public int compare(ID o1, ID o2) {
                    return Integer.compare(o2.count, o1.count);
                }
            });
            int max = converted.get(0).count;
            int nextMax = converted.get(1).count;
            System.out.println(result);
            if (((double) max / totalCount > 0.5) && (max - nextMax > 10)) {
                System.out.println("======> " + converted.get(0).name);
                JOptionPane.showMessageDialog(null, "Detected symbol " + converted.get(0).name);
            } else {
                System.out.println("======> random");
            }
        }
    }

    /** Immutable (name, count) pair for a recognized symbol candidate. */
    private static class ID {
        private final String name;
        private final int count;

        private ID(String name, int count) {
            this.name = name;
            this.count = count;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.assembler;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
/**
 * The standard Assembler vocabulary: the resources and properties in the
 * {@code ja:} namespace used by Jena's assembler machinery, plus lazy access
 * to the RDFS-completed assembler schema.
 */
public class JA
{
    /** Namespace URI of the assembler vocabulary. */
    public static final String uri = "http://jena.hpl.hp.com/2005/11/Assembler#";

    /** Returns the namespace URI of this vocabulary. */
    public static String getURI()
        { return uri; }

    /** Lazily-initialised schema model; access only via {@link #getSchema()}. */
    protected static Model schema;

    /** Creates a Resource in the JA namespace with the given local name. */
    protected static Resource resource( final String localName )
        { return ResourceFactory.createResource( uri + localName ); }

    /** Creates a Property in the JA namespace with the given local name. */
    public static Property property( final String localName )
        { return ResourceFactory.createProperty( uri + localName ); }

    public static final Resource MemoryModel = resource( "MemoryModel" );
    public static final Resource DefaultModel = resource( "DefaultModel" );
    public static final Resource InfModel = resource( "InfModel" );
    public static final Resource Object = resource( "Object" );
    public static final Property reasoner = property( "reasoner" );
    public static final Property reasonerURL = property( "reasonerURL" );
    public static final Property baseModel = property( "baseModel" );
    public static final Property literalContent = property( "literalContent" );
    public static final Property rules = property( "rules" );
    public static final Resource Model = resource( "Model" );
    public static final Resource OntModel = resource( "OntModel" );
    public static final Resource NamedModel = resource( "NamedModel" );
    public static final Resource FileModel = resource( "FileModel" );
    public static final Resource PrefixMapping = resource( "PrefixMapping" );
    public static final Resource ReasonerFactory = resource( "ReasonerFactory" );
    public static final Resource HasFileManager = resource( "HasFileManager" );
    public static final Resource Content = resource( "Content" );
    public static final Resource LiteralContent = resource( "LiteralContent" );
    public static final Resource OntModelSpec = resource( "OntModelSpec" );
    public static final Resource ModelSource = resource( "ModelSource" );
    public static final Property data = property( "data" );
    public static final Property content = property( "content" );
    public static final Resource ExternalContent = resource( "ExternalContent" );
    public static final Property externalContent = property( "externalContent" );
    public static final Property modelName = property( "modelName" );
    public static final Property ontModelSpec = property( "ontModelSpec" );
    public static final Resource This = resource( "this" );
    public static final Resource True = resource( "true" );
    public static final Resource False = resource( "false" );
    public static final Resource Expanded = resource( "Expanded" );
    public static final Property prefix = property( "prefix" );
    public static final Property namespace = property( "namespace" );
    public static final Property includes = property( "includes" );
    public static final Property directory = property( "directory" );
    public static final Property create = property( "create" );
    public static final Property strict = property( "strict" );
    public static final Property mapName = property( "mapName" );
    public static final Property documentManager = property( "documentManager" );
    public static final Property ontLanguage = property( "ontLanguage" );
    public static final Property importSource = property( "importSource" );
    public static final Property quotedContent = property( "quotedContent" );
    public static final Property contentEncoding = property( "contentEncoding" );
    public static final Property initialContent = property( "initialContent" );
    public static final Resource RuleSet = resource( "RuleSet" );
    public static final Property rule = property( "rule" );
    public static final Resource HasRules = resource( "HasRules" );
    public static final Property rulesFrom = property( "rulesFrom" );
    public static final Resource ContentItem = resource( "ContentItem" );
    public static final Resource LocationMapper = resource( "LocationMapper" );
    public static final Property locationMapper = property( "locationMapper" );
    public static final Resource FileManager = resource( "FileManager" );
    public static final Resource DocumentManager = resource( "DocumentManager" );
    public static final Property fileManager = property( "fileManager" );
    public static final Property policyPath = property( "policyPath" );
    public static final Resource UnionModel = resource( "UnionModel" );
    public static final Property subModel = property( "subModel" );
    public static final Property rootModel = property( "rootModel" );
    @Deprecated
    public static final Property reificationMode = property( "reificationMode" );
    public static final Resource minimal = resource( "minimal" );
    public static final Resource convenient = resource( "convenient" );
    public static final Resource standard = resource( "standard" );
    @Deprecated
    public static final Resource ReificationMode = resource( "ReificationMode" );
    public static final Property fileEncoding = property( "fileEncoding" );
    public static final Property assembler = property( "assembler" );
    public static final Property loadClass = property( "loadClass" );
    public static final Property imports = property( "imports" );
    public static final Property reasonerFactory = property( "reasonerFactory" );
    public static final Property reasonerClass = property( "reasonerClass" );
    public static final Property ja_schema = property( "schema" );
    public static final Property likeBuiltinSpec = property( "likeBuiltinSpec" );
    public static final Resource SinglePrefixMapping = resource( "SinglePrefixMapping");
    public static final Property prefixMapping = property( "prefixMapping" );

    /**
     * Returns the assembler schema model, loading and completing it on first
     * use. Synchronized: the previous unsynchronized check-then-assign lazy
     * initialisation of the static {@code schema} field was a race under
     * concurrent first access (the model could be loaded twice and published
     * unsafely).
     */
    public static synchronized Model getSchema()
    { // inline packagename to avoid clash with /our/ FileManager.
        if (schema == null) schema = complete( org.apache.jena.util.FileManager.get().loadModel( getSchemaPath() ) );
        return schema;
    }

    /** Materialises the RDFS-inference closure of {@code m} into a plain model. */
    private static Model complete( final Model m )
    {
        final Model result = ModelFactory.createDefaultModel();
        result.add( ModelFactory.createRDFSModel( m ) );
        return result;
    }

    /** Classpath location of the assembler schema Turtle file. */
    private static String getSchemaPath()
    { return "org/apache/jena/vocabulary/assembler.ttl"; }
}
| |
/*
* Copyright (c) 2014-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, without warranties or
* conditions of any kind, EITHER EXPRESS OR IMPLIED. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.vmware.xenon.services.common;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.esotericsoftware.kryo.serializers.VersionFieldSerializer.Since;
import com.vmware.xenon.common.FactoryService;
import com.vmware.xenon.common.Operation;
import com.vmware.xenon.common.OperationProcessingChain;
import com.vmware.xenon.common.RequestRouter;
import com.vmware.xenon.common.RequestRouter.Route.RouteDocumentation;
import com.vmware.xenon.common.ServiceDocument;
import com.vmware.xenon.common.ServiceDocumentDescription.PropertyDescription;
import com.vmware.xenon.common.ServiceDocumentDescription.PropertyIndexingOption;
import com.vmware.xenon.common.ServiceDocumentDescription.PropertyUsageOption;
import com.vmware.xenon.common.StatefulService;
import com.vmware.xenon.common.Utils;
import com.vmware.xenon.common.serialization.ReleaseConstants;
/**
* Example service
*/
/**
 * Example service demonstrating common Xenon stateful-service patterns:
 * persistence, replication, owner selection, strict (compare-and-set) updates
 * via a request router, and document-template customisation.
 */
public class ExampleService extends StatefulService {
    public static final String FACTORY_LINK = ServiceUriPaths.CORE + "/examples";

    /** Variant that is loaded on demand and not instrumented. */
    public static class ExampleODLService extends ExampleService {
        public static final String FACTORY_LINK = ServiceUriPaths.CORE + "/odl-examples";

        /**
         * Create a default factory service that starts instances of this service on POST.
         * This method is optional, {@code FactoryService.create} can be used directly
         */
        public static FactoryService createFactory() {
            return FactoryService.create(ExampleODLService.class);
        }

        public ExampleODLService() {
            super();
            super.toggleOption(ServiceOption.ON_DEMAND_LOAD, true);
            super.toggleOption(ServiceOption.INSTRUMENTATION, false);
        }
    }

    /** Variant whose documents are immutable; loaded on demand, not instrumented. */
    public static class ExampleImmutableService extends ExampleService {
        public static final String FACTORY_LINK = ServiceUriPaths.CORE + "/immutable-examples";

        public static FactoryService createFactory() {
            return FactoryService.create(ExampleImmutableService.class);
        }

        public ExampleImmutableService() {
            super();
            super.toggleOption(ServiceOption.IMMUTABLE, true);
            super.toggleOption(ServiceOption.ON_DEMAND_LOAD, true);
            super.toggleOption(ServiceOption.INSTRUMENTATION, false);
        }
    }

    /**
     * Request for strict update version check of a service. This shows an example of implementing
     * compareAndSet operation in a service. We implement handler that will check the supplied documentVersion
     * with documentVersion in current state and only apply the change of name if the two versions are equal.
     * See {@code getOperationProcessingChain} and {@code handlePatchForStrictUpdate} for implementation details.
     */
    public static class StrictUpdateRequest {
        public static final String KIND = Utils.buildKind(StrictUpdateRequest.class);

        // Field used by RequestRouter to route this 'kind' of request to
        // special handler. See {@code getOperationProcessingChain}.
        public String kind;

        // Field to be updated after version check.
        public String name;

        // Version to match before update.
        public long documentVersion;
    }

    /**
     * Create a default factory service that starts instances of this service on POST.
     * This method is optional, {@code FactoryService.create} can be used directly
     */
    public static FactoryService createFactory() {
        return FactoryService.create(ExampleService.class);
    }

    /** State document for the example service. */
    public static class ExampleServiceState extends ServiceDocument {
        public static final String FIELD_NAME_KEY_VALUES = "keyValues";
        public static final String FIELD_NAME_COUNTER = "counter";
        public static final String FIELD_NAME_SORTED_COUNTER = "sortedCounter";
        public static final String FIELD_NAME_NAME = "name";
        public static final String FIELD_NAME_TAGS = "tags";
        public static final String FIELD_NAME_ID = "id";
        public static final String FIELD_NAME_REQUIRED = "required";
        public static final String FIELD_NAME_IS_FROM_MIGRATION = "isFromMigration";
        public static final long VERSION_RETENTION_LIMIT = 100;
        public static final long VERSION_RETENTION_FLOOR = 20;

        @UsageOption(option = PropertyUsageOption.OPTIONAL)
        @PropertyOptions(indexing = { PropertyIndexingOption.EXPAND,
                PropertyIndexingOption.FIXED_ITEM_NAME })
        @Documentation(description = "@KEYVALUE", exampleString = "{ \"key1\" : \"value1\", \"key2\" : \"value2\" }")
        @UsageOption(option = PropertyUsageOption.AUTO_MERGE_IF_NOT_NULL)
        public Map<String, String> keyValues = new HashMap<>();

        @Documentation(description = "Version counter")
        public Long counter;

        @PropertyOptions(indexing = PropertyIndexingOption.SORT)
        public Long sortedCounter;

        @Documentation(description = "@NAME", exampleString = "myExample")
        @UsageOption(option = PropertyUsageOption.AUTO_MERGE_IF_NOT_NULL)
        @PropertyOptions(indexing = PropertyIndexingOption.SORT)
        public String name;

        @Documentation(description = "@TAGS", exampleString = "{ \"tag1\" , \"tag2\" }")
        @UsageOption(option = PropertyUsageOption.AUTO_MERGE_IF_NOT_NULL)
        public Set<String> tags = new HashSet<>();

        @UsageOption(option = PropertyUsageOption.ID)
        @UsageOption(option = PropertyUsageOption.REQUIRED)
        public String id;

        @UsageOption(option = PropertyUsageOption.REQUIRED)
        public String required;

        @PropertyOptions(
                usage = PropertyUsageOption.SERVICE_USE,
                indexing = PropertyIndexingOption.EXCLUDE_FROM_SIGNATURE)
        @Since(ReleaseConstants.RELEASE_VERSION_1_5_1)
        public Boolean isFromMigration;
    }

    public ExampleService() {
        super(ExampleServiceState.class);
        toggleOption(ServiceOption.PERSISTENCE, true);
        toggleOption(ServiceOption.REPLICATION, true);
        toggleOption(ServiceOption.INSTRUMENTATION, true);
        toggleOption(ServiceOption.OWNER_SELECTION, true);
    }

    @Override
    public void handleStart(Operation startPost) {
        // Example of state validation on start:
        // 1) Require that an initial state is provided
        // 2) Require that the name field is not null
        // A service could also accept a POST with no body or invalid state and correct it
        if (!startPost.hasBody()) {
            startPost.fail(new IllegalArgumentException("initial state is required"));
            return;
        }
        ExampleServiceState s = startPost.getBody(ExampleServiceState.class);
        if (s.name == null) {
            startPost.fail(new IllegalArgumentException("name is required"));
            return;
        }
        // Flag documents created by the migration task so consumers can tell.
        s.isFromMigration = startPost.hasPragmaDirective(Operation.PRAGMA_DIRECTIVE_FROM_MIGRATION_TASK);
        startPost.complete();
    }

    @Override
    @RouteDocumentation(description = "@PUT")
    public void handlePut(Operation put) {
        ExampleServiceState newState = getBody(put);
        ExampleServiceState currentState = getState(put);
        // example of structural validation: check if the new state is acceptable
        if (currentState.name != null && newState.name == null) {
            put.fail(new IllegalArgumentException("name must be set"));
            return;
        }
        updateCounter(newState, currentState, false);
        // replace current state, with the body of the request, in one step
        setState(put, newState);
        put.complete();
    }

    @Override
    @RouteDocumentation(description = "Update selected fields of example document")
    public void handlePatch(Operation patch) {
        updateState(patch);
        // updateState method already set the response body with the merged state
        patch.complete();
    }

    /**
     * A chain of filters, each of them is a {@link java.util.function.Predicate <Operation>}. When {@link #processRequest} is called
     * the filters are evaluated sequentially, where each filter's {@link java.util.function.Predicate <Operation>#test} can return
     * <code>true</code> to have the next filter in the chain continue process the request or
     * <code>false</code> to stop processing.
     */
    @Override
    public OperationProcessingChain getOperationProcessingChain() {
        // Avoid calling the (potentially non-trivial) super accessor twice.
        OperationProcessingChain existingChain = super.getOperationProcessingChain();
        if (existingChain != null) {
            return existingChain;
        }
        RequestRouter myRouter = new RequestRouter();
        myRouter.register(
                Action.PATCH,
                new RequestRouter.RequestBodyMatcher<>(
                        StrictUpdateRequest.class, "kind",
                        StrictUpdateRequest.KIND),
                this::handlePatchForStrictUpdate, "Strict update version check");
        OperationProcessingChain opProcessingChain = new OperationProcessingChain(this);
        opProcessingChain.add(myRouter);
        setOperationProcessingChain(opProcessingChain);
        return opProcessingChain;
    }

    /**
     * Compare-and-set handler: applies the name change only when the request's
     * documentVersion matches the current state's documentVersion.
     */
    private void handlePatchForStrictUpdate(Operation patch) {
        ExampleServiceState currentState = getState(patch);
        StrictUpdateRequest body = patch.getBody(StrictUpdateRequest.class);
        if (body.kind == null || !body.kind.equals(Utils.buildKind(StrictUpdateRequest.class))) {
            // Previous message concatenated a literal "%s" with the kind;
            // format the value properly instead.
            patch.fail(new IllegalArgumentException(
                    String.format("invalid kind: %s", body.kind)));
            return;
        }
        if (body.name == null) {
            patch.fail(new IllegalArgumentException("name is required"));
            return;
        }
        if (body.documentVersion != currentState.documentVersion) {
            String errorString = String
                    .format("Current version %d. Request version %d",
                            currentState.documentVersion,
                            body.documentVersion);
            patch.fail(new IllegalArgumentException(errorString));
            return;
        }
        currentState.name = body.name;
        patch.setBody(currentState);
        patch.complete();
    }

    /**
     * Merges the operation body into the current state and sets the merged
     * state as the response body.
     */
    private ExampleServiceState updateState(Operation update) {
        // A DCP service handler is state-less: Everything it needs is provided as part of the
        // of the operation. The body and latest state associated with the service are retrieved
        // below.
        ExampleServiceState body = getBody(update);
        ExampleServiceState currentState = getState(update);
        // use helper that will merge automatically current state, with state supplied in body.
        // Note the usage option PropertyUsageOption.AUTO_MERGE_IF_NOT_NULL has been set on the
        // "name" field.
        boolean hasStateChanged = Utils.mergeWithState(getStateDescription(),
                currentState, body);
        updateCounter(body, currentState, hasStateChanged);
        if (body.documentExpirationTimeMicros != currentState.documentExpirationTimeMicros) {
            currentState.documentExpirationTimeMicros = body.documentExpirationTimeMicros;
        }
        // response has latest, updated state
        update.setBody(currentState);
        return currentState;
    }

    /**
     * Monotonically advances the counter: re-ordered operations can never
     * move it backwards. Mirrors the final value into {@code body} as well.
     */
    private boolean updateCounter(ExampleServiceState body,
            ExampleServiceState currentState, boolean hasStateChanged) {
        if (body.counter != null) {
            if (currentState.counter == null) {
                currentState.counter = body.counter;
            }
            // deal with possible operation re-ordering by simply always
            // moving the counter up
            currentState.counter = Math.max(body.counter, currentState.counter);
            body.counter = currentState.counter;
            hasStateChanged = true;
        }
        return hasStateChanged;
    }

    @Override
    public void handleDelete(Operation delete) {
        if (!delete.hasBody()) {
            delete.complete();
            return;
        }
        // A DELETE can be used to both stop the service, mark it deleted in the index
        // so its excluded from queries, but it can also set its expiration so its state
        // history is permanently removed
        ExampleServiceState currentState = getState(delete);
        ExampleServiceState st = delete.getBody(ExampleServiceState.class);
        if (st.documentExpirationTimeMicros > 0) {
            currentState.documentExpirationTimeMicros = st.documentExpirationTimeMicros;
        }
        delete.complete();
    }

    /**
     * Provides a default instance of the service state and allows service author to specify
     * indexing and usage options, per service document property
     */
    @Override
    public ServiceDocument getDocumentTemplate() {
        ServiceDocument template = super.getDocumentTemplate();
        PropertyDescription pd = template.documentDescription.propertyDescriptions.get(
                ExampleServiceState.FIELD_NAME_KEY_VALUES);
        // instruct the index to deeply index the map
        pd.indexingOptions.add(PropertyIndexingOption.EXPAND);
        PropertyDescription pdTags = template.documentDescription.propertyDescriptions.get(
                ExampleServiceState.FIELD_NAME_TAGS);
        // instruct the index to deeply index the set of tags
        pdTags.indexingOptions.add(PropertyIndexingOption.EXPAND);
        PropertyDescription pdName = template.documentDescription.propertyDescriptions.get(
                ExampleServiceState.FIELD_NAME_NAME);
        // instruct the index to enable SORT on this field.
        pdName.indexingOptions.add(PropertyIndexingOption.SORT);
        // instruct the index to only keep the most recent N versions
        template.documentDescription.versionRetentionLimit = ExampleServiceState.VERSION_RETENTION_LIMIT;
        template.documentDescription.versionRetentionFloor = ExampleServiceState.VERSION_RETENTION_FLOOR;
        return template;
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying an attachment description.
*
* <h5>Valid Usage</h5>
*
* <ul>
* <li>{@code stencilInitialLayout} <b>must</b> not be {@link VK10#VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}, {@link VK10#VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL}, {@link VK10#VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL}, {@link VK11#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL}, or {@link VK11#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL}</li>
* <li>{@code stencilFinalLayout} <b>must</b> not be {@link VK10#VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}, {@link VK10#VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL}, {@link VK10#VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL}, {@link VK11#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL}, or {@link VK11#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL}</li>
* <li>{@code stencilFinalLayout} <b>must</b> not be {@link VK10#VK_IMAGE_LAYOUT_UNDEFINED IMAGE_LAYOUT_UNDEFINED} or {@link VK10#VK_IMAGE_LAYOUT_PREINITIALIZED IMAGE_LAYOUT_PREINITIALIZED}</li>
* </ul>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code sType} <b>must</b> be {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT}</li>
* <li>{@code stencilInitialLayout} <b>must</b> be a valid {@code VkImageLayout} value</li>
* <li>{@code stencilFinalLayout} <b>must</b> be a valid {@code VkImageLayout} value</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkAttachmentDescriptionStencilLayout {
* VkStructureType {@link #sType};
* void * {@link #pNext};
* VkImageLayout {@link #stencilInitialLayout};
* VkImageLayout {@link #stencilFinalLayout};
* }</code></pre>
*/
public class VkAttachmentDescriptionStencilLayout extends Struct implements NativeResource {
/** The struct size in bytes. */
public static final int SIZEOF;
/** The struct alignment in bytes. */
public static final int ALIGNOF;
/** The struct member offsets. */
public static final int
    STYPE,
    PNEXT,
    STENCILINITIALLAYOUT,
    STENCILFINALLAYOUT;
// Compute the native layout once at class-load time: a 4-byte VkStructureType,
// a pointer-sized pNext, and two 4-byte VkImageLayout members.
static {
    Layout layout = __struct(
        __member(4),            // sType
        __member(POINTER_SIZE), // pNext
        __member(4),            // stencilInitialLayout
        __member(4)             // stencilFinalLayout
    );
    SIZEOF = layout.getSize();
    ALIGNOF = layout.getAlignment();
    STYPE = layout.offsetof(0);
    PNEXT = layout.offsetof(1);
    STENCILINITIALLAYOUT = layout.offsetof(2);
    STENCILFINALLAYOUT = layout.offsetof(3);
}
/**
 * Creates a {@code VkAttachmentDescriptionStencilLayout} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
 * visible to the struct instance and vice versa.
 *
 * <p>The created instance holds a strong reference to the container object.</p>
 *
 * @param container the backing buffer; must have at least {@code SIZEOF} bytes remaining
 */
public VkAttachmentDescriptionStencilLayout(ByteBuffer container) {
    // __checkContainer validates the remaining capacity against SIZEOF
    super(memAddress(container), __checkContainer(container, SIZEOF));
}
@Override
public int sizeof() { return SIZEOF; }

// Getters read directly from the struct's native memory via the n* helpers.

/** the type of this structure. */
@NativeType("VkStructureType")
public int sType() { return nsType(address()); }
/** {@code NULL} or a pointer to a structure extending this structure. */
@NativeType("void *")
public long pNext() { return npNext(address()); }
/** the layout the stencil aspect of the attachment image subresource will be in when a render pass instance begins. */
@NativeType("VkImageLayout")
public int stencilInitialLayout() { return nstencilInitialLayout(address()); }
/** the layout the stencil aspect of the attachment image subresource will be transitioned to when a render pass instance ends. */
@NativeType("VkImageLayout")
public int stencilFinalLayout() { return nstencilFinalLayout(address()); }

// Setters write to native memory and return {@code this} for chaining.

/** Sets the specified value to the {@link #sType} field. */
public VkAttachmentDescriptionStencilLayout sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
/** Sets the {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT} value to the {@link #sType} field. */
public VkAttachmentDescriptionStencilLayout sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT); }
/** Sets the specified value to the {@link #pNext} field. */
public VkAttachmentDescriptionStencilLayout pNext(@NativeType("void *") long value) { npNext(address(), value); return this; }
/** Sets the specified value to the {@link #stencilInitialLayout} field. */
public VkAttachmentDescriptionStencilLayout stencilInitialLayout(@NativeType("VkImageLayout") int value) { nstencilInitialLayout(address(), value); return this; }
/** Sets the specified value to the {@link #stencilFinalLayout} field. */
public VkAttachmentDescriptionStencilLayout stencilFinalLayout(@NativeType("VkImageLayout") int value) { nstencilFinalLayout(address(), value); return this; }
/**
 * Initializes this struct with the specified values.
 *
 * @param sType                the structure type
 * @param pNext                {@code NULL} or a pointer to an extending structure
 * @param stencilInitialLayout the initial stencil layout
 * @param stencilFinalLayout   the final stencil layout
 *
 * @return this struct, for chaining
 */
public VkAttachmentDescriptionStencilLayout set(
    int sType,
    long pNext,
    int stencilInitialLayout,
    int stencilFinalLayout
) {
    // Delegate to the individual field setters; each one returns this.
    return this
        .sType(sType)
        .pNext(pNext)
        .stencilInitialLayout(stencilInitialLayout)
        .stencilFinalLayout(stencilFinalLayout);
}
/**
* Copies the specified struct data to this struct.
*
* @param src the source struct
*
* @return this struct
*/
public VkAttachmentDescriptionStencilLayout set(VkAttachmentDescriptionStencilLayout src) {
// Raw byte-for-byte copy of the whole struct; both instances share the same SIZEOF layout.
memCopy(src.address(), address(), SIZEOF);
return this;
}
// -----------------------------------
// Static allocation helpers. Heap allocations (malloc/calloc) must be freed explicitly
// with memFree; stack allocations live until the MemoryStack frame is popped.
/** Returns a new {@code VkAttachmentDescriptionStencilLayout} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static VkAttachmentDescriptionStencilLayout malloc() {
return wrap(VkAttachmentDescriptionStencilLayout.class, nmemAllocChecked(SIZEOF));
}
/** Returns a new {@code VkAttachmentDescriptionStencilLayout} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static VkAttachmentDescriptionStencilLayout calloc() {
return wrap(VkAttachmentDescriptionStencilLayout.class, nmemCallocChecked(1, SIZEOF));
}
/** Returns a new {@code VkAttachmentDescriptionStencilLayout} instance allocated with {@link BufferUtils}. */
public static VkAttachmentDescriptionStencilLayout create() {
ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
return wrap(VkAttachmentDescriptionStencilLayout.class, memAddress(container), container);
}
/** Returns a new {@code VkAttachmentDescriptionStencilLayout} instance for the specified memory address. */
public static VkAttachmentDescriptionStencilLayout create(long address) {
return wrap(VkAttachmentDescriptionStencilLayout.class, address);
}
/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkAttachmentDescriptionStencilLayout createSafe(long address) {
return address == NULL ? null : wrap(VkAttachmentDescriptionStencilLayout.class, address);
}
/**
* Returns a new {@link VkAttachmentDescriptionStencilLayout.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer malloc(int capacity) {
// __checkMalloc guards against integer overflow of capacity * SIZEOF.
return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
}
/**
* Returns a new {@link VkAttachmentDescriptionStencilLayout.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer calloc(int capacity) {
return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}
/**
* Returns a new {@link VkAttachmentDescriptionStencilLayout.Buffer} instance allocated with {@link BufferUtils}.
*
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer create(int capacity) {
ByteBuffer container = __create(capacity, SIZEOF);
return wrap(Buffer.class, memAddress(container), capacity, container);
}
/**
* Create a {@link VkAttachmentDescriptionStencilLayout.Buffer} instance at the specified memory.
*
* @param address the memory address
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer create(long address, int capacity) {
return wrap(Buffer.class, address, capacity);
}
/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static VkAttachmentDescriptionStencilLayout.Buffer createSafe(long address, int capacity) {
return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
/**
* Returns a new {@code VkAttachmentDescriptionStencilLayout} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
*/
public static VkAttachmentDescriptionStencilLayout malloc(MemoryStack stack) {
return wrap(VkAttachmentDescriptionStencilLayout.class, stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
* Returns a new {@code VkAttachmentDescriptionStencilLayout} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
*/
public static VkAttachmentDescriptionStencilLayout calloc(MemoryStack stack) {
return wrap(VkAttachmentDescriptionStencilLayout.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
* Returns a new {@link VkAttachmentDescriptionStencilLayout.Buffer} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer malloc(int capacity, MemoryStack stack) {
// NOTE(review): unlike the heap variant above, capacity * SIZEOF is an unchecked int
// multiplication and could overflow for very large capacities — generated code, verify upstream.
return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}
/**
* Returns a new {@link VkAttachmentDescriptionStencilLayout.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static VkAttachmentDescriptionStencilLayout.Buffer calloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------
// Static "unsafe" accessors: operate on a raw struct address plus a field offset constant.
// No bounds or null checks are performed; callers must pass a valid struct address.
/** Unsafe version of {@link #sType}. */
public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentDescriptionStencilLayout.STYPE); }
/** Unsafe version of {@link #pNext}. */
public static long npNext(long struct) { return memGetAddress(struct + VkAttachmentDescriptionStencilLayout.PNEXT); }
/** Unsafe version of {@link #stencilInitialLayout}. */
public static int nstencilInitialLayout(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentDescriptionStencilLayout.STENCILINITIALLAYOUT); }
/** Unsafe version of {@link #stencilFinalLayout}. */
public static int nstencilFinalLayout(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentDescriptionStencilLayout.STENCILFINALLAYOUT); }
/** Unsafe version of {@link #sType(int) sType}. */
public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentDescriptionStencilLayout.STYPE, value); }
/** Unsafe version of {@link #pNext(long) pNext}. */
public static void npNext(long struct, long value) { memPutAddress(struct + VkAttachmentDescriptionStencilLayout.PNEXT, value); }
/** Unsafe version of {@link #stencilInitialLayout(int) stencilInitialLayout}. */
public static void nstencilInitialLayout(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentDescriptionStencilLayout.STENCILINITIALLAYOUT, value); }
/** Unsafe version of {@link #stencilFinalLayout(int) stencilFinalLayout}. */
public static void nstencilFinalLayout(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentDescriptionStencilLayout.STENCILFINALLAYOUT, value); }
// -----------------------------------
/** An array of {@link VkAttachmentDescriptionStencilLayout} structs. */
public static class Buffer extends StructBuffer<VkAttachmentDescriptionStencilLayout, Buffer> implements NativeResource {
// Template element created at dummy address -1L; presumably repositioned by the
// StructBuffer machinery when individual elements are accessed (standard LWJGL pattern) — verify upstream.
private static final VkAttachmentDescriptionStencilLayout ELEMENT_FACTORY = VkAttachmentDescriptionStencilLayout.create(-1L);
/**
* Creates a new {@code VkAttachmentDescriptionStencilLayout.Buffer} instance backed by the specified container.
*
* Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
* will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
* by {@link VkAttachmentDescriptionStencilLayout#SIZEOF}, and its mark will be undefined.
*
* <p>The created buffer instance holds a strong reference to the container object.</p>
*/
public Buffer(ByteBuffer container) {
super(container, container.remaining() / SIZEOF);
}
public Buffer(long address, int cap) {
super(address, null, -1, 0, cap, cap);
}
Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
super(address, container, mark, pos, lim, cap);
}
@Override
protected Buffer self() {
return this;
}
@Override
protected VkAttachmentDescriptionStencilLayout getElementFactory() {
return ELEMENT_FACTORY;
}
// Element accessors below mirror the outer struct API but read/write at the buffer's current element address.
/** @return the value of the {@link VkAttachmentDescriptionStencilLayout#sType} field. */
@NativeType("VkStructureType")
public int sType() { return VkAttachmentDescriptionStencilLayout.nsType(address()); }
/** @return the value of the {@link VkAttachmentDescriptionStencilLayout#pNext} field. */
@NativeType("void *")
public long pNext() { return VkAttachmentDescriptionStencilLayout.npNext(address()); }
/** @return the value of the {@link VkAttachmentDescriptionStencilLayout#stencilInitialLayout} field. */
@NativeType("VkImageLayout")
public int stencilInitialLayout() { return VkAttachmentDescriptionStencilLayout.nstencilInitialLayout(address()); }
/** @return the value of the {@link VkAttachmentDescriptionStencilLayout#stencilFinalLayout} field. */
@NativeType("VkImageLayout")
public int stencilFinalLayout() { return VkAttachmentDescriptionStencilLayout.nstencilFinalLayout(address()); }
/** Sets the specified value to the {@link VkAttachmentDescriptionStencilLayout#sType} field. */
public VkAttachmentDescriptionStencilLayout.Buffer sType(@NativeType("VkStructureType") int value) { VkAttachmentDescriptionStencilLayout.nsType(address(), value); return this; }
/** Sets the {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT} value to the {@link VkAttachmentDescriptionStencilLayout#sType} field. */
public VkAttachmentDescriptionStencilLayout.Buffer sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT); }
/** Sets the specified value to the {@link VkAttachmentDescriptionStencilLayout#pNext} field. */
public VkAttachmentDescriptionStencilLayout.Buffer pNext(@NativeType("void *") long value) { VkAttachmentDescriptionStencilLayout.npNext(address(), value); return this; }
/** Sets the specified value to the {@link VkAttachmentDescriptionStencilLayout#stencilInitialLayout} field. */
public VkAttachmentDescriptionStencilLayout.Buffer stencilInitialLayout(@NativeType("VkImageLayout") int value) { VkAttachmentDescriptionStencilLayout.nstencilInitialLayout(address(), value); return this; }
/** Sets the specified value to the {@link VkAttachmentDescriptionStencilLayout#stencilFinalLayout} field. */
public VkAttachmentDescriptionStencilLayout.Buffer stencilFinalLayout(@NativeType("VkImageLayout") int value) { VkAttachmentDescriptionStencilLayout.nstencilFinalLayout(address(), value); return this; }
}
}
| |
package uk.ac.york.mhe504.dblm.evaluation;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This class compares the elements in a SQL file with the elements in a
* KDM file to verify that the extracted model is complete.
*
* @author mhe504@york.ac.uk
*
*/
public class ModelChecker {
// All state is static and mutable: this class is a single-shot command-line tool,
// not safe for reuse or concurrent invocation.
//SQL — elements parsed from the schema file by extractSqlElementNames
private static List<String> tableNames = new ArrayList<String>();
private static List<String> colNames = new ArrayList<String>();
private static List<String> ukNames = new ArrayList<String>();
private static List<String> keyNames = new ArrayList<String>();
private static List<String> tableCommentValues = new ArrayList<String>();
private static List<String> colCommentValues = new ArrayList<String>();
private static List<String> seqNames = new ArrayList<String>();
private static List<String> references = new ArrayList<String>();
//KDM — elements parsed from the structure model by extractKdmElementNames
private static List<String> relationalTableNames = new ArrayList<String>();
private static List<String> itemUnitNames = new ArrayList<String>();
private static List<String> uniqueKeyNames = new ArrayList<String>();
private static List<String> indexElementNames = new ArrayList<String>();
private static List<String> seqContentNames = new ArrayList<String>();
private static List<String> annotationValues = new ArrayList<String>();
private static List<String> keyRelationships = new ArrayList<String>();
//CSV — workload measurements parsed by extractCsvElements
private static List<String> tupleNames = new ArrayList<String>();
private static List<Double> tupleValues = new ArrayList<Double>();
//SMM — workload model elements parsed by extractSmmElements
private static List<String> observationNames = new ArrayList<String>();
private static List<Double> directMeasurementValues = new ArrayList<Double>();
/**
* Entry point: compares the SQL schema against the KDM structure model, then the
* CSV workload measurements against the SMM workload model, printing both reports.
*
* NOTE(review): all input paths are hard-coded relative to the working directory.
*
* @throws IOException if any of the four input files cannot be read
*/
public static void main (String[] args) throws IOException
{
List<String> input = Files.readAllLines(Paths.get("sample-inputs/schema.sql"), Charset.defaultCharset());
List<String> model = Files.readAllLines(Paths.get("StructureModel.xmi"), Charset.defaultCharset());
// Phase 1: SQL schema vs KDM structure model.
extractSqlElementNames(input);
extractKdmElementNames(model);
outputResults();
// Phase 2: CSV measurements vs SMM workload model.
input = Files.readAllLines(Paths.get("sample-inputs/loadmeasurements.csv"), Charset.defaultCharset());
model = Files.readAllLines(Paths.get("WorkloadModel.xmi"), Charset.defaultCharset());
extractCsvElements(input);
extractSmmElements(model);
outputSmmResults();
}
/**
 * Extracts tuple names and measurement values from the CSV input into the static
 * {@code tupleNames} and {@code tupleValues} lists.
 *
 * <p>Row 0 is assumed to be a header and is skipped. Column 0 holds the tuple name;
 * columns 3..7 hold the five numeric measurement values — TODO confirm against the
 * loadmeasurements.csv layout.</p>
 *
 * @param input the CSV file contents, one line per element
 */
private static void extractCsvElements(List<String> input) {
    for (int i = 1; i < input.size(); i++) {
        // Split each row once instead of once per field (original split the line six times).
        String[] fields = input.get(i).split(",");
        tupleNames.add(fields[0]);
        for (int col = 3; col <= 7; col++) {
            tupleValues.add(Double.parseDouble(fields[col]));
        }
    }
}
/**
* Extracts observation names and direct-measurement values from an SMM (XMI) model file.
*
* Parsing is purely positional on quotes and '=' characters; it assumes the exact
* attribute layout emitted by the model extractor — TODO confirm against a sample
* WorkloadModel.xmi.
*
* @param model the model file contents, one line per element
*/
private static void extractSmmElements(List<String> model) {
for (String s : model)
{
if (s.contains("<observations name"))
{
// Take everything after '=' and strip the quotes and trailing '>' of name="...">.
observationNames.add(s.split("=")[1].replace("\"", "").replace(">", ""));
}
else if (s.contains("smm:DirectMeasurement"))
{
// The value is assumed to be the third quoted attribute on the line (token 5 after splitting on '"').
directMeasurementValues.add(Double.parseDouble(s.split("\"")[5]));
}
}
}
/**
* Prints the comparison report between the CSV measurements and the SMM model:
* element counts, then a side-by-side listing of names and values.
*
* Side effect: makeListSizeEqual pads the shorter of each compared pair of static
* lists with nulls, so this method should be called at most once per run.
*
* NOTE(review): "Oberservations" in the output below is a typo for "Observations";
* left unchanged here because it is runtime output text.
*/
private static void outputSmmResults() {
System.out.println("================== ELEMENT COUNTS =================");
System.out.println("Measurement Set:\tSMM:");
System.out.println("Tuples:\t\t" + tupleNames.size() + "\tOberservations:\t" + observationNames.size());
System.out.println("Values:\t\t" + tupleValues.size() + "\tDirectMeasurements:\t" + directMeasurementValues.size());
System.out.println("==================================================");
System.out.println();
System.out.println("===================== TUPLES =====================");
System.out.format("%-30s%-30s\n", "CSV Tuple Names:", "SMM Observation Names:");
makeListSizeEqual(tupleNames, observationNames);
for (int i = 0; i < tupleNames.size(); i++)
System.out.format("%-30s%-30s\n", tupleNames.get(i), observationNames.get(i));
System.out.println("==================================================");
System.out.println("Tuple Names Match? " + areIdentical(tupleNames,observationNames));
System.out.println("==================================================");
System.out.println();
System.out.println("===================== VALUES =====================");
System.out.format("%-30s%-30s\n", "CSV Values:", "SMM DirectMeasurement values:");
makeListSizeEqual(tupleValues, directMeasurementValues);
for (int i = 0; i < tupleValues.size(); i++)
System.out.format("%-30s%-30s\n", tupleValues.get(i), directMeasurementValues.get(i));
System.out.println("==================================================");
System.out.println("Values Match? " + areIdentical(tupleValues,directMeasurementValues));
System.out.println("==================================================");
}
/**
 * Prints the comparison report between the SQL schema elements and the KDM structure
 * model elements: counts, then side-by-side listings for tables, columns, unique keys,
 * indexes, sequences, relationships and comments.
 *
 * <p>Side effect: {@link #makeListSizeEqual} pads the shorter of each compared pair of
 * static lists with {@code null}s, so this method should be called at most once per run.</p>
 */
private static void outputResults() {
    System.out.println("================== ELEMENT COUNTS =================");
    System.out.println("SQL:\t\t\tKDM:");
    System.out.println("Tables:\t\t" + tableNames.size() + "\tRelationalTables:\t" + relationalTableNames.size());
    System.out.println("Columns:\t" + colNames.size() + "\tItemUnits:\t\t" + itemUnitNames.size());
    System.out.println("UKs:\t\t" + ukNames.size() + "\tUniqueKeys:\t\t" + uniqueKeyNames.size());
    System.out.println("Indexes:\t" + keyNames.size() + "\tIndexElements:\t\t" + indexElementNames.size());
    System.out.println("Relationships:\t" + references.size() + "\tKeyRelationships:\t" + keyRelationships.size());
    System.out.println("Sequences:\t" + seqNames.size() + "\tSeqContents:\t\t" + seqContentNames.size());
    System.out.println("Table Comments:\t" + tableCommentValues.size() + "\tAnnotations:\t\t" + annotationValues.size());
    System.out.println("Col Comments:\t" + colCommentValues.size());
    System.out.println("==================================================");
    System.out.println();
    System.out.println("========================== TABLES ==========================");
    System.out.format("%-30s%-30s\n", "SQL CREATE TABLE:", "KDM RelationalTable Elements:");
    makeListSizeEqual(relationalTableNames, tableNames);
    for (int i = 0; i < relationalTableNames.size(); i++) {
        System.out.format("%-30s%-30s\n", tableNames.get(i), relationalTableNames.get(i));
    }
    System.out.println("============================================================");
    System.out.println("Tables Match? " + areIdentical(tableNames, relationalTableNames));
    System.out.println("============================================================");
    System.out.println();
    System.out.println("========================== COLUMNS ==========================");
    System.out.format("%-30s%-30s\n", "SQL COLUMNS:", "KDM ItemUnit Elements:");
    makeListSizeEqual(colNames, itemUnitNames);
    for (int i = 0; i < colNames.size(); i++) {
        System.out.format("%-30s%-30s\n", colNames.get(i), itemUnitNames.get(i));
    }
    System.out.println("===========================================================");
    System.out.println("Columns Match? " + areIdentical(colNames, itemUnitNames));
    System.out.println("============================================================");
    System.out.println();
    System.out.println("========================== UKs ==========================");
    System.out.format("%-30s%-30s\n", "SQL Unique + Primary Keys:", "KDM UniqueKeys Elements:");
    makeListSizeEqual(ukNames, uniqueKeyNames);
    for (int i = 0; i < ukNames.size(); i++) {
        System.out.format("%-30s%-30s\n", ukNames.get(i), uniqueKeyNames.get(i));
    }
    System.out.println("==========================================================");
    System.out.println("PKs Match? " + areIdentical(ukNames, uniqueKeyNames));
    System.out.println("==========================================================");
    System.out.println();
    System.out.println("========================== Indexes ==========================");
    System.out.format("%-30s%-30s\n", "SQL Indexes:", "KDM IndexElement:");
    makeListSizeEqual(keyNames, indexElementNames);
    for (int i = 0; i < indexElementNames.size(); i++) {
        System.out.format("%-30s%-30s\n", keyNames.get(i), indexElementNames.get(i));
    }
    System.out.println("=============================================================");
    System.out.println("Indexes Match? " + areIdentical(keyNames, indexElementNames));
    System.out.println("=============================================================");
    System.out.println();
    System.out.println("========================== Sequences ==========================");
    System.out.format("%-30s%-30s\n", "SQL CREATE SEQUENCE:", "KDM SeqContent Elements:");
    makeListSizeEqual(seqNames, seqContentNames);
    for (int i = 0; i < seqContentNames.size(); i++) {
        System.out.format("%-30s%-30s\n", seqNames.get(i), seqContentNames.get(i));
    }
    System.out.println("===============================================================");
    System.out.println("Sequences Match? " + areIdentical(seqNames, seqContentNames));
    System.out.println("===============================================================");
    System.out.println();
    System.out.println("======================================== Relationships =====================================");
    System.out.format("%-50s%-50s\n", "SQL Relationships:", "KDM KeyRelationship Elements:");
    makeListSizeEqual(keyRelationships, references);
    for (int i = 0; i < references.size(); i++) {
        System.out.format("%-70s%-70s\n", references.get(i), keyRelationships.get(i));
    }
    System.out.println("============================================================================================");
    System.out.println("Relationships Match? " + areIdentical(keyRelationships, references));
    System.out.println("============================================================================================");
    System.out.println("========================== Comments =============================");
    System.out.println("SQL Table and Column Comments:");
    // Fix: build the combined list in a copy. The original aliased the static
    // tableCommentValues list and appended colCommentValues into it as a side effect.
    List<String> allComments = new ArrayList<>(tableCommentValues);
    allComments.addAll(colCommentValues);
    for (int i = 0; i < allComments.size(); i++) {
        System.out.println(allComments.get(i));
    }
    System.out.println();
    System.out.println("KDM Annotation Elements:");
    for (int i = 0; i < annotationValues.size(); i++) {
        System.out.println(annotationValues.get(i));
    }
    System.out.println("=================================================================");
    System.out.println("Comments Match? " + areIdentical(annotationValues, allComments));
    System.out.println("=================================================================");
}
/**
 * Pads the shorter of the two lists with {@code null} entries until both lists
 * have the same size (used to line up side-by-side report output).
 *
 * @param list1 first list, mutated in place if shorter
 * @param list2 second list, mutated in place if shorter
 */
private static void makeListSizeEqual(List<?> list1, List<?> list2)
{
    int target = Math.max(list1.size(), list2.size());
    for (int i = list1.size(); i < target; i++) {
        list1.add(null);
    }
    for (int i = list2.size(); i < target; i++) {
        list2.add(null);
    }
}
/**
 * Reports whether the two lists contain the same set of elements.
 *
 * <p>Note: set semantics — element order and duplicate counts are deliberately
 * ignored, matching the original behavior.</p>
 *
 * @param list1 first list
 * @param list2 second list
 * @return {@code true} if both lists hold the same distinct elements
 */
private static boolean areIdentical(List<?> list1, List<?> list2)
{
    return new HashSet<Object>(list1).equals(new HashSet<Object>(list2));
}
/**
* Extracts element names from a KDM (XMI) structure model file into the static
* KDM lists (relationalTableNames, itemUnitNames, uniqueKeyNames, ...).
*
* Parsing is positional on '"' characters and assumes the attribute ordering
* emitted by the model extractor — TODO confirm against a sample StructureModel.xmi.
*
* @param modelFile the model file contents, one line per element
*/
private static void extractKdmElementNames(List<String> modelFile)
{
for (int i =0; i < modelFile.size(); i++)
{
String line = modelFile.get(i);
if (line.contains("<dataElement xsi:type=\"data:RelationalTable\""))
{
// Name is assumed to be the second quoted attribute (token 3 when splitting on '"').
relationalTableNames.add(line.split("\"")[3]);
}
else if (line.contains("<itemUnit ") && !line.contains("name=\"Value\""))
{
itemUnitNames.add(line.split("\"")[1]);
}
else if (line.contains("<dataElement xsi:type=\"data:UniqueKey\""))
{
// The key is stored as an XMI path reference; resolve it to a column name.
uniqueKeyNames.add(resolvePK(line.split("\"")[5],modelFile));
}
else if (line.contains("<dataElement xsi:type=\"data:IndexElement\""))
{
indexElementNames.add(line.split("\"")[3].trim());
}
else if (line.contains("<dataElement xsi:type=\"data:KeyRelation\""))
{
// Both endpoints are XMI path references; record them as "from;to" column paths.
String from = line.split("\"")[5];
String to = line.split("\"")[3];
keyRelationships.add(resolveReference(from,modelFile) + ";" + resolveReference(to,modelFile));
}
else if (line.contains("<dataElement xsi:type=\"data:SeqContent\""))
{
seqContentNames.add(line.split("\"")[3]);
}
else if (line.contains("<annotation "))
{
annotationValues.add(line.split("\"")[1]);
}
}
}
/**
* Resolves an XMI path reference (e.g. "//@.../@<elem>.<tableIdx>/@<elem>.<colIdx>")
* to a "table.column" string by counting matching elements in the model file.
*
* NOTE(review): if the reference never resolves, the inner while loops can walk past
* the end of modelFile and throw IndexOutOfBoundsException — presumably inputs always
* resolve; TODO confirm.
*
* @param from      the XMI reference string (leading "//@" is stripped)
* @param modelFile the model file contents
* @return "tableName.columnName" (either part may be empty if unresolved)
*/
private static String resolveReference(String from, List<String> modelFile) {
from = from.substring(3);
String[] parts = from.split("/@");
int currentTable = -1;
int currentCol = -1;
String tableName = "";
String colName = "";
for (int i = 0; i < modelFile.size();i++ )
{
String line = modelFile.get(i);
// Advance through the file counting RelationalTable elements until the
// table index encoded in path segment parts[2] is reached.
while (Integer.parseInt(parts[2].split("\\.")[1]) != currentTable)
{
if (line.contains("<dataElement xsi:type=\"data:RelationalTable\""))
{
tableName = line.split("\"")[3];
currentTable++;
}
i++;
line = modelFile.get(i);
}
if (Integer.parseInt(parts[2].split("\\.")[1]) == currentTable)
{
// Then count elements of the kind named in parts[3] until the column index is reached.
while (Integer.parseInt(parts[3].split("\\.")[1]) != currentCol)
{
if (line.contains("<" + parts[3].split("\\.")[0]))
{
// The matched element itself holds another reference; resolve it to a column name.
colName = resolvePK(line.split("\"")[5].trim(),modelFile);
currentCol++;
}
i++;
line = modelFile.get(i);
}
}
if (!colName.equals("") && !tableName.equals(""))
{
return tableName + "." + colName;
}
}
return tableName + "." + colName;
}
/**
* Resolves an XMI path reference to a column name by counting matching elements
* in the model file (same walking scheme as resolveReference, but returns only
* the column name and reads it directly from the matched line).
*
* NOTE(review): shares resolveReference's risk of IndexOutOfBoundsException when
* a reference does not resolve — TODO confirm inputs always resolve.
*
* @param from      the XMI reference string (only the first space-separated token
*                  is used; the leading "//@" is stripped)
* @param modelFile the model file contents
* @return the resolved column name, or "" if unresolved
*/
private static String resolvePK(String from, List<String> modelFile) {
// A multi-column reference lists several paths separated by spaces; only the first is used.
from = from.split(" ")[0];
from = from.substring(3);
String[] parts = from.split("/@");
int currentTable = -1;
int currentCol = -1;
String tableName = "";
String colName = "";
for (int i = 0; i < modelFile.size();i++ )
{
String line = modelFile.get(i);
// Count RelationalTable elements until the table index in parts[2] is reached.
while (Integer.parseInt(parts[2].split("\\.")[1]) != currentTable)
{
if (line.contains("<dataElement xsi:type=\"data:RelationalTable\""))
{
tableName = line.split("\"")[3];
currentTable++;
}
i++;
line = modelFile.get(i);
}
if (Integer.parseInt(parts[2].split("\\.")[1]) == currentTable)
{
// Count elements of the kind named in parts[3] until the column index is reached.
while (Integer.parseInt(parts[3].split("\\.")[1]) != currentCol)
{
if (line.contains("<" + parts[3].split("\\.")[0]))
{
// First quoted attribute of the matched element is the column name.
colName = line.split("\"")[1];
currentCol++;
}
i++;
line = modelFile.get(i);
}
}
if (!colName.equals("") && !tableName.equals(""))
{
return colName;
}
}
return colName;
}
/**
* Extracts table, column, key, index, sequence, comment and foreign-key-reference
* names from an SQL schema file into the static SQL lists.
*
* The branch chain handles several SQL dialect quoting styles (MySQL backticks,
* ANSI double quotes, unquoted). Branch ORDER matters: more specific patterns must
* be tested before more general ones (e.g. the backtick CREATE TABLE test before the
* bare "CREATE TABLE " test), so do not reorder.
*
* @param schemaFile the schema file contents, one line per element
*/
private static void extractSqlElementNames(List<String> schemaFile)
{
for (int i =0; i < schemaFile.size(); i++)
{
String line = schemaFile.get(i);
// --- table declarations (backtick-, double-quote-, and unquoted variants) ---
if (line.contains("CREATE TABLE `"))
{
String[] parts = line.split("`");
tableNames.add(parts[1]);
}
else if (line.contains(" CREATE TABLE \""))
{
String[] parts = line.split("\"");
tableNames.add(parts[1]);
}
else if (line.contains("CREATE TABLE "))
{
String[] parts = line.split(" ");
tableNames.add(parts[2]);
}
// --- primary/unique keys and unique indexes (all collected into ukNames) ---
else if (line.contains(" PRIMARY KEY (`"))
{
String[] parts = line.split("`");
ukNames.add(parts[1]);
}
else if (line.contains("ADD PRIMARY KEY (\""))
{
String[] parts = line.split("\"");
ukNames.add(parts[3]);
}
else if (line.contains(" PRIMARY KEY (\""))
{
String[] parts = line.split("\"");
ukNames.add(parts[5]);
}
else if (line.contains(" PRIMARY KEY ("))
{
String[] parts = line.split(" ");
ukNames.add(parts[5].replace(",", ""));
}
else if (line.contains("UNIQUE KEY `"))
{
String[] parts = line.split("`");
ukNames.add(parts[3].trim());
}
else if (line.contains(" KEY `"))
{
String[] parts = line.split("`");
ukNames.add(parts[3]);
}
else if (line.contains("CREATE UNIQUE INDEX \""))
{
String[] parts = line.split("\"");
ukNames.add(parts[5]);
}
// --- non-unique indexes ---
else if (line.contains(" INDEX \""))
{
String[] parts = line.split("\"");
keyNames.add(parts[1]);
}
// Two-line form: ADD CONSTRAINT on this line, UNIQUE (...) on the next.
else if (line.contains(" ADD CONSTRAINT ") && schemaFile.get(i+1).contains("UNIQUE"))
{
String[] parts = schemaFile.get(i+1).split(" ");
ukNames.add(parts[2].replace("(", "").replace(")",""));
}
// --- COMMENT ON statements; the text after " IS '" is the comment body ---
else if (line.matches("^COMMENT ON TABLE.*"))
{
String[] parts = line.split(" IS '");
tableCommentValues.add(parts[1].replace("';", "").replace("'", ""));
}
else if (line.matches("^COMMENT ON COLUMN.*"))
{
String[] parts = line.split(" IS '");
colCommentValues.add(parts[1].replace("';", "").replace("'", ""));
}
// --- sequences ---
else if (line.matches("^CREATE SEQUENCE.*"))
{
String[] parts = line.split(" ");
seqNames.add(parts[2]);
}
else if (line.contains("CREATE SEQUENCE \""))
{
String[] parts = line.split("\"");
seqNames.add(parts[1]);
}
// --- foreign keys; three-line ALTER TABLE ... ADD FOREIGN KEY ... REFERENCES form ---
else if (line.contains("ALTER TABLE") && i+1 !=schemaFile.size() && schemaFile.get(i+1).contains("FOREIGN KEY"))
{
String[] parts = line.split(" ");
String fromTable = parts[2];
String fromCol = schemaFile.get(i+1).split("\\(")[1];
fromCol = fromCol.replace(")", "");
String toTable =schemaFile.get(i+2).split("REFERENCES ")[1];
toTable = toTable.split(" ")[0];
String toCol=schemaFile.get(i+2).split(" \\(")[1];
toCol = toCol.split("\\)")[0];
references.add(fromTable.trim() + "." + fromCol.trim() + ";" + toTable.trim() + "." + toCol.trim());
}
// Two-line double-quoted FOREIGN KEY ... REFERENCES form.
else if (line.contains("\" FOREIGN KEY") && schemaFile.get(i+1).contains("REFERENCES \""))
{
String[] parts = line.split("\"");
String fromTable = parts[1];
String fromCol = parts[5];
parts = schemaFile.get(i+1).split("\"");
String toTable =parts[1];
String toCol=parts[3];
references.add(fromTable + "." + fromCol + ";" + toTable + "." + toCol);
}
// Single-line backtick-quoted FOREIGN KEY inside a CREATE TABLE body; the owning
// table is the most recently parsed table name.
else if (line.contains("FOREIGN KEY") && line.contains("REFERENCES"))
{
String[] parts = line.split("REFERENCES");
String[] toparts = parts[1].split("`");
String[] fromparts = parts[0].split("`");
String fromTable = tableNames.get(tableNames.size()-1);
String fromCol = fromparts[3];
String toTable =toparts[1];
String toCol=toparts[3];
references.add(toTable + "." + toCol + ";" + fromTable + "." + fromCol);
}
// --- column declarations (must come after all key/constraint patterns) ---
else if (line.contains(" `"))
{
String[] parts = line.split("`");
colNames.add(parts[1]);
}
// NOTE(review): the two operands of this || look identical; presumably they were
// meant to differ (e.g. in leading-whitespace width) — verify against the original source.
else if (line.contains(" \"") || line.contains(" \""))
{
String[] parts = line.split("\"");
colNames.add(parts[1]);
}
else if (line.matches("\\s\\s\\w*.*") && !line.equals(" ") && !line.contains("REFERENCES") && !line.contains("CONSTRAINT") && !line.contains("\"")&& !line.contains(";"))
{
String[] parts = line.split(" ");
colNames.add(parts[2]);
}
}
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.RecentProjectsManagerBase;
import com.intellij.ide.impl.DataManagerImpl;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.WindowManagerListener;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.welcomeScreen.WelcomeFrame;
import com.intellij.ui.FrameState;
import com.intellij.ui.ScreenUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.sun.jna.platform.WindowUtils;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.peer.ComponentPeer;
import java.awt.peer.FramePeer;
import java.util.*;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
@State(
name = "WindowManager",
defaultStateAsResource = true,
storages = @Storage(value = "window.manager.xml", roamingType = RoamingType.DISABLED)
)
public final class WindowManagerImpl extends WindowManagerEx implements NamedComponent, PersistentStateComponent<Element> {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.WindowManagerImpl");
@NonNls public static final String FULL_SCREEN = "ide.frame.full.screen";
@NonNls private static final String FOCUSED_WINDOW_PROPERTY_NAME = "focusedWindow";
@NonNls private static final String FRAME_ELEMENT = "frame";
@NonNls private static final String EXTENDED_STATE_ATTR = "extended-state";
static {
try {
// Eagerly load the AWT native bridge (JAWT). Best effort: a failure is only
// logged, and the window manager continues without it.
System.loadLibrary("jawt");
}
catch (Throwable t) {
LOG.info("jawt failed to load", t);
}
}
// Whether per-pixel alpha mode is supported; presumably computed lazily (null = not yet determined) — verify at usage sites.
private Boolean myAlphaModeSupported;
private final EventDispatcher<WindowManagerListener> myEventDispatcher = EventDispatcher.create(WindowManagerListener.class);
private final CommandProcessor myCommandProcessor = new CommandProcessor();
// Tracks the focused window; registered as a KeyboardFocusManager property listener in the constructor.
private final WindowWatcher myWindowWatcher = new WindowWatcher();
/**
* That is the default layout.
*/
private final DesktopLayout myLayout = new DesktopLayout();
// null keys must be supported
private final Map<Project, IdeFrameImpl> myProjectToFrame = new HashMap<>();
// Dialogs queued for disposal, processed when the owning project's frame is next activated
// (see myActivationListener in the constructor).
private final Map<Project, Set<JDialog>> myDialogsToDispose = new HashMap<>();
@NotNull
final FrameInfo myDefaultFrameInfo = new FrameInfo();
private final WindowAdapter myActivationListener;
private final DataManager myDataManager;
private final ActionManagerEx myActionManager;
/**
* invoked by reflection
*/
public WindowManagerImpl(DataManager dataManager, ActionManagerEx actionManager) {
myDataManager = dataManager;
myActionManager = actionManager;
if (myDataManager instanceof DataManagerImpl) {
((DataManagerImpl)myDataManager).setWindowManager(this);
}
final Application application = ApplicationManager.getApplication();
if (!application.isUnitTestMode()) {
Disposer.register(application, this::disposeRootFrame);
}
final KeyboardFocusManager keyboardFocusManager = KeyboardFocusManager.getCurrentKeyboardFocusManager();
keyboardFocusManager.addPropertyChangeListener(FOCUSED_WINDOW_PROPERTY_NAME, myWindowWatcher);
myActivationListener = new WindowAdapter() {
@Override
public void windowActivated(WindowEvent e) {
Window activeWindow = e.getWindow();
if (activeWindow instanceof IdeFrameImpl) { // must be
proceedDialogDisposalQueue(((IdeFrameImpl)activeWindow).getProject());
}
}
};
if (UIUtil.hasLeakingAppleListeners()) {
UIUtil.addAwtListener(event -> {
if (event.getID() == ContainerEvent.COMPONENT_ADDED) {
if (((ContainerEvent)event).getChild() instanceof JViewport) {
UIUtil.removeLeakingAppleListeners();
}
}
}, AWTEvent.CONTAINER_EVENT_MASK, application);
}
}
@Override
@NotNull
public IdeFrameImpl[] getAllProjectFrames() {
final Collection<IdeFrameImpl> ideFrames = myProjectToFrame.values();
return ideFrames.toArray(new IdeFrameImpl[0]);
}
@Override
public JFrame findVisibleFrame() {
IdeFrameImpl[] frames = getAllProjectFrames();
return frames.length > 0 ? frames[0] : (JFrame)WelcomeFrame.getInstance();
}
@Override
public void addListener(final WindowManagerListener listener) {
myEventDispatcher.addListener(listener);
}
@Override
public void removeListener(final WindowManagerListener listener) {
myEventDispatcher.removeListener(listener);
}
@Override
public final Rectangle getScreenBounds() {
return ScreenUtil.getAllScreensRectangle();
}
@Override
public Rectangle getScreenBounds(@NotNull Project project) {
final GraphicsEnvironment environment = GraphicsEnvironment.getLocalGraphicsEnvironment();
final Point onScreen = getFrame(project).getLocationOnScreen();
final GraphicsDevice[] devices = environment.getScreenDevices();
for (final GraphicsDevice device : devices) {
final Rectangle bounds = device.getDefaultConfiguration().getBounds();
if (bounds.contains(onScreen)) {
return bounds;
}
}
return null;
}
@Override
public final boolean isInsideScreenBounds(final int x, final int y, final int width) {
return ScreenUtil.getAllScreensShape().contains(x, y, width, 1);
}
@Override
public final boolean isInsideScreenBounds(final int x, final int y) {
return ScreenUtil.getAllScreensShape().contains(x, y);
}
@Override
public final boolean isAlphaModeSupported() {
if (myAlphaModeSupported == null) {
myAlphaModeSupported = calcAlphaModelSupported();
}
return myAlphaModeSupported.booleanValue();
}
private static boolean calcAlphaModelSupported() {
if (AWTUtilitiesWrapper.isTranslucencyAPISupported()) {
return AWTUtilitiesWrapper.isTranslucencySupported(AWTUtilitiesWrapper.TRANSLUCENT);
}
try {
return WindowUtils.isWindowAlphaSupported();
}
catch (Throwable e) {
return false;
}
}
@Override
public final void setAlphaModeRatio(final Window window, final float ratio) {
if (!window.isDisplayable() || !window.isShowing()) {
throw new IllegalArgumentException("window must be displayable and showing. window=" + window);
}
if (ratio < 0.0f || ratio > 1.0f) {
throw new IllegalArgumentException("ratio must be in [0..1] range. ratio=" + ratio);
}
if (!isAlphaModeSupported() || !isAlphaModeEnabled(window)) {
return;
}
setAlphaMode(window, ratio);
}
private static void setAlphaMode(Window window, float ratio) {
try {
if (SystemInfo.isMacOSLeopard) {
if (window instanceof JWindow) {
((JWindow)window).getRootPane().putClientProperty("Window.alpha", 1.0f - ratio);
} else if (window instanceof JDialog) {
((JDialog)window).getRootPane().putClientProperty("Window.alpha", 1.0f - ratio);
} else if (window instanceof JFrame) {
((JFrame)window).getRootPane().putClientProperty("Window.alpha", 1.0f - ratio);
}
}
else if (AWTUtilitiesWrapper.isTranslucencySupported(AWTUtilitiesWrapper.TRANSLUCENT)) {
AWTUtilitiesWrapper.setWindowOpacity(window, 1.0f - ratio);
}
else {
WindowUtils.setWindowAlpha(window, 1.0f - ratio);
}
}
catch (Throwable e) {
LOG.debug(e);
}
}
@Override
public void setWindowMask(final Window window, @Nullable final Shape mask) {
try {
if (AWTUtilitiesWrapper.isTranslucencySupported(AWTUtilitiesWrapper.PERPIXEL_TRANSPARENT)) {
AWTUtilitiesWrapper.setWindowShape(window, mask);
}
else {
WindowUtils.setWindowMask(window, mask);
}
}
catch (Throwable e) {
LOG.debug(e);
}
}
@Override
public void setWindowShadow(Window window, WindowShadowMode mode) {
if (window instanceof JWindow) {
JRootPane root = ((JWindow)window).getRootPane();
root.putClientProperty("Window.shadow", mode == WindowShadowMode.DISABLED ? Boolean.FALSE : Boolean.TRUE);
root.putClientProperty("Window.style", mode == WindowShadowMode.SMALL ? "small" : null);
}
}
@Override
public void resetWindow(final Window window) {
try {
if (!isAlphaModeSupported()) return;
setWindowMask(window, null);
setAlphaMode(window, 0f);
setWindowShadow(window, WindowShadowMode.NORMAL);
}
catch (Throwable e) {
LOG.debug(e);
}
}
@Override
public final boolean isAlphaModeEnabled(final Window window) {
if (!window.isDisplayable() || !window.isShowing()) {
throw new IllegalArgumentException("window must be displayable and showing. window=" + window);
}
return isAlphaModeSupported();
}
@Override
public final void setAlphaModeEnabled(final Window window, final boolean state) {
if (!window.isDisplayable() || !window.isShowing()) {
throw new IllegalArgumentException("window must be displayable and showing. window=" + window);
}
}
@Override
public void hideDialog(JDialog dialog, Project project) {
if (project == null) {
dialog.dispose();
}
else {
IdeFrameImpl frame = getFrame(project);
if (frame.isActive()) {
dialog.dispose();
}
else {
queueForDisposal(dialog, project);
dialog.setVisible(false);
}
}
}
@Override
public void adjustContainerWindow(Component c, Dimension oldSize, Dimension newSize) {
if (c == null) return;
Window wnd = SwingUtilities.getWindowAncestor(c);
if (wnd instanceof JWindow) {
JBPopup popup = (JBPopup)((JWindow)wnd).getRootPane().getClientProperty(JBPopup.KEY);
if (popup != null) {
if (oldSize.height < newSize.height) {
Dimension size = popup.getSize();
size.height += newSize.height - oldSize.height;
popup.setSize(size);
popup.moveToFitScreen();
}
}
}
}
@Override
public final void doNotSuggestAsParent(final Window window) {
myWindowWatcher.doNotSuggestAsParent(window);
}
@Override
public final void dispatchComponentEvent(final ComponentEvent e) {
myWindowWatcher.dispatchComponentEvent(e);
}
@Override
@Nullable
public final Window suggestParentWindow(@Nullable final Project project) {
return myWindowWatcher.suggestParentWindow(project);
}
@Override
public final StatusBar getStatusBar(final Project project) {
IdeFrameImpl frame = myProjectToFrame.get(project);
return frame == null ? null : frame.getStatusBar();
}
@Override
public StatusBar getStatusBar(@NotNull Component c) {
return getStatusBar(c, null);
}
@Override
public StatusBar getStatusBar(@NotNull Component c, @Nullable Project project) {
Component parent = UIUtil.findUltimateParent(c);
if (parent instanceof IdeFrame) {
return ((IdeFrame)parent).getStatusBar().findChild(c);
}
IdeFrame frame = findFrameFor(project);
if (frame != null) {
return frame.getStatusBar().findChild(c);
}
assert false : "Cannot find status bar for " + c;
return null;
}
@Override
public IdeFrame findFrameFor(@Nullable final Project project) {
IdeFrame frame = null;
if (project != null) {
frame = project.isDefault() ? WelcomeFrame.getInstance() : getFrame(project);
if (frame == null) {
frame = myProjectToFrame.get(null);
}
}
else {
Container eachParent = getMostRecentFocusedWindow();
while(eachParent != null) {
if (eachParent instanceof IdeFrame) {
frame = (IdeFrame)eachParent;
break;
}
eachParent = eachParent.getParent();
}
if (frame == null) {
frame = tryToFindTheOnlyFrame();
}
}
return frame;
}
private static IdeFrame tryToFindTheOnlyFrame() {
IdeFrame candidate = null;
final Frame[] all = Frame.getFrames();
for (Frame each : all) {
if (each instanceof IdeFrame) {
if (candidate == null) {
candidate = (IdeFrame)each;
} else {
candidate = null;
break;
}
}
}
return candidate;
}
@Override
public final IdeFrameImpl getFrame(@Nullable final Project project) {
// no assert! otherwise WindowWatcher.suggestParentWindow fails for default project
//LOG.assertTrue(myProject2Frame.containsKey(project));
return myProjectToFrame.get(project);
}
@Override
public IdeFrame getIdeFrame(@Nullable final Project project) {
if (project != null) {
return getFrame(project);
}
final Window window = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
final Component parent = UIUtil.findUltimateParent(window);
if (parent instanceof IdeFrame) return (IdeFrame)parent;
final Frame[] frames = Frame.getFrames();
for (Frame each : frames) {
if (each instanceof IdeFrame) {
return (IdeFrame)each;
}
}
return null;
}
// this method is called when there is some opened project (IDE will not open Welcome Frame, but project)
public void showFrame() {
final IdeFrameImpl frame = new IdeFrameImpl(myActionManager, myDataManager, ApplicationManager.getApplication());
myProjectToFrame.put(null, frame);
Rectangle frameBounds = myDefaultFrameInfo.getBounds();
// set bounds even if maximized because on unmaximize we must restore previous frame bounds
// avoid situations when IdeFrame is out of all screens
if (frameBounds == null || !ScreenUtil.isVisible(frameBounds)) {
frameBounds = ScreenUtil.getMainScreenBounds();
int xOff = frameBounds.width / 8;
int yOff = frameBounds.height / 8;
//noinspection UseDPIAwareInsets
JBInsets.removeFrom(frameBounds, new Insets(yOff, xOff, yOff, xOff));
myDefaultFrameInfo.setBounds(frameBounds);
}
frame.setBounds(frameBounds);
frame.setExtendedState(myDefaultFrameInfo.getExtendedState());
frame.setVisible(true);
addFrameStateListener(frame);
}
@Override
public final IdeFrameImpl allocateFrame(@NotNull Project project) {
LOG.assertTrue(!myProjectToFrame.containsKey(project));
IdeFrameImpl frame = myProjectToFrame.remove(null);
if (frame == null) {
frame = new IdeFrameImpl(myActionManager, myDataManager, ApplicationManager.getApplication());
}
final FrameInfo frameInfo = ProjectFrameBounds.getInstance(project).getRawFrameInfo();
boolean addComponentListener = frameInfo == null;
if (frameInfo != null && frameInfo.getBounds() != null) {
// update default frame info - newly created project frame should be the same as last opened
myDefaultFrameInfo.copyFrom(frameInfo);
Rectangle rawBounds = frameInfo.getBounds();
myDefaultFrameInfo.setBounds(FrameBoundsConverter.convertFromDeviceSpace(rawBounds));
}
if (!(FrameState.isMaximized(frame.getExtendedState()) || FrameState.isFullScreen(frame)) ||
!FrameState.isMaximized(myDefaultFrameInfo.getExtendedState())) // going to quit maximized
{
Rectangle bounds = myDefaultFrameInfo.getBounds();
if (bounds != null) {
frame.setBounds(bounds);
}
}
frame.setExtendedState(myDefaultFrameInfo.getExtendedState());
frame.setProject(project);
myProjectToFrame.put(project, frame);
frame.setVisible(true);
frame.addWindowListener(myActivationListener);
if (addComponentListener) {
if (RecentProjectsManagerBase.getInstanceEx().isBatchOpening()) {
frame.toBack();
}
addFrameStateListener(frame);
}
myEventDispatcher.getMulticaster().frameCreated(frame);
return frame;
}
private void addFrameStateListener(@NotNull IdeFrameImpl frame) {
frame.addComponentListener(new ComponentAdapter() {
@Override
public void componentMoved(@NotNull ComponentEvent e) {
updateFrameBounds(frame);
}
});
}
private void proceedDialogDisposalQueue(Project project) {
Set<JDialog> dialogs = myDialogsToDispose.get(project);
if (dialogs == null) return;
for (JDialog dialog : dialogs) {
dialog.dispose();
}
myDialogsToDispose.put(project, null);
}
private void queueForDisposal(JDialog dialog, Project project) {
Set<JDialog> dialogs = myDialogsToDispose.computeIfAbsent(project, k -> new HashSet<>());
dialogs.add(dialog);
}
@Override
public final void releaseFrame(final IdeFrameImpl frame) {
myEventDispatcher.getMulticaster().beforeFrameReleased(frame);
final Project project = frame.getProject();
LOG.assertTrue(project != null);
frame.removeWindowListener(myActivationListener);
proceedDialogDisposalQueue(project);
frame.setProject(null);
frame.setTitle(null);
frame.setFileTitle(null, null);
myProjectToFrame.remove(project);
if (myProjectToFrame.isEmpty()) {
myProjectToFrame.put(null, frame);
}
else {
Disposer.dispose(frame.getStatusBar());
frame.dispose();
}
}
public final void disposeRootFrame() {
if (myProjectToFrame.size() == 1) {
final IdeFrameImpl rootFrame = myProjectToFrame.remove(null);
if (rootFrame != null) {
// disposing last frame if quitting
rootFrame.dispose();
}
}
}
@Override
public final Window getMostRecentFocusedWindow() {
return myWindowWatcher.getFocusedWindow();
}
@Override
public final Component getFocusedComponent(@NotNull final Window window) {
return myWindowWatcher.getFocusedComponent(window);
}
@Override
@Nullable
public final Component getFocusedComponent(@Nullable final Project project) {
return myWindowWatcher.getFocusedComponent(project);
}
/**
* Private part
*/
@Override
@NotNull
public final CommandProcessor getCommandProcessor() {
return myCommandProcessor;
}
@Override
public void loadState(Element state) {
final Element frameElement = state.getChild(FRAME_ELEMENT);
if (frameElement != null) {
int frameExtendedState = StringUtil.parseInt(frameElement.getAttributeValue(EXTENDED_STATE_ATTR), Frame.NORMAL);
if ((frameExtendedState & Frame.ICONIFIED) > 0) {
frameExtendedState = Frame.NORMAL;
}
myDefaultFrameInfo.setBounds(loadFrameBounds(frameElement));
myDefaultFrameInfo.setExtendedState(frameExtendedState);
}
final Element desktopElement = state.getChild(DesktopLayout.TAG);
if (desktopElement != null) {
myLayout.readExternal(desktopElement);
}
}
@Nullable
private static Rectangle loadFrameBounds(@NotNull Element frameElement) {
Rectangle bounds = ProjectFrameBoundsKt.deserializeBounds(frameElement);
return bounds == null ? null : FrameBoundsConverter.convertFromDeviceSpace(bounds);
}
@Nullable
@Override
public Element getState() {
Element frameState = getFrameState();
if (frameState == null) {
return null;
}
Element state = new Element("state");
state.addContent(frameState);
// Save default layout
Element layoutElement = myLayout.writeExternal(DesktopLayout.TAG);
if (layoutElement != null) {
state.addContent(layoutElement);
}
return state;
}
@Nullable
private Element getFrameState() {
// Save frame bounds
final Project[] projects = ProjectManager.getInstance().getOpenProjects();
if (projects.length == 0) {
return null;
}
Project project = projects[0];
FrameInfo frameInfo = ProjectFrameBoundsKt.getFrameInfoInDeviceSpace(this, project);
if (frameInfo == null) {
return null;
}
final Element frameElement = new Element(FRAME_ELEMENT);
Rectangle rectangle = frameInfo.getBounds();
if (rectangle != null) {
ProjectFrameBoundsKt.serializeBounds(rectangle, frameElement);
}
if (frameInfo.getExtendedState() != Frame.NORMAL) {
frameElement.setAttribute(EXTENDED_STATE_ATTR, Integer.toString(frameInfo.getExtendedState()));
}
return frameElement;
}
int updateFrameBounds(@NotNull IdeFrameImpl frame) {
int extendedState = frame.getExtendedState();
if (SystemInfo.isMacOSLion) {
ComponentPeer peer = frame.getPeer();
if (peer instanceof FramePeer) {
// frame.state is not updated by jdk so get it directly from peer
extendedState = ((FramePeer)peer).getState();
}
}
boolean isMaximized = FrameState.isMaximized(extendedState) ||
isFullScreenSupportedInCurrentOS() && frame.isInFullScreen();
Rectangle frameBounds = myDefaultFrameInfo.getBounds();
boolean usePreviousBounds = isMaximized &&
frameBounds != null &&
frame.getBounds().contains(new Point((int)frameBounds.getCenterX(), (int)frameBounds.getCenterY()));
if (!usePreviousBounds) {
myDefaultFrameInfo.setBounds(frame.getBounds());
}
return extendedState;
}
@Override
public final DesktopLayout getLayout() {
return myLayout;
}
@Override
public final void setLayout(final DesktopLayout layout) {
myLayout.copyFrom(layout);
}
@Override
@NotNull
public final String getComponentName() {
return "WindowManager";
}
public WindowWatcher getWindowWatcher() {
return myWindowWatcher;
}
@Override
public boolean isFullScreenSupportedInCurrentOS() {
return SystemInfo.isMacOSLion || SystemInfo.isWindows || SystemInfo.isXWindow && X11UiUtil.isFullScreenSupported();
}
static boolean isFloatingMenuBarSupported() {
return !SystemInfo.isMac && getInstance().isFullScreenSupportedInCurrentOS();
}
/**
* Converts the frame bounds b/w the user space (JRE-managed HiDPI mode) and the device space (IDE-managed HiDPI mode).
* See {@link UIUtil#isJreHiDPIEnabled()}
*/
static class FrameBoundsConverter {
/**
* @param bounds the bounds in the device space
* @return the bounds in the user space
*/
@NotNull
static Rectangle convertFromDeviceSpace(@NotNull Rectangle bounds) {
Rectangle b = bounds.getBounds();
if (!shouldConvert()) return b;
try {
for (GraphicsDevice gd : GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices()) {
Rectangle devBounds = gd.getDefaultConfiguration().getBounds(); // in user space
scaleUp(devBounds, gd.getDefaultConfiguration()); // to device space
Rectangle2D.Float devBounds2D = new Rectangle2D.Float(devBounds.x, devBounds.y, devBounds.width, devBounds.height);
Point2D.Float center2d = new Point2D.Float(b.x + b.width / 2, b.y + b.height / 2);
if (devBounds2D.contains(center2d)) {
scaleDown(b, gd.getDefaultConfiguration());
break;
}
}
}
catch (HeadlessException ignore) {
}
return b;
}
/**
* @param gc the graphics config
* @param bounds the bounds in the user space
* @return the bounds in the device space
*/
public static Rectangle convertToDeviceSpace(GraphicsConfiguration gc, @NotNull Rectangle bounds) {
Rectangle b = bounds.getBounds();
if (!shouldConvert()) return b;
try {
scaleUp(b, gc);
}
catch (HeadlessException ignore) {
}
return b;
}
private static boolean shouldConvert() {
if (SystemInfo.isLinux || // JRE-managed HiDPI mode is not yet implemented (pending)
SystemInfo.isMac) // JRE-managed HiDPI mode is permanent
{
return false;
}
// device space equals user space
return UIUtil.isJreHiDPIEnabled();
}
private static void scaleUp(@NotNull Rectangle bounds, @NotNull GraphicsConfiguration gc) {
scale(bounds, gc.getBounds(), JBUI.sysScale(gc));
}
private static void scaleDown(@NotNull Rectangle bounds, @NotNull GraphicsConfiguration gc) {
float scale = JBUI.sysScale(gc);
assert scale != 0;
scale(bounds, gc.getBounds(), 1 / scale);
}
private static void scale(@NotNull Rectangle bounds, @NotNull Rectangle deviceBounds, float scale) {
// On Windows, JB SDK transforms the screen bounds to the user space as follows:
// [x, y, width, height] -> [x, y, width / scale, height / scale]
// xy are not transformed in order to avoid overlapping of the screen bounds in multi-dpi env.
// scale the delta b/w xy and deviceBounds.xy
int x = (int)Math.floor(deviceBounds.x + (bounds.x - deviceBounds.x) * scale);
int y = (int)Math.floor(deviceBounds.y + (bounds.y - deviceBounds.y) * scale);
bounds.setBounds(x, y, (int)Math.ceil(bounds.width * scale), (int)Math.ceil(bounds.height * scale));
}
}
}
| |
/*
* Copyright 2006-2009 Odysseus Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.el;
import java.beans.FeatureDescriptor;
import java.util.Iterator;
import java.util.List;
/**
* Defines property resolution behavior on instances of java.util.List. This resolver handles base
* objects of type java.util.List. It accepts any object as a property and coerces that object into
* an integer index into the list. The resulting value is the value in the list at that index. This
* resolver can be constructed in read-only mode, which means that isReadOnly will always return
* true and {@link #setValue(ELContext, Object, Object, Object)} will always throw
* PropertyNotWritableException. ELResolvers are combined together using {@link CompositeELResolver}
* s, to define rich semantics for evaluating an expression. See the javadocs for {@link ELResolver}
* for details.
*/
public class ListELResolver extends ELResolver {
  // When true, isReadOnly() always reports true and setValue() always throws.
  private final boolean readOnly;

  /**
   * Creates a new read/write ListELResolver.
   */
  public ListELResolver() {
    this(false);
  }

  /**
   * Creates a new ListELResolver whose read-only status is determined by the given parameter.
   *
   * @param readOnly
   *            true if this resolver cannot modify lists; false otherwise.
   */
  public ListELResolver(boolean readOnly) {
    this.readOnly = readOnly;
  }

  /**
   * If the base object is a list, returns the most general type that this resolver accepts for
   * the property argument. Otherwise, returns null. Assuming the base is a List, this method will
   * always return Integer.class. This is because Lists accept integers as their index.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @return null if base is not a List; otherwise Integer.class.
   */
  @Override
  public Class<?> getCommonPropertyType(ELContext context, Object base) {
    return isResolvable(base) ? Integer.class : null;
  }

  /**
   * Always returns null, since there is no reason to iterate through the set of all integers.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @return null.
   */
  @Override
  public Iterator<FeatureDescriptor> getFeatureDescriptors(ELContext context, Object base) {
    return null;
  }

  /**
   * If the base object is a list, returns the most general acceptable type for a value in this
   * list. If the base is a List, the propertyResolved property of the ELContext object must be
   * set to true by this resolver, before returning. If this property is not true after this
   * method is called, the caller should ignore the return value. Assuming the base is a List,
   * this method will always return Object.class. This is because Lists accept any object as an
   * element.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @param property
   *            The index of the element in the list to return the acceptable type for. Will be
   *            coerced into an integer, but otherwise ignored by this resolver.
   * @return If the propertyResolved property of ELContext was set to true, then the most general
   *         acceptable type; otherwise undefined.
   * @throws PropertyNotFoundException
   *             if the given index is out of bounds for this list.
   * @throws IllegalArgumentException
   *             if the property could not be coerced into an integer.
   * @throws NullPointerException
   *             if context is null
   * @throws ELException
   *             if an exception was thrown while performing the property or variable resolution.
   *             The thrown exception must be included as the cause property of this exception, if
   *             available.
   */
  @Override
  public Class<?> getType(ELContext context, Object base, Object property) {
    if (context == null) {
      throw new NullPointerException("context is null");
    }
    Class<?> result = null;
    if (isResolvable(base)) {
      // validate the index (throws PropertyNotFoundException when out of bounds)
      toIndex((List<?>) base, property);
      result = Object.class;
      context.setPropertyResolved(true);
    }
    return result;
  }

  /**
   * If the base object is a list, returns the value at the given index. The index is specified by
   * the property argument, and coerced into an integer. If the coercion could not be performed,
   * an IllegalArgumentException is thrown. If the index is out of bounds, null is returned. If
   * the base is a List, the propertyResolved property of the ELContext object must be set to true
   * by this resolver, before returning. If this property is not true after this method is called,
   * the caller should ignore the return value.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @param property
   *            The index of the element in the list to return the acceptable type for. Will be
   *            coerced into an integer, but otherwise ignored by this resolver.
   * @return If the propertyResolved property of ELContext was set to true, then the value at the
   *         given index or null if the index was out of bounds. Otherwise, undefined.
   * @throws PropertyNotFoundException
   *             if the given index is out of bounds for this list.
   * @throws IllegalArgumentException
   *             if the property could not be coerced into an integer.
   * @throws NullPointerException
   *             if context is null
   * @throws ELException
   *             if an exception was thrown while performing the property or variable resolution.
   *             The thrown exception must be included as the cause property of this exception, if
   *             available.
   */
  @Override
  public Object getValue(ELContext context, Object base, Object property) {
    if (context == null) {
      throw new NullPointerException("context is null");
    }
    Object result = null;
    if (isResolvable(base)) {
      // pass null so an out-of-bounds index yields null here instead of throwing
      int index = toIndex(null, property);
      List<?> list = (List<?>) base;
      result = index < 0 || index >= list.size() ? null : list.get(index);
      context.setPropertyResolved(true);
    }
    return result;
  }

  /**
   * If the base object is a list, returns whether a call to
   * {@link #setValue(ELContext, Object, Object, Object)} will always fail. If the base is a List,
   * the propertyResolved property of the ELContext object must be set to true by this resolver,
   * before returning. If this property is not true after this method is called, the caller should
   * ignore the return value. If this resolver was constructed in read-only mode, this method will
   * always return true. If a List was created using java.util.Collections.unmodifiableList(List),
   * this method must return true. Unfortunately, there is no Collections API method to detect
   * this. However, an implementation can create a prototype unmodifiable List and query its
   * runtime type to see if it matches the runtime type of the base object as a workaround.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @param property
   *            The index of the element in the list to return the acceptable type for. Will be
   *            coerced into an integer, but otherwise ignored by this resolver.
   * @return If the propertyResolved property of ELContext was set to true, then true if calling
   *         the setValue method will always fail or false if it is possible that such a call may
   *         succeed; otherwise undefined.
   * @throws PropertyNotFoundException
   *             if the given index is out of bounds for this list.
   * @throws IllegalArgumentException
   *             if the property could not be coerced into an integer.
   * @throws NullPointerException
   *             if context is null
   * @throws ELException
   *             if an exception was thrown while performing the property or variable resolution.
   *             The thrown exception must be included as the cause property of this exception, if
   *             available.
   */
  @Override
  public boolean isReadOnly(ELContext context, Object base, Object property) {
    if (context == null) {
      throw new NullPointerException("context is null");
    }
    if (isResolvable(base)) {
      // validate the index (throws PropertyNotFoundException when out of bounds)
      toIndex((List<?>) base, property);
      context.setPropertyResolved(true);
    }
    return readOnly;
  }

  /**
   * If the base object is a list, attempts to set the value at the given index with the given
   * value. The index is specified by the property argument, and coerced into an integer. If the
   * coercion could not be performed, an IllegalArgumentException is thrown. If the index is out
   * of bounds, a PropertyNotFoundException is thrown. If the base is a List, the propertyResolved
   * property of the ELContext object must be set to true by this resolver, before returning. If
   * this property is not true after this method is called, the caller can safely assume no value
   * was set. If this resolver was constructed in read-only mode, this method will always throw
   * PropertyNotWritableException. If a List was created using
   * java.util.Collections.unmodifiableList(List), this method must throw
   * PropertyNotWritableException. Unfortunately, there is no Collections API method to detect
   * this. However, an implementation can create a prototype unmodifiable List and query its
   * runtime type to see if it matches the runtime type of the base object as a workaround.
   *
   * @param context
   *            The context of this evaluation.
   * @param base
   *            The list to analyze. Only bases of type List are handled by this resolver.
   * @param property
   *            The index of the element in the list to set. Will be coerced into an integer, but
   *            otherwise ignored by this resolver.
   * @param value
   *            The value to be set at the given index.
   * @throws ClassCastException
   *             if the class of the specified element prevents it from being added to this list.
   * @throws PropertyNotFoundException
   *             if the given index is out of bounds for this list.
   * @throws PropertyNotWritableException
   *             if this resolver was constructed in read-only mode, or if the set operation is
   *             not supported by the underlying list.
   * @throws IllegalArgumentException
   *             if the property could not be coerced into an integer.
   * @throws NullPointerException
   *             if context is null
   * @throws ELException
   *             if an exception was thrown while performing the property or variable resolution.
   *             The thrown exception must be included as the cause property of this exception, if
   *             available.
   */
  @Override
  @SuppressWarnings("unchecked")
  public void setValue(ELContext context, Object base, Object property, Object value) {
    if (context == null) {
      throw new NullPointerException("context is null");
    }
    if (isResolvable(base)) {
      if (readOnly) {
        throw new PropertyNotWritableException("resolver is read-only");
      }
      List<Object> list = (List<Object>) base;
      int index = toIndex(list, property);
      try {
        list.set(index, value);
      } catch (UnsupportedOperationException e) {
        // e.g. Collections.unmodifiableList(...) — preserve the cause
        throw new PropertyNotWritableException(e);
      } catch (ArrayStoreException e) {
        throw new IllegalArgumentException(e);
      }
      context.setPropertyResolved(true);
    }
  }

  /**
   * Test whether the given base should be resolved by this ELResolver.
   *
   * @param base
   *            The object to test; may be null.
   * @return true if base is a List, false otherwise.
   */
  private static boolean isResolvable(Object base) {
    return base instanceof List<?>;
  }

  /**
   * Convert the given property to an index in (list) base.
   *
   * @param base
   *            The list the index will be used against, or null to skip the bounds check.
   * @param property
   *            The property to coerce: a Number, a decimal String, a Character (char value) or a
   *            Boolean (true = 1, false = 0).
   * @return The index of property in base.
   * @throws IllegalArgumentException
   *             if property cannot be coerced to an integer.
   * @throws PropertyNotFoundException
   *             if base is not null and the computed index is out of bounds for base.
   */
  private static int toIndex(List<?> base, Object property) {
    int index = 0;
    if (property instanceof Number) {
      index = ((Number) property).intValue();
    } else if (property instanceof String) {
      try {
        // parseInt avoids the needless Integer boxing of Integer.valueOf
        index = Integer.parseInt((String) property);
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Cannot parse list index: " + property);
      }
    } else if (property instanceof Character) {
      index = ((Character) property).charValue();
    } else if (property instanceof Boolean) {
      index = ((Boolean) property).booleanValue() ? 1 : 0;
    } else {
      throw new IllegalArgumentException("Cannot coerce property to list index: " + property);
    }
    if (base != null && (index < 0 || index >= base.size())) {
      throw new PropertyNotFoundException("List index out of bounds: " + index);
    }
    return index;
  }
}
| |
/*
* Copyright 2019 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.endpoint;
import static java.util.Objects.requireNonNull;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nullable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.linecorp.armeria.client.Endpoint;
import com.linecorp.armeria.client.endpoint.FileWatcherRegistry.FileWatchRegisterKey;
/**
* A {@link Properties} backed {@link EndpointGroup}. The list of {@link Endpoint}s are loaded from the
* {@link Properties}.
*/
public final class PropertiesEndpointGroup extends DynamicEndpointGroup {
private static FileWatcherRegistry registry = new FileWatcherRegistry();
/**
* Resets the registry for {@link PropertiesEndpointGroup}.
* @throws Exception when an exception occurs while closing the {@link FileWatcherRegistry}.
*/
@VisibleForTesting
static void resetRegistry() throws Exception {
registry.close();
registry = new FileWatcherRegistry();
}
/**
* Returns a new {@link PropertiesEndpointGroup} created from the specified classpath resource.
* The value of each property whose name starts with {@code endpointKeyPrefix} will be parsed with
* {@link Endpoint#parse(String)}, and then loaded into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param resourceName the name of the resource where the list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroup of(ClassLoader classLoader, String resourceName,
String endpointKeyPrefix) {
return builder(classLoader, resourceName, endpointKeyPrefix).build();
}
/**
* Returns a new {@link PropertiesEndpointGroup} created from the specified {@link Properties}.
* The value of each property whose name starts with {@code endpointKeyPrefix} will be parsed with
* {@link Endpoint#parse(String)}, and then loaded into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param properties the {@link Properties} where the list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroup of(Properties properties, String endpointKeyPrefix) {
return builder(properties, endpointKeyPrefix).build();
}
/**
* Returns a new {@link PropertiesEndpointGroup} created from the file at the specified {@link Path}.
* Any updates in the file will trigger a dynamic reload. The value of each property whose name starts
* with {@code endpointKeyPrefix} will be parsed with {@link Endpoint#parse(String)}, and then loaded
* into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param path the path of the file where list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroup of(Path path, String endpointKeyPrefix) {
return builder(path, endpointKeyPrefix).build();
}
/**
* Returns a new {@link PropertiesEndpointGroupBuilder} created from the specified classpath resource.
* The value of each property whose name starts with {@code endpointKeyPrefix} will be parsed with
* {@link Endpoint#parse(String)}, and then loaded into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param resourceName the name of the resource where the list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroupBuilder builder(ClassLoader classLoader, String resourceName,
String endpointKeyPrefix) {
requireNonNull(classLoader, "classLoader");
requireNonNull(resourceName, "resourceName");
requireNonNull(endpointKeyPrefix, "endpointKeyPrefix");
return new PropertiesEndpointGroupBuilder(classLoader, resourceName, endpointKeyPrefix);
}
/**
* Returns a new {@link PropertiesEndpointGroupBuilder} created from the specified {@link Properties}.
* The value of each property whose name starts with {@code endpointKeyPrefix} will be parsed with
* {@link Endpoint#parse(String)}, and then loaded into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param properties the {@link Properties} where the list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroupBuilder builder(Properties properties, String endpointKeyPrefix) {
requireNonNull(properties, "properties");
requireNonNull(endpointKeyPrefix, "endpointKeyPrefix");
return new PropertiesEndpointGroupBuilder(properties, endpointKeyPrefix);
}
/**
* Returns a new {@link PropertiesEndpointGroupBuilder} created from the file at the specified
* {@link Path}. Any updates in the file will trigger a dynamic reload. The value of each property
* whose name starts with {@code endpointKeyPrefix} will be parsed with {@link Endpoint#parse(String)},
* and then loaded into the {@link PropertiesEndpointGroup}, e.g.
*
* <pre>{@code
* # endpointKeyPrefix = 'example.hosts.'
* example.hosts.0=example1.com:36462
* example.hosts.1=example2.com:36462
* example.hosts.2=example3.com:36462
* }</pre>
*
* @param path the path of the file where list of {@link Endpoint}s is loaded from
* @param endpointKeyPrefix the property name prefix
*/
public static PropertiesEndpointGroupBuilder builder(Path path, String endpointKeyPrefix) {
requireNonNull(path, "path");
requireNonNull(endpointKeyPrefix, "endpointKeyPrefix");
return new PropertiesEndpointGroupBuilder(path, endpointKeyPrefix);
}
@Nullable
private FileWatchRegisterKey watchRegisterKey;
PropertiesEndpointGroup(EndpointSelectionStrategy selectionStrategy, List<Endpoint> endpoints) {
super(selectionStrategy);
setEndpoints(endpoints);
}
PropertiesEndpointGroup(EndpointSelectionStrategy selectionStrategy,
Path path, String endpointKeyPrefix, int defaultPort) {
super(selectionStrategy);
setEndpoints(loadEndpoints(
path,
requireNonNull(endpointKeyPrefix, "endpointKeyPrefix"),
defaultPort));
watchRegisterKey = registry.register(path, () ->
setEndpoints(loadEndpoints(path, endpointKeyPrefix, defaultPort)));
}
private static List<Endpoint> loadEndpoints(Path path, String endpointKeyPrefix, int defaultPort) {
try (InputStream in = Files.newInputStream(path)) {
final Properties props = new Properties();
props.load(in);
return loadEndpoints(props, endpointKeyPrefix, defaultPort);
} catch (IOException e) {
throw new IllegalArgumentException("failed to load: " + path, e);
}
}
private static List<Endpoint> loadEndpoints(Properties properties, String endpointKeyPrefix,
int defaultPort) {
if (!endpointKeyPrefix.endsWith(".")) {
endpointKeyPrefix += ".";
}
final List<Endpoint> newEndpoints = new ArrayList<>();
for (Entry<Object, Object> e : properties.entrySet()) {
final String key = (String) e.getKey();
final String value = (String) e.getValue();
if (key.startsWith(endpointKeyPrefix)) {
final Endpoint endpoint = Endpoint.parse(value);
newEndpoints.add(defaultPort == 0 ? endpoint : endpoint.withDefaultPort(defaultPort));
}
}
return ImmutableList.copyOf(newEndpoints);
}
@Override
protected void doCloseAsync(CompletableFuture<?> future) {
if (watchRegisterKey != null) {
registry.unregister(watchRegisterKey);
}
future.complete(null);
}
}
| |
package aQute.bnd.differ;
import static aQute.bnd.service.diff.Delta.CHANGED;
import static aQute.bnd.service.diff.Delta.IGNORED;
import static aQute.bnd.service.diff.Delta.MAJOR;
import static aQute.bnd.service.diff.Delta.MICRO;
import static aQute.bnd.service.diff.Delta.MINOR;
import static aQute.bnd.service.diff.Type.ACCESS;
import static aQute.bnd.service.diff.Type.ANNOTATED;
import static aQute.bnd.service.diff.Type.ANNOTATION;
import static aQute.bnd.service.diff.Type.API;
import static aQute.bnd.service.diff.Type.CLASS;
import static aQute.bnd.service.diff.Type.CLASS_VERSION;
import static aQute.bnd.service.diff.Type.CONSTANT;
import static aQute.bnd.service.diff.Type.ENUM;
import static aQute.bnd.service.diff.Type.EXTENDS;
import static aQute.bnd.service.diff.Type.FIELD;
import static aQute.bnd.service.diff.Type.IMPLEMENTS;
import static aQute.bnd.service.diff.Type.INTERFACE;
import static aQute.bnd.service.diff.Type.METHOD;
import static aQute.bnd.service.diff.Type.PACKAGE;
import static aQute.bnd.service.diff.Type.PROPERTY;
import static aQute.bnd.service.diff.Type.RETURN;
import static aQute.bnd.service.diff.Type.VERSION;
import java.lang.reflect.Array;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.jar.Manifest;
import aQute.bnd.header.Attrs;
import aQute.bnd.header.OSGiHeader;
import aQute.bnd.osgi.Analyzer;
import aQute.bnd.osgi.Annotation;
import aQute.bnd.osgi.ClassDataCollector;
import aQute.bnd.osgi.Clazz;
import aQute.bnd.osgi.Clazz.JAVA;
import aQute.bnd.osgi.Clazz.MethodDef;
import aQute.bnd.osgi.Constants;
import aQute.bnd.osgi.Descriptors.PackageRef;
import aQute.bnd.osgi.Descriptors.TypeRef;
import aQute.bnd.osgi.Instructions;
import aQute.bnd.osgi.Packages;
import aQute.bnd.service.diff.Delta;
import aQute.bnd.service.diff.Type;
import aQute.bnd.version.Version;
import aQute.lib.collections.MultiMap;
import aQute.libg.generics.Create;
/**
* An element that compares the access field in a binary compatible way. This
* element is used for classes, methods, constructors, and fields. For that
* reason we also included the only method that uses this class as a static
* method.
* <p>
* Packages
* <ul>
* <li>MAJOR - Remove a public type
* <li>MINOR - Add a public class
* <li>MINOR - Add an interface
* <li>MINOR - Add a method to a class
* <li>MINOR - Add a method to a provider interface
* <li>MAJOR - Add a method to a consumer interface
* <li>MINOR - Add a field
* <li>MICRO - Add an annotation to a member
* <li>MINOR - Change the value of a constant
* <li>MICRO - -abstract
* <li>MICRO - -final
* <li>MICRO - -protected
* <li>MAJOR - +abstract
* <li>MAJOR - +final
* <li>MAJOR - +protected
* </ul>
*/
class JavaElement {
    // Member element types that flow from a super type into its subtypes when flattening.
    final static EnumSet<Type> INHERITED = EnumSet.of(FIELD, METHOD, EXTENDS, IMPLEMENTS);
    // Access-modifier marker elements. The two Delta arguments are (add, remove) severities.
    private static final Element PROTECTED = new Element(ACCESS, "protected", null, MAJOR, MINOR,
        null);
    private static final Element PROTECTED_PROVIDER = new Element(ACCESS, "protected", null, MINOR, MINOR,
        null);
    private static final Element STATIC = new Element(ACCESS, "static", null, MAJOR, MAJOR, null);
    private static final Element ABSTRACT = new Element(ACCESS, "abstract", null, MAJOR, MINOR, null);
    private static final Element FINAL = new Element(ACCESS, "final", null, MAJOR, MINOR, null);
    // Common return type elements
    static final Element VOID_R = new Element(RETURN, "void");
    static final Element BOOLEAN_R = new Element(RETURN, "boolean");
    static final Element BYTE_R = new Element(RETURN, "byte");
    static final Element SHORT_R = new Element(RETURN, "short");
    static final Element CHAR_R = new Element(RETURN, "char");
    static final Element INT_R = new Element(RETURN, "int");
    static final Element LONG_R = new Element(RETURN, "long");
    static final Element FLOAT_R = new Element(RETURN, "float");
    static final Element DOUBLE_R = new Element(RETURN, "double");
    static final Element OBJECT_R = new Element(RETURN, "java.lang.Object");
    final Analyzer analyzer;
    // Per-package instructions naming which types are "providers" (PROVIDER_TYPE directive).
    final Map<PackageRef,Instructions> providerMatcher = Create.map();
    // Types (e.g. package-private inner classes) that must be filtered out of the API.
    final Set<TypeRef> notAccessible = Create.set();
    // Clazz -> Element memoization; also guards against recursion through super types.
    final Map<Object,Element> cache = Create.map();
    // Class elements gathered per package; pruned for accessibility in getLocalAPI().
    final MultiMap<PackageRef,Element> packages;
    // Class-file (Java) versions seen while parsing; reported as CLASS_VERSION elements.
    final Set<JAVA> javas = Create.set();
    // The exported packages (or all contained packages when there is no bundle manifest).
    final Packages exports;
    /**
     * Create an element for the API. We take the exported packages and traverse
     * those for their classes. If there is no manifest or it does not describe
     * a bundle we assume the whole contents is exported.
     *
     * @param analyzer the analyzer whose jar/classspace is inspected
     */
    JavaElement(Analyzer analyzer) throws Exception {
        this.analyzer = analyzer;
        Manifest manifest = analyzer.getJar().getManifest();
        if (manifest != null && manifest.getMainAttributes().getValue(Constants.BUNDLE_MANIFESTVERSION) != null) {
            exports = new Packages();
            for (Map.Entry<String,Attrs> entry : OSGiHeader
                .parseHeader(manifest.getMainAttributes().getValue(Constants.EXPORT_PACKAGE)).entrySet())
                exports.put(analyzer.getPackageRef(entry.getKey()), entry.getValue());
        } else
            exports = analyzer.getContained();
        //
        // We have to gather the -providers and parse them into instructions
        // so we can efficiently match them during class parsing to find
        // out who the providers and consumers are
        //
        for (Entry<PackageRef,Attrs> entry : exports.entrySet()) {
            String value = entry.getValue().get(Constants.PROVIDER_TYPE_DIRECTIVE);
            if (value != null) {
                providerMatcher.put(entry.getKey(), new Instructions(value));
            }
        }
        // we now need to gather all the packages but without
        // creating the packages yet because we do not yet know
        // which classes are accessible
        packages = new MultiMap<PackageRef,Element>();
        for (Clazz c : analyzer.getClassspace().values()) {
            if (c.isSynthetic())
                continue;
            if (c.isPublic() || c.isProtected()) {
                PackageRef packageName = c.getClassName().getPackageRef();
                if (exports.containsKey(packageName)) {
                    Element cdef = classElement(c);
                    packages.add(packageName, cdef);
                }
            }
        }
    }
    /**
     * Analyze the jar held by the analyzer and return its complete API tree.
     */
    static Element getAPI(Analyzer analyzer) throws Exception {
        analyzer.analyze();
        JavaElement te = new JavaElement(analyzer);
        return te.getLocalAPI();
    }
    /**
     * Assemble the API element: prune classes marked not accessible, attach the
     * package version (without qualifier) and the seen class-file versions, and
     * wrap everything in a single root API element.
     */
    private Element getLocalAPI() throws Exception {
        Set<Element> result = new HashSet<Element>();
        for (Map.Entry<PackageRef,List<Element>> entry : packages.entrySet()) {
            List<Element> set = entry.getValue();
            // Remove classes that turned out to be inaccessible (e.g. private inner classes).
            for (Iterator<Element> i = set.iterator(); i.hasNext();) {
                if (notAccessible.contains(analyzer.getTypeRefFromFQN(i.next().getName())))
                    i.remove();
            }
            String version = exports.get(entry.getKey()).get(Constants.VERSION_ATTRIBUTE);
            if (version != null) {
                Version v = new Version(version);
                set.add(new Element(VERSION, v.getWithoutQualifier().toString(), null, IGNORED, IGNORED, null));
            }
            Element pd = new Element(PACKAGE, entry.getKey().getFQN(), set, MINOR, MAJOR, null);
            result.add(pd);
        }
        for (JAVA java : javas) {
            result.add(new Element(CLASS_VERSION, java.toString(), null, CHANGED, CHANGED, null));
        }
        return new Element(API, "<api>", result, CHANGED, CHANGED, null);
    }
    /**
     * Calculate the class element. This requires parsing the class file and
     * finding all the methods that were added etc. The parsing will take super
     * interfaces and super classes into account. For this reason it maintains a
     * queue of classes/interfaces to parse.
     *
     * @param clazz the class to convert into an Element (memoized in {@link #cache})
     * @throws Exception
     */
    Element classElement(final Clazz clazz) throws Exception {
        Element e = cache.get(clazz);
        if (e != null)
            return e;
        final Set<Element> members = Create.set();
        final Set<MethodDef> methods = Create.set();
        final Set<Clazz.FieldDef> fields = Create.set();
        final MultiMap<Clazz.Def,Element> annotations = new MultiMap<Clazz.Def,Element>();
        final TypeRef name = clazz.getClassName();
        final String fqn = name.getFQN();
        final String shortName = name.getShortName();
        // Check if this clazz is actually a provider or not
        // providers must be listed in the exported package in the
        // PROVIDER_TYPE directive.
        Instructions matchers = providerMatcher.get(name.getPackageRef());
        boolean p = matchers != null && matchers.matches(shortName);
        final AtomicBoolean provider = new AtomicBoolean(p);
        //
        // Check if we already had this clazz in the cache
        //
        Element before = cache.get(clazz); // for super classes
        if (before != null)
            return before;
        clazz.parseClassFileWithCollector(new ClassDataCollector() {
            // True once all members were reported; class-level annotations follow.
            boolean memberEnd;
            // The most recent accessible method/field; target for constants/annotations.
            Clazz.FieldDef last;
            @Override
            public void version(int minor, int major) {
                javas.add(Clazz.JAVA.getJava(major, minor));
            }
            @Override
            public void method(MethodDef defined) {
                // Only public/protected members are part of the API.
                if ((defined.isProtected() || defined.isPublic())) {
                    last = defined;
                    methods.add(defined);
                } else {
                    last = null;
                }
            }
            @Override
            public void deprecated() {
                if (memberEnd)
                    clazz.setDeprecated(true);
                else if (last != null)
                    last.setDeprecated(true);
            }
            @Override
            public void field(Clazz.FieldDef defined) {
                if (defined.isProtected() || defined.isPublic()) {
                    last = defined;
                    fields.add(defined);
                } else
                    last = null;
            }
            @Override
            public void constant(Object o) {
                if (last != null) {
                    // Must be accessible now
                    last.setConstant(o);
                }
            }
            @Override
            public void extendsClass(TypeRef name) throws Exception {
                String comment = null;
                if (!clazz.isInterface())
                    comment = inherit(members, name);
                Clazz c = analyzer.findClass(name);
                if ((c == null || c.isPublic()) && !name.isObject())
                    members.add(new Element(EXTENDS, name.getFQN(), null, MICRO, MAJOR, comment));
            }
            @Override
            public void implementsInterfaces(TypeRef names[]) throws Exception {
                Arrays.sort(names); // ignore type reordering
                for (TypeRef name : names) {
                    String comment = null;
                    if (clazz.isInterface() || clazz.isAbstract())
                        comment = inherit(members, name);
                    members.add(new Element(IMPLEMENTS, name.getFQN(), null, MINOR, MAJOR, comment));
                }
            }
            /**
             * Lazily computed set of the inheritable java.lang.Object members.
             */
            Set<Element> OBJECT = Create.set();
            // Copy the inheritable (non-static, non-constructor) members of the
            // given super type into `members`. Object gets special handling so
            // its well-known methods (wait/notify/getClass/...) are excluded.
            public String inherit(final Set<Element> members, TypeRef name) throws Exception {
                if (name.isObject()) {
                    if (OBJECT.isEmpty()) {
                        Clazz c = analyzer.findClass(name);
                        if (c == null) {
                            // Bnd fails on Java 9 class files #1598
                            // Caused by Java 9 not making class resources
                            // available
                            return null;
                        }
                        Element s = classElement(c);
                        for (Element child : s.children) {
                            if (INHERITED.contains(child.type)) {
                                String n = child.getName();
                                if (child.type == METHOD) {
                                    if (n.startsWith("<init>") || "getClass()".equals(child.getName())
                                        || n.startsWith("wait(") || n.startsWith("notify(")
                                        || n.startsWith("notifyAll("))
                                        continue;
                                }
                                if (isStatic(child))
                                    continue;
                                OBJECT.add(child);
                            }
                        }
                    }
                    members.addAll(OBJECT);
                } else {
                    Clazz c = analyzer.findClass(name);
                    if (c == null) {
                        // Super type not on the class path: fall back to Object's members.
                        return inherit(members, analyzer.getTypeRef("java/lang/Object"));
                    }
                    Element s = classElement(c);
                    for (Element child : s.children) {
                        if (isStatic(child))
                            continue;
                        if (INHERITED.contains(child.type) && !child.name.startsWith("<")) {
                            members.add(child);
                        }
                    }
                }
                return null;
            }
            // True when the element carries the "static" access marker child.
            private boolean isStatic(Element child) {
                boolean isStatic = child.get("static") != null;
                return isStatic;
            }
            /**
             * Deprecated annotations and Provider/Consumer Type (both bnd and
             * OSGi) are treated special. Other annotations are turned into a
             * tree. Starting with ANNOTATED, and then properties. A property is
             * a PROPERTY property or an ANNOTATED property if it is an
             * annotation. If it is an array, the key is suffixed with the
             * index.
             *
             * <pre>
             * public @interface Outer { Inner[] value(); }
             * public @interface Inner { String[] value(); } @Outer(
             * { @Inner("1","2"}) } class Xyz {} ANNOTATED Outer
             * (CHANGED/CHANGED) ANNOTATED Inner (CHANGED/CHANGED) PROPERTY
             * value.0=1 (CHANGED/CHANGED) PROPERTY value.1=2 (CHANGED/CHANGED)
             * </pre>
             */
            @Override
            public void annotation(Annotation annotation) {
                if (Deprecated.class.getName().equals(annotation.getName().getFQN())) {
                    if (memberEnd)
                        clazz.setDeprecated(true);
                    else if (last != null)
                        last.setDeprecated(true);
                    return;
                }
                Element e = annotatedToElement(annotation);
                if (memberEnd) {
                    members.add(e);
                    //
                    // Check for the provider/consumer. We use strings because
                    // these are not officially
                    // released yet
                    //
                    String name = annotation.getName().getFQN();
                    if ("aQute.bnd.annotation.ProviderType".equals(name)
                        || "org.osgi.annotation.versioning.ProviderType".equals(name)) {
                        provider.set(true);
                    } else if ("aQute.bnd.annotation.ConsumerType".equals(name)
                        || "org.osgi.annotation.versioning.ConsumerType".equals(name)) {
                        provider.set(false);
                    }
                } else if (last != null)
                    annotations.add(last, e);
            }
            /*
             * Return an ANNOTATED element for this annotation. An ANNOTATED
             * element contains either PROPERTY children or ANNOTATED children.
             */
            private Element annotatedToElement(Annotation annotation) {
                Collection<Element> properties = Create.set();
                for (String key : annotation.keySet()) {
                    addAnnotationMember(properties, key, annotation.get(key));
                }
                return new Element(ANNOTATED, annotation.getName().getFQN(), properties, CHANGED, CHANGED, null);
            }
            /*
             * This method detects 3 cases: An Annotation, which means it
             * creates a new child ANNOTATED element, an array, which means it
             * will repeat recursively but suffixes the key with the index, or a
             * simple value which is turned into a string.
             */
            private void addAnnotationMember(Collection<Element> properties, String key, Object member) {
                if (member instanceof Annotation) {
                    properties.add(annotatedToElement((Annotation) member));
                } else if (member.getClass().isArray()) {
                    int l = Array.getLength(member);
                    for (int i = 0; i < l; i++) {
                        addAnnotationMember(properties, key + "." + i, Array.get(member, i));
                    }
                } else {
                    StringBuilder sb = new StringBuilder();
                    sb.append(key);
                    sb.append('=');
                    if (member instanceof String) {
                        sb.append("'");
                        sb.append(member);
                        sb.append("'");
                    } else
                        sb.append(member);
                    properties.add(new Element(PROPERTY, sb.toString(), null, CHANGED, CHANGED, null));
                }
            }
            @Override
            public void innerClass(TypeRef innerClass, TypeRef outerClass, String innerName, int innerClassAccessFlags)
                throws Exception {
                Clazz clazz = analyzer.findClass(innerClass);
                if (clazz != null)
                    clazz.setInnerAccess(innerClassAccessFlags);
                // Non-public/non-protected inner classes are not part of the API.
                if (Modifier.isProtected(innerClassAccessFlags) || Modifier.isPublic(innerClassAccessFlags))
                    return;
                notAccessible.add(innerClass);
            }
            @Override
            public void memberEnd() {
                memberEnd = true;
            }
        });
        // This is the heart of the semantic versioning. If we
        // add or remove a method from an interface then
        Delta add;
        Delta remove;
        Type type;
        // Calculate the type of the clazz. A class
        // can be an interface, class, enum, or annotation
        if (clazz.isInterface())
            if (clazz.isAnnotation())
                type = ANNOTATION;
            else
                type = INTERFACE;
        else if (clazz.isEnum())
            type = ENUM;
        else
            type = CLASS;
        if (type == INTERFACE) {
            if (provider.get()) {
                // Adding a method for a provider is not an issue
                // because it must be aware of the changes
                add = MINOR;
                // Removing a method influences consumers since they
                // tend to call this guy.
                remove = MAJOR;
            } else {
                // Adding a method is a major change
                // because the consumer has to implement it
                // or the provider will call a non existent
                // method on the consumer
                add = MAJOR;
                // Removing a method is not an issue for
                // providers, however, consumers could potentially
                // call through this interface :-(
                remove = MAJOR;
            }
        } else {
            // Adding a method to a class can never do any harm
            // except when the class is extended and the new
            // method clashes with the new method. That is
            // why API classes in general should be final, at
            // least not extended by consumers.
            add = MINOR;
            // Removing it will likely hurt consumers
            remove = MAJOR;
        }
        for (MethodDef m : methods) {
            if (m.isSynthetic()) { // Ignore synthetic methods
                continue;
            }
            Collection<Element> children = annotations.get(m);
            if (children == null)
                children = new HashSet<Element>();
            access(children, m.getAccess(), m.isDeprecated(), provider.get());
            // A final class cannot be extended, ergo,
            // all methods defined in it are by definition
            // final. However, marking them final (either
            // on the method or inheriting it from the class)
            // will create superfluous changes if we
            // override a method from a super class that was not
            // final. So we actually remove the final for methods
            // in a final class.
            if (clazz.isFinal())
                children.remove(FINAL);
            children.add(getReturn(m.getType()));
            //
            // Java default methods are concrete implementations of methods
            // on an interface.
            //
            if (clazz.isInterface() && !m.isAbstract()) {
                //
                // We have a Java 8 default method!
                // Such a method is always a minor update
                //
                add = MINOR;
            }
            String signature = m.getName() + toString(m.getPrototype());
            Element member = new Element(METHOD, signature, children, add,
                provider.get() && !m.isPublic() ? MINOR : remove, null);
            // Later duplicates (e.g. re-parsed inherited members) replace earlier ones.
            if (!members.add(member)) {
                members.remove(member);
                members.add(member);
            }
        }
        for (Clazz.FieldDef f : fields) {
            if (f.isSynthetic()) { // Ignore synthetic fields
                continue;
            }
            Collection<Element> children = annotations.get(f);
            if (children == null)
                children = new HashSet<Element>();
            // Fields can have a constant value, this is a new element
            if (f.getConstant() != null) {
                children.add(new Element(CONSTANT, f.getConstant().toString(), null, CHANGED, CHANGED, null));
            }
            access(children, f.getAccess(), f.isDeprecated(), provider.get());
            children.add(getReturn(f.getType()));
            Element member = new Element(FIELD, f.getName(), children, MINOR,
                provider.get() && !f.isPublic() ? MINOR : MAJOR, null);
            if (!members.add(member)) {
                members.remove(member);
                members.add(member);
            }
        }
        access(members, clazz.getAccess(), clazz.isDeprecated(), provider.get());
        // And make the result
        Element s = new Element(type, fqn, members, MINOR, MAJOR, null);
        cache.put(clazz, s);
        return s;
    }
    /**
     * Render a method prototype as a comma-separated parameter list in parentheses,
     * e.g. {@code (java.lang.String,int)}.
     */
    private String toString(TypeRef[] prototype) {
        StringBuilder sb = new StringBuilder();
        sb.append("(");
        String del = "";
        for (TypeRef ref : prototype) {
            sb.append(del);
            sb.append(ref.getFQN());
            del = ",";
        }
        sb.append(")");
        return sb.toString();
    }
    /**
     * Map a type to its RETURN element, reusing the shared singletons for
     * primitives and java.lang.Object.
     *
     * @throws IllegalArgumentException for an unknown primitive descriptor
     */
    private Element getReturn(TypeRef type) {
        if (!type.isPrimitive()) {
            return type.isObject() ? OBJECT_R : new Element(RETURN, type.getFQN());
        }
        switch (type.getBinary().charAt(0)) {
            case 'V' :
                return VOID_R;
            case 'Z' :
                return BOOLEAN_R;
            case 'S' :
                return SHORT_R;
            case 'I' :
                return INT_R;
            case 'B' :
                return BYTE_R;
            case 'C' :
                return CHAR_R;
            case 'J' :
                return LONG_R;
            case 'F' :
                return FLOAT_R;
            case 'D' :
                return DOUBLE_R;
            default :
                throw new IllegalArgumentException("Unknown primitive " + type);
        }
    }
    /**
     * Add the access-modifier marker elements (protected/abstract/final/static)
     * for the given access flags to the children of a member element. Providers
     * get the milder PROTECTED_PROVIDER marker.
     */
    private static void access(Collection<Element> children, int access, @SuppressWarnings("unused") boolean deprecated,
        boolean provider) {
        if (!Modifier.isPublic(access))
            children.add(provider ? PROTECTED_PROVIDER : PROTECTED);
        if (Modifier.isAbstract(access))
            children.add(ABSTRACT);
        if (Modifier.isFinal(access))
            children.add(FINAL);
        if (Modifier.isStatic(access))
            children.add(STATIC);
    }
}
| |
package com.versionone.om.tests;
import com.versionone.apiclient.APIException;
import com.versionone.apiclient.ConnectionException;
import com.versionone.apiclient.FilterTerm;
import com.versionone.apiclient.IAssetType;
import com.versionone.apiclient.OidException;
import com.versionone.apiclient.Query;
import com.versionone.om.Epic;
import com.versionone.om.Member;
import com.versionone.om.Retrospective;
import com.versionone.om.Story;
import com.versionone.om.filters.BaseAssetFilter;
import com.versionone.om.filters.StoryFilter;
import org.junit.Assert;
import org.junit.Ignore;
import static org.junit.Assert.fail;
import org.junit.Test;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
public class StoryFilterTester extends PrimaryWorkitemFilterTesterBase {
private StoryFilter getFilter() {
StoryFilter filter = new StoryFilter();
filter.project.add(sandboxProject);
return filter;
}
@Test
public void testProject() {
Collection<Story> stories = getInstance().get().story(
getFilter());
for (Story result : stories) {
Assert.assertEquals(getSandboxProject(), result.getProject());
}
}
@Test
public void testNoOwner() {
StoryFilter filter = getFilter();
filter.owners.add(null);
Collection<Story> stories = getInstance().get().story(filter);
for (Story result : stories) {
if (result.getOwners().size() > 0) {
fail("Filtered Query should only return stories owned by no one.");
}
}
}
@Test
public void testFilterExist() throws ConnectionException, APIException, OidException {
IAssetType assetType = getInstance().getApiClient().getMetaModel().getAssetType("Story");
FilterTerm customFilter = new FilterTerm(assetType.getAttributeDefinition("Timebox"));
customFilter.exists();
Query query = new Query(assetType);
query.setFilter(customFilter);
getInstance().getApiClient().getServices().retrieve(query);
}
@Test
public void testNoOrAndreOwner() {
StoryFilter filter = getFilter();
filter.owners.add(null);
filter.owners.add(andre);
Collection<Story> stories = getInstance().get().story(filter);
for (Story result : stories) {
if (!findRelated(andre, result.getOwners())
&& (result.getOwners().size() > 0)) {
fail("Filtered Query should only return stories owned by "
+ andre.getName() + " or no one.");
}
}
}
@Test
public void testNames() {
StoryFilter filter = getFilter();
filter.name.add("Defect 2");
filter.name.add("Story 2");
Collection<Story> stories = getInstance().get().story(filter);
Assert.assertEquals(1, stories.size());
}
@Test
public void testDispalyIDs() {
StoryFilter filter = getFilter();
filter.displayID.add(story1.getDisplayID());
filter.displayID.add(defect1.getDisplayID());
Collection<Story> stories = getInstance().get().story(filter);
Assert.assertEquals(1, stories.size());
}
@Test
public void testState() {
Collection<Story> stories = getInstance().get().story(
getFilter());
int allStoriesCount = stories.size();
Story closedStory = sandboxProject.createStory("Close Me");
closedStory.close();
Assert.assertEquals(++allStoriesCount, getInstance().get()
.story(getFilter()).size());
StoryFilter openFilter = getFilter();
openFilter.getState().add(BaseAssetFilter.State.Active);
Collection<Story> activeStories = getInstance().get().story(
openFilter);
Assert.assertEquals(allStoriesCount - 1, activeStories.size());
for (Story story : activeStories) {
Assert.assertTrue(story.isActive());
}
StoryFilter closedFilter = getFilter();
closedFilter.getState().add(BaseAssetFilter.State.Closed);
Collection<Story> closedStories = getInstance().get().story(
closedFilter);
Assert.assertEquals(1, closedStories.size());
for (Story story : closedStories) {
Assert.assertTrue(story.isClosed());
}
}
@Test
public void testRequestedBy() {
final String strMe = "ME";
Story story = getSandboxProject().createStory("RequestdBy Filter");
story.setRequestedBy(strMe);
story.save();
resetInstance();
story = getInstance().get().storyByID(story.getID());
StoryFilter filter = new StoryFilter();
filter.requestedBy.add(strMe);
Collection<Story> results = getSandboxProject().getStories(filter);
Assert.assertTrue("Expected to find story that matched filter.",
findRelated(story, results));
for (Story result : results) {
Assert.assertEquals(strMe, result.getRequestedBy());
}
}
    @Test
    public void testBuild() {
        final String strBuildNumber = "10.2.24.1";
        // Persist a story carrying the build number, then re-read it through a fresh instance.
        Story story = getSandboxProject().createStory("Build Filter");
        story.setBuild(strBuildNumber);
        story.save();
        resetInstance();
        story = getInstance().get().storyByID(story.getID());
        StoryFilter filter = getFilter();
        filter.build.add(strBuildNumber);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(story, results));
        // Every match must carry the filtered build number.
        for (Story result : results) {
            Assert.assertEquals(strBuildNumber, result.getBuild());
        }
    }
    @Test
    public void testEpic() {
        // Create an epic with one child story, then filter stories by that epic.
        Epic epic = getInstance().create().epic("Filter by me",
                getSandboxProject());
        Story story = epic.generateChildStory();
        story.setName("Find Me");
        story.save();
        resetInstance();
        // Re-fetch both assets so we compare server-side state, not local caches.
        story = getInstance().get().storyByID(story.getID());
        epic = getInstance().get().epicByID(epic.getID());
        StoryFilter filter = getFilter();
        filter.epic.add(epic);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(story, results));
        for (Story result : results) {
            Assert.assertEquals(epic, result.getEpic());
        }
    }
    @Test
    public void testRisk() {
        Story story = getSandboxProject().createStory("Risk Filter");
        // Use whatever the first available Risk list value is; the test is value-agnostic.
        String riskValue = story.getRisk().getAllValues()[0];
        story.getRisk().setCurrentValue(riskValue);
        story.save();
        resetInstance();
        story = getInstance().get().storyByID(story.getID());
        StoryFilter filter = getFilter();
        filter.risk.add(riskValue);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(story, results));
        // Every match must have the filtered risk value.
        for (Story result : results) {
            Assert.assertEquals(riskValue, result.getRisk().getCurrentValue());
        }
    }
    @Test
    public void testType() {
        Story story = getSandboxProject().createStory("Type Filter");
        // Use whatever the first available Type list value is; the test is value-agnostic.
        String typeValue = story.getType().getAllValues()[0];
        story.getType().setCurrentValue(typeValue);
        story.save();
        resetInstance();
        story = getInstance().get().storyByID(story.getID());
        StoryFilter filter = getFilter();
        filter.type.add(typeValue);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(story, results));
        // Every match must have the filtered type value.
        for (Story result : results) {
            Assert.assertEquals(typeValue, result.getType().getCurrentValue());
        }
    }
@Test
public void testCustomer() {
Member customer = null;
for (Member member : getInstance().getMembers()) {
customer = member;
break;
}
Story story = getSandboxProject().createStory("Customer filter");
story.setCustomer(customer);
story.save();
resetInstance();
story = getInstance().get().storyByID(story.getID());
customer = getInstance().get().memberByID(customer.getID());
StoryFilter filter = getFilter();
filter.customer.add(customer);
Collection<Story> results = getSandboxProject().getStories(filter);
Assert.assertTrue("Expected to find story that matched filter.",
findRelated(story, results));
for (Story result : results) {
Assert.assertEquals(customer, result.getCustomer());
}
}
    @Test
    public void testDependsOnStories() {
        // Build a dependency: "Dependant" depends on "Benefactor".
        Story benefactor = getSandboxProject().createStory("Benefactor");
        Story dependant = getSandboxProject().createStory("Dependant");
        dependant.getDependsOnStories().add(benefactor);
        dependant.save();
        resetInstance();
        dependant = getInstance().get().storyByID(dependant.getID());
        benefactor = getInstance().get().storyByID(benefactor.getID());
        // Filtering on DependsOnStories=benefactor must return the dependant story.
        StoryFilter filter = getFilter();
        filter.dependsOnStories.add(benefactor);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(dependant, results));
        for (Story result : results) {
            Assert.assertTrue(
                    "Expected story to depend on value used in filter",
                    findRelated(benefactor, result.getDependsOnStories()));
        }
    }
@Test
public void testDependantStories() {
Story benefactor = getSandboxProject().createStory("Benefactor");
Story dependant = getSandboxProject().createStory("Dependant");
dependant.getDependsOnStories().add(benefactor);
dependant.save();
resetInstance();
dependant = getInstance().get().storyByID(dependant.getID());
benefactor = getInstance().get().storyByID(benefactor.getID());
StoryFilter filter = getFilter();
filter.dependentStories.add(dependant);
Collection<Story> results = getSandboxProject().getStories(filter);
Assert.assertTrue("Expected to find story that matched filter.",
findRelated(benefactor, results));
for (Story result : results) {
Assert.assertTrue("Expected story to includ value used in filter in dependant stories",
findRelated(dependant, result.getDependentStories()));
}
}
    @Test
    public void testIdentifiedIn() {
        // Create a story inside a retrospective, then filter stories by that retrospective.
        Retrospective retro = getSandboxProject().createRetrospective("Has a story");
        Story story = retro.createStory("Retrospective filter");
        story.save();
        resetInstance();
        story = getInstance().get().storyByID(story.getID());
        retro = getInstance().get().retrospectiveByID(retro.getID());
        StoryFilter filter = getFilter();
        filter.identifiedIn.add(retro);
        Collection<Story> results = getSandboxProject().getStories(filter);
        Assert.assertTrue("Expected to find story that matched filter.",
                findRelated(story, results));
        // Every match must have been identified in the filtered retrospective.
        for (Story result : results) {
            Assert.assertEquals(retro, result.getIdentifiedIn());
        }
    }
@Test
@Ignore("DB index text for search not so fast, so this test fails. Requires big delay.")
public void findInDefaultFields() {
String nameString = newGuid();
getSandboxProject().createStory(nameString);
StoryFilter filter = getFilter();
filter.find.setSearchString(nameString);
//we need this sleep to let DB index string for search.
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
}
Assert.assertEquals(1, getInstance().get().story(filter).size());
}
@Test
@Ignore("Server throws NotSupportedException: SimpleLongTextAttributeDefinition.BuildPredicate (Story.Description)")
public void filterByDescription() {
String weirdString = newGuid();
Story story = getEntityFactory().createStory("my test story for description", getSandboxProject());
story.setDescription(weirdString);
resetInstance();
StoryFilter filter = new StoryFilter();
filter.description.add(weirdString);
//we need this sleep to let DB index string for search.
try {
Thread.sleep(2000);
} catch (InterruptedException e) {}
Assert.assertEquals(1, getInstance().get().story(filter).size());
}
@Test
@Ignore("DB index text for search not so fast, so this test fails. Requires big delay.")
public void findInDescriptionField() {
String weirdString = newGuid();
Story a = getSandboxProject().createStory("Has a weird description");
a.setDescription(weirdString);
Story b = getSandboxProject().createStory("Also with funky data");
b.setDescription(weirdString);
a.save();
b.save();
StoryFilter filter = getFilter();
filter.find.setSearchString(weirdString);
filter.find.fields.add("Description");
//we need this sleep to let DB index string for search.
try {
Thread.sleep(10000);
} catch (InterruptedException e) {}
Assert.assertEquals(2, getInstance().get().story(filter).size());
}
@Test
public void testFindInDescriptionNotFound() {
StoryFilter filter = getFilter();
filter.find.setSearchString(newGuid());
filter.find.fields.add("Description");
try {
Thread.sleep(10000);
} catch (InterruptedException e) {}
Assert.assertEquals(0, getInstance().get().story(filter).size());
}
    @Test
    public void testNoProjectAmongStories() {
        // A story query must not surface the project asset itself among its results.
        String sandboxName = getSandboxProject().getName();
        resetInstance();
        ListAssert.notcontains(sandboxName,
                getInstance().get().baseAssets(new StoryFilter()),
                new EntityToNameTransformer<Story>());
    }
    @Test
    public void testNoEpicAmongStories() {
        // Epics are a distinct asset type; an unfiltered story query must not return them.
        Epic epic = getSandboxProject().createEpic("War and Piece");
        resetInstance();
        ListAssert.notcontains(epic.getName(), getInstance().get().story(null), new EntityToNameTransformer<Story>());
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.editorActions;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.daemon.impl.CollectHighlightsUtil;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.LightweightHint;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Copy/paste post-processor that records, at copy time, which classes and static members are
 * referenced inside the copied ranges and, at paste time, restores the corresponding imports
 * in the destination file. Language-specific collection/restoration/removal of references is
 * delegated to abstract methods implemented by subclasses.
 */
public abstract class CopyPasteReferenceProcessor<TRef extends PsiElement> extends CopyPastePostProcessor<ReferenceTransferableData> {
  private static final Logger LOG = Logger.getInstance(CopyPasteReferenceProcessor.class);
  @NotNull
  @Override
  public List<ReferenceTransferableData> collectTransferableData(PsiFile file, final Editor editor, final int[] startOffsets, final int[] endOffsets) {
    // Nothing to collect when import-on-paste is disabled or the file declares no classes.
    if (CodeInsightSettings.getInstance().ADD_IMPORTS_ON_PASTE == CodeInsightSettings.NO) {
      return Collections.emptyList();
    }
    if (!(file instanceof PsiClassOwner)) {
      return Collections.emptyList();
    }
    final ArrayList<ReferenceData> array = new ArrayList<>();
    int refOffset = 0; // this is an offset delta for conversion from absolute offset to an offset inside clipboard contents
    for (int j = 0; j < startOffsets.length; j++) {
      refOffset += startOffsets[j];
      for (final PsiElement element : CollectHighlightsUtil.getElementsInRange(file, startOffsets[j], endOffsets[j])) {
        addReferenceData(file, refOffset, element, array);
      }
      refOffset -= endOffsets[j] + 1; // 1 accounts for line break inserted between contents corresponding to different carets
    }
    if (array.isEmpty()) {
      return Collections.emptyList();
    }
    return Collections.singletonList(new ReferenceTransferableData(array.toArray(new ReferenceData[0])));
  }
  // Subclasses record reference data (qualified class / static member) for a single element.
  protected abstract void addReferenceData(PsiFile file, int startOffset, PsiElement element, ArrayList<ReferenceData> to);
  @NotNull
  @Override
  public List<ReferenceTransferableData> extractTransferableData(final Transferable content) {
    ReferenceTransferableData referenceData = null;
    if (CodeInsightSettings.getInstance().ADD_IMPORTS_ON_PASTE != CodeInsightSettings.NO) {
      try {
        final DataFlavor flavor = ReferenceData.getDataFlavor();
        if (flavor != null) {
          referenceData = (ReferenceTransferableData)content.getTransferData(flavor);
        }
      }
      catch (UnsupportedFlavorException | IOException ignored) {
        // Clipboard content is not ours (or unreadable): paste proceeds without reference data.
      }
    }
    if (referenceData != null) { // copy to prevent changing of original by convertLineSeparators
      return Collections.singletonList(referenceData.clone());
    }
    return Collections.emptyList();
  }
  @Override
  public void processTransferableData(final Project project,
                                      final Editor editor,
                                      final RangeMarker bounds,
                                      int caretOffset,
                                      Ref<Boolean> indented, final List<ReferenceTransferableData> values) {
    // Resolving references needs indexes; silently skip while indexing is in progress.
    if (DumbService.getInstance(project).isDumb()) {
      return;
    }
    final Document document = editor.getDocument();
    final PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(document);
    if (!(file instanceof PsiClassOwner)) {
      return;
    }
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    assert values.size() == 1;
    final ReferenceData[] referenceData = values.get(0).getData();
    final TRef[] refs = findReferencesToRestore(file, bounds, referenceData);
    if (CodeInsightSettings.getInstance().ADD_IMPORTS_ON_PASTE == CodeInsightSettings.ASK) {
      // Lets the user deselect references; deselected entries are nulled out in refs.
      askReferencesToRestore(project, refs, referenceData);
    }
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    ApplicationManager.getApplication().runWriteAction(() -> {
      Set<String> imported = new TreeSet<>();
      restoreReferences(referenceData, refs, imported);
      if (CodeInsightSettings.getInstance().ADD_IMPORTS_ON_PASTE == CodeInsightSettings.YES && !imported.isEmpty()) {
        // Offer a hint so the user can review (and possibly remove) the auto-added imports.
        String notificationText = CodeInsightBundle.message("copy.paste.reference.notification", imported.size());
        ApplicationManager.getApplication().invokeLater(
          () -> showHint(editor, notificationText, e -> {
            if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
              reviewImports(project, file, imported);
            }
          }), ModalityState.NON_MODAL);
      }
    });
  }
  // Subclasses remove the given imports from the file (used by the review dialog below).
  protected abstract void removeImports(PsiFile file, Set<String> imports);
  // Shows a dialog listing the auto-imported names and removes the ones the user selects.
  private void reviewImports(Project project, PsiFile file, Set<String> importedClasses) {
    RestoreReferencesDialog dialog = new RestoreReferencesDialog(project, ArrayUtil.toObjectArray(importedClasses), false);
    dialog.setTitle(CodeInsightBundle.message("dialog.import.on.paste.title3"));
    dialog.setExplanation(CodeInsightBundle.message("dialog.paste.on.import.text3"));
    if (dialog.showAndGet()) {
      Object[] selectedElements = dialog.getSelectedElements();
      if (selectedElements.length > 0) {
        WriteCommandAction.runWriteCommandAction(project, "", null, () -> {
          removeImports(file, Arrays.stream(selectedElements).map(o -> (String)o).collect(Collectors.toSet()));
        });
      }
    }
  }
  // Helper for subclasses: records one reference with offsets made relative to the clipboard text.
  protected static void addReferenceData(final PsiElement element,
                                         final ArrayList<? super ReferenceData> array,
                                         final int startOffset,
                                         final String qClassName, @Nullable final String staticMemberName) {
    final TextRange range = element.getTextRange();
    array.add(
      new ReferenceData(
        range.getStartOffset() - startOffset,
        range.getEndOffset() - startOffset,
        qClassName, staticMemberName));
  }
  // Finds, within the pasted region, the reference elements matching the recorded data;
  // the returned array is parallel to referenceData (entries may be null).
  protected abstract TRef @NotNull [] findReferencesToRestore(PsiFile file,
                                                              RangeMarker bounds,
                                                              ReferenceData[] referenceData);
  // Resolves a reference; falls back to the first multi-resolve candidate when plain resolve fails.
  protected PsiElement resolveReferenceIgnoreOverriding(PsiPolyVariantReference reference) {
    PsiElement referent = reference.resolve();
    if (referent == null) {
      final ResolveResult[] results = reference.multiResolve(true);
      if (results.length > 0) {
        referent = results[0].getElement();
      }
    }
    return referent;
  }
  // Re-binds the found references and collects the imported names into 'imported'.
  protected abstract void restoreReferences(ReferenceData[] referenceData,
                                            TRef[] refs,
                                            Set<String> imported);
  // Shows the "restore references" dialog; entries the user deselects are nulled out in refs
  // so restoreReferences() skips them.
  private static void askReferencesToRestore(Project project, PsiElement @NotNull [] refs,
                                             ReferenceData[] referenceData) {
    PsiManager manager = PsiManager.getInstance(project);
    ArrayList<Object> array = new ArrayList<>();
    Object[] refObjects = new Object[refs.length];
    for (int i = 0; i < referenceData.length; i++) {
      PsiElement ref = refs[i];
      if (ref != null) {
        LOG.assertTrue(ref.isValid());
        ReferenceData data = referenceData[i];
        PsiClass refClass = JavaPsiFacade.getInstance(manager.getProject()).findClass(data.qClassName, ref.getResolveScope());
        if (refClass == null) continue;
        Object refObject = refClass;
        if (data.staticMemberName != null) {
          //Show static members as Strings
          refObject = refClass.getQualifiedName() + "." + data.staticMemberName;
        }
        refObjects[i] = refObject;
        if (!array.contains(refObject)) {
          array.add(refObject);
        }
      }
    }
    if (array.isEmpty()) return;
    Object[] selectedObjects = ArrayUtil.toObjectArray(array);
    Arrays.sort(selectedObjects, (o1, o2) -> getFQName(o1).compareToIgnoreCase(getFQName(o2)));
    RestoreReferencesDialog dialog = new RestoreReferencesDialog(project, selectedObjects);
    dialog.show();
    selectedObjects = dialog.getSelectedElements();
    for (int i = 0; i < referenceData.length; i++) {
      PsiElement ref = refs[i];
      if (ref != null) {
        PsiUtilCore.ensureValid(ref);
        Object refObject = refObjects[i];
        boolean found = false;
        for (Object selected : selectedObjects) {
          if (Comparing.equal(refObject, selected)) {
            found = true;
            break;
          }
        }
        if (!found) {
          // Deselected by the user: do not restore this reference.
          refs[i] = null;
        }
      }
    }
  }
  // Lightweight editor hint with a hyperlink; suppressed in unit-test mode.
  private static void showHint(@NotNull Editor editor, @NotNull String info, HyperlinkListener hyperlinkListener) {
    if (ApplicationManager.getApplication().isUnitTestMode()) return;
    LightweightHint hint = new LightweightHint(HintUtil.createInformationLabel(info, hyperlinkListener, null, null));
    int flags = HintManager.HIDE_BY_ANY_KEY | HintManager.HIDE_BY_TEXT_CHANGE;
    HintManagerImpl.getInstanceImpl().showEditorHint(hint, editor, HintManager.UNDER, flags, 0, false);
  }
  // Sort key: qualified class name for classes, the string itself for static-member entries.
  private static String getFQName(Object element) {
    return element instanceof PsiClass ? ((PsiClass)element).getQualifiedName() : (String)element;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.framework.recipes.queue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.curator.utils.CloseableUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.api.BackgroundCallback;
import org.apache.curator.framework.api.CuratorEvent;
import org.apache.curator.framework.api.CuratorEventType;
import org.apache.curator.framework.imps.CuratorFrameworkState;
import org.apache.curator.framework.listen.ListenerContainer;
import org.apache.curator.framework.recipes.leader.LeaderSelector;
import org.apache.curator.utils.ThreadUtils;
import org.apache.curator.utils.ZKPaths;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.curator.utils.PathUtils;
/**
* <p>An implementation of the Distributed Queue ZK recipe. Items put into the queue
* are guaranteed to be ordered (by means of ZK's PERSISTENT_SEQUENTIAL node).</p>
*
* <p>
* Guarantees:</p>
* <ul>
* <li>If a single consumer takes items out of the queue, they will be ordered FIFO. i.e. if ordering is important,
* use a {@link LeaderSelector} to nominate a single consumer.</li>
* <li>Unless a {@link QueueBuilder#lockPath(String)} is used, there is only guaranteed processing of each message to the point of receipt by a given instance.
* <li>If an instance receives an item from the queue but dies while processing it, the item will be lost. If you need message recoverability, use
* a {@link QueueBuilder#lockPath(String)}</li>
* </ul>
*/
public class DistributedQueue<T> implements QueueBase<T>
{
private final Logger log = LoggerFactory.getLogger(getClass());
private final CuratorFramework client;
private final QueueSerializer<T> serializer;
private final String queuePath;
private final Executor executor;
private final ExecutorService service;
private final AtomicReference<State> state = new AtomicReference<State>(State.LATENT);
private final QueueConsumer<T> consumer;
private final int minItemsBeforeRefresh;
private final boolean refreshOnWatch;
private final boolean isProducerOnly;
private final String lockPath;
private final AtomicReference<ErrorMode> errorMode = new AtomicReference<ErrorMode>(ErrorMode.REQUEUE);
private final ListenerContainer<QueuePutListener<T>> putListenerContainer = new ListenerContainer<QueuePutListener<T>>();
private final AtomicInteger lastChildCount = new AtomicInteger(0);
private final int maxItems;
private final int finalFlushMs;
private final boolean putInBackground;
private final ChildrenCache childrenCache;
private final AtomicInteger putCount = new AtomicInteger(0);
private enum State
{
LATENT,
STARTED,
STOPPED
}
@VisibleForTesting
protected enum ProcessType
{
NORMAL,
REMOVE
}
private static final String QUEUE_ITEM_NAME = "queue-";
    // Package-private constructor. A null consumer makes this a producer-only queue;
    // a non-null lockPath enables lock-protected (recoverable) message processing.
    DistributedQueue
    (
        CuratorFramework client,
        QueueConsumer<T> consumer,
        QueueSerializer<T> serializer,
        String queuePath,
        ThreadFactory threadFactory,
        Executor executor,
        int minItemsBeforeRefresh,
        boolean refreshOnWatch,
        String lockPath,
        int maxItems,
        boolean putInBackground,
        int finalFlushMs
    )
    {
        Preconditions.checkNotNull(client, "client cannot be null");
        Preconditions.checkNotNull(serializer, "serializer cannot be null");
        Preconditions.checkNotNull(threadFactory, "threadFactory cannot be null");
        Preconditions.checkNotNull(executor, "executor cannot be null");
        Preconditions.checkArgument(maxItems > 0, "maxItems must be a positive number");
        isProducerOnly = (consumer == null);
        this.lockPath = (lockPath == null) ? null : PathUtils.validatePath(lockPath);
        this.putInBackground = putInBackground;
        this.consumer = consumer;
        this.minItemsBeforeRefresh = minItemsBeforeRefresh;
        this.refreshOnWatch = refreshOnWatch;
        this.client = client;
        this.serializer = serializer;
        this.queuePath = PathUtils.validatePath(queuePath);
        this.executor = executor;
        this.maxItems = maxItems;
        this.finalFlushMs = finalFlushMs;
        // Two threads: one for the consumer run loop, one spare for internal tasks.
        service = Executors.newFixedThreadPool(2, threadFactory);
        childrenCache = new ChildrenCache(client, queuePath);
        if ( (maxItems != QueueBuilder.NOT_SET) && putInBackground )
        {
            log.warn("Bounded queues should set putInBackground(false) in the builder. Putting in the background will result in spotty maxItem consistency.");
        }
    }
    /**
     * Start the queue. No other methods work until this is called
     *
     * @throws Exception startup errors
     */
    @Override
    public void start() throws Exception
    {
        // start() is one-shot: a second call, or a call after close(), is a usage error.
        if ( !state.compareAndSet(State.LATENT, State.STARTED) )
        {
            throw new IllegalStateException();
        }
        try
        {
            client.create().creatingParentContainersIfNeeded().forPath(queuePath);
        }
        catch ( KeeperException.NodeExistsException ignore )
        {
            // this is OK
        }
        if ( lockPath != null )
        {
            try
            {
                client.create().creatingParentContainersIfNeeded().forPath(lockPath);
            }
            catch ( KeeperException.NodeExistsException ignore )
            {
                // this is OK
            }
        }
        // The children cache is needed by consumers and by bounded producers (maxItems).
        if ( !isProducerOnly || (maxItems != QueueBuilder.NOT_SET) )
        {
            childrenCache.start();
        }
        // Consumers run the processing loop on the internal executor service.
        if ( !isProducerOnly )
        {
            service.submit
                (
                    new Callable<Object>()
                    {
                        @Override
                        public Object call()
                        {
                            runLoop();
                            return null;
                        }
                    }
                );
        }
    }
    @Override
    public void close() throws IOException
    {
        // Idempotent: only the first STARTED->STOPPED transition performs shutdown.
        if ( state.compareAndSet(State.STARTED, State.STOPPED) )
        {
            if ( finalFlushMs > 0 )
            {
                try
                {
                    // Give pending background puts a chance to commit before shutdown.
                    flushPuts(finalFlushMs, TimeUnit.MILLISECONDS);
                }
                catch ( InterruptedException e )
                {
                    // Preserve the caller's interrupt status.
                    Thread.currentThread().interrupt();
                }
            }
            CloseableUtils.closeQuietly(childrenCache);
            putListenerContainer.clear();
            service.shutdownNow();
        }
    }
    /**
     * Return the manager for put listeners
     *
     * @return put listener container
     */
    @Override
    public ListenerContainer<QueuePutListener<T>> getPutListenerContainer()
    {
        return putListenerContainer;
    }
    /**
     * Used when the queue is created with a {@link QueueBuilder#lockPath(String)}. Determines
     * the behavior when the queue consumer throws an exception
     *
     * @param newErrorMode the new error mode (the default is {@link ErrorMode#REQUEUE}
     */
    @Override
    public void setErrorMode(ErrorMode newErrorMode)
    {
        // Error modes only apply to lock-protected consumption, hence the lockPath check.
        Preconditions.checkNotNull(lockPath, "lockPath cannot be null");
        if ( newErrorMode == ErrorMode.REQUEUE )
        {
            log.warn("ErrorMode.REQUEUE requires ZooKeeper version 3.4.x+ - make sure you are not using a prior version");
        }
        errorMode.set(newErrorMode);
    }
    /**
     * Wait until any pending puts are committed
     *
     * @param waitTime max wait time
     * @param timeUnit time unit
     * @return true if the flush was successful, false if it timed out first
     * @throws InterruptedException if thread was interrupted
     */
    @Override
    public boolean flushPuts(long waitTime, TimeUnit timeUnit) throws InterruptedException
    {
        long msWaitRemaining = TimeUnit.MILLISECONDS.convert(waitTime, timeUnit);
        // putCount doubles as the monitor: put-completion paths notifyAll() on it
        // after decrementing (see doPutInForeground/doPutInBackground).
        synchronized(putCount)
        {
            while ( putCount.get() > 0 )
            {
                if ( msWaitRemaining <= 0 )
                {
                    return false;
                }
                long startMs = System.currentTimeMillis();
                putCount.wait(msWaitRemaining);
                // Subtract the time actually spent waiting (handles spurious wakeups).
                long elapsedMs = System.currentTimeMillis() - startMs;
                msWaitRemaining -= elapsedMs;
            }
        }
        return true;
    }
    /**
     * Add an item into the queue. Adding is done in the background - thus, this method will
     * return quickly.<br><br>
     * NOTE: if an upper bound was set via {@link QueueBuilder#maxItems}, this method will
     * block until there is available space in the queue.
     *
     * @param item item to add
     * @throws Exception connection issues
     */
    public void put(T item) throws Exception
    {
        // Delegate with no wait bound (0/null means wait indefinitely for capacity).
        put(item, 0, null);
    }
/**
* Same as {@link #put(Object)} but allows a maximum wait time if an upper bound was set
* via {@link QueueBuilder#maxItems}.
*
* @param item item to add
* @param maxWait maximum wait
* @param unit wait unit
* @return true if items was added, false if timed out
* @throws Exception
*/
public boolean put(T item, int maxWait, TimeUnit unit) throws Exception
{
checkState();
String path = makeItemPath();
return internalPut(item, null, path, maxWait, unit);
}
    /**
     * Add a set of items into the queue. Adding is done in the background - thus, this method will
     * return quickly.<br><br>
     * NOTE: if an upper bound was set via {@link QueueBuilder#maxItems}, this method will
     * block until there is available space in the queue.
     *
     * @param items items to add
     * @throws Exception connection issues
     */
    public void putMulti(MultiItem<T> items) throws Exception
    {
        // Delegate with no wait bound (0/null means wait indefinitely for capacity).
        putMulti(items, 0, null);
    }
/**
* Same as {@link #putMulti(MultiItem)} but allows a maximum wait time if an upper bound was set
* via {@link QueueBuilder#maxItems}.
*
* @param items items to add
* @param maxWait maximum wait
* @param unit wait unit
* @return true if items was added, false if timed out
* @throws Exception
*/
public boolean putMulti(MultiItem<T> items, int maxWait, TimeUnit unit) throws Exception
{
checkState();
String path = makeItemPath();
return internalPut(null, items, path, maxWait, unit);
}
    /**
     * Return the most recent message count from the queue. This is useful for debugging/information
     * purposes only.
     *
     * @return count (can be 0)
     */
    @Override
    public int getLastMessageCount()
    {
        return lastChildCount.get();
    }
    // Core put path: optionally blocks for capacity, adapts a single item to the
    // MultiItem interface, serializes, then writes in the foreground or background.
    boolean internalPut(final T item, MultiItem<T> multiItem, String path, int maxWait, TimeUnit unit) throws Exception
    {
        if ( !blockIfMaxed(maxWait, unit) )
        {
            return false;
        }
        // Keep the caller's original multi-item so listeners receive the object they supplied.
        final MultiItem<T> givenMultiItem = multiItem;
        if ( item != null )
        {
            // Single-item adapter: yields the item exactly once, then null.
            final AtomicReference<T> ref = new AtomicReference<T>(item);
            multiItem = new MultiItem<T>()
            {
                @Override
                public T nextItem() throws Exception
                {
                    return ref.getAndSet(null);
                }
            };
        }
        // Pending-put counter: decremented (with notifyAll) once the node is created.
        putCount.incrementAndGet();
        byte[] bytes = ItemSerializer.serialize(multiItem, serializer);
        if ( putInBackground )
        {
            doPutInBackground(item, path, givenMultiItem, bytes);
        }
        else
        {
            doPutInForeground(item, path, givenMultiItem, bytes);
        }
        return true;
    }
    // Synchronous put: creates the sequential node, then decrements the pending-put
    // count (waking flushPuts() waiters) and notifies put listeners.
    private void doPutInForeground(final T item, String path, final MultiItem<T> givenMultiItem, byte[] bytes) throws Exception
    {
        client.create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).forPath(path, bytes);
        synchronized(putCount)
        {
            putCount.decrementAndGet();
            putCount.notifyAll();
        }
        putListenerContainer.forEach
            (
                new Function<QueuePutListener<T>, Void>()
                {
                    @Override
                    public Void apply(QueuePutListener<T> listener)
                    {
                        // A non-null item means the single-item put() entry point was used.
                        if ( item != null )
                        {
                            listener.putCompleted(item);
                        }
                        else
                        {
                            listener.putMultiCompleted(givenMultiItem);
                        }
                        return null;
                    }
                }
            );
    }
    // Asynchronous put: node creation happens in the background; the callback
    // decrements the pending-put count and notifies put listeners on completion.
    private void doPutInBackground(final T item, String path, final MultiItem<T> givenMultiItem, byte[] bytes) throws Exception
    {
        BackgroundCallback callback = new BackgroundCallback()
        {
            @Override
            public void processResult(CuratorFramework client, CuratorEvent event) throws Exception
            {
                if ( event.getResultCode() != KeeperException.Code.OK.intValue() )
                {
                    // NOTE(review): on a failed create the pending-put count is never
                    // decremented, so flushPuts() can wait out its full timeout — confirm
                    // whether this is intentional.
                    return;
                }
                if ( event.getType() == CuratorEventType.CREATE )
                {
                    synchronized(putCount)
                    {
                        putCount.decrementAndGet();
                        putCount.notifyAll();
                    }
                }
                putListenerContainer.forEach
                    (
                        new Function<QueuePutListener<T>, Void>()
                        {
                            @Override
                            public Void apply(QueuePutListener<T> listener)
                            {
                                // A non-null item means the single-item put() entry point was used.
                                if ( item != null )
                                {
                                    listener.putCompleted(item);
                                }
                                else
                                {
                                    listener.putMultiCompleted(givenMultiItem);
                                }
                                return null;
                            }
                        }
                    );
            }
        };
        internalCreateNode(path, bytes, callback);
    }
    // Creates the queue-item node (PERSISTENT_SEQUENTIAL preserves insertion order).
    // @VisibleForTesting so tests can intercept node creation.
    @VisibleForTesting
    void internalCreateNode(String path, byte[] bytes, BackgroundCallback callback) throws Exception
    {
        client.create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).inBackground(callback).forPath(path, bytes);
    }
void checkState() throws Exception
{
if ( state.get() != State.STARTED )
{
throw new IllegalStateException();
}
}
    // Base path for a new item; ZooKeeper appends the sequence suffix on create.
    String makeItemPath()
    {
        return ZKPaths.makePath(queuePath, QUEUE_ITEM_NAME);
    }
    // Test hook: exposes the internal children cache.
    @VisibleForTesting
    ChildrenCache getCache()
    {
        return childrenCache;
    }
    // Natural (lexicographic) ordering of sequential node names; subclasses may override.
    protected void sortChildren(List<String> children)
    {
        Collections.sort(children);
    }
    // Fetches the current queue-item node names directly from ZooKeeper.
    protected List<String> getChildren() throws Exception
    {
        return client.getChildren().forPath(queuePath);
    }
    // Milliseconds to wait before the given item may be processed; 0 here,
    // overridable by subclasses (protected for that purpose).
    protected long getDelay(String itemNode)
    {
        return 0;
    }
protected boolean tryRemove(String itemNode) throws Exception
{
boolean isUsingLockSafety = (lockPath != null);
if ( isUsingLockSafety )
{
return processWithLockSafety(itemNode, ProcessType.REMOVE);
}
return processNormally(itemNode, ProcessType.REMOVE);
}
    // When an item bound is configured, block until the child count drops below maxItems.
    // Returns false when a bounded wait elapses without the cache version changing.
    private boolean blockIfMaxed(int maxWait, TimeUnit unit) throws Exception
    {
        ChildrenCache.Data data = childrenCache.getData();
        while ( data.children.size() >= maxItems )
        {
            long previousVersion = data.version;
            data = childrenCache.blockingNextGetData(data.version, maxWait, unit);
            if ( data.version == previousVersion )
            {
                // No change observed within the wait window - treat as a timeout.
                return false;
            }
        }
        return true;
    }
    /**
     * Main consumer loop: waits for children-cache updates and processes queue items
     * until the queue leaves the STARTED state. When the head item reports a delay,
     * the loop waits at most that long for the next update instead of processing.
     */
    private void runLoop()
    {
        long currentVersion = -1;
        long maxWaitMs = -1;
        try
        {
            while ( state.get() == State.STARTED )
            {
                try
                {
                    // Wait for a cache version newer than the one last processed; the
                    // wait is bounded by maxWaitMs when the head item has a pending delay.
                    ChildrenCache.Data data = (maxWaitMs > 0) ? childrenCache.blockingNextGetData(currentVersion, maxWaitMs, TimeUnit.MILLISECONDS) : childrenCache.blockingNextGetData(currentVersion);
                    currentVersion = data.version;
                    List<String> children = Lists.newArrayList(data.children);
                    sortChildren(children); // makes sure items are processed in the correct order
                    if ( children.size() > 0 )
                    {
                        maxWaitMs = getDelay(children.get(0));
                        if ( maxWaitMs > 0 )
                        {
                            // Head item not yet due - loop around and wait up to its delay.
                            continue;
                        }
                    }
                    else
                    {
                        // Nothing to do - wait for the next cache update.
                        continue;
                    }
                    processChildren(children, currentVersion);
                }
                catch ( InterruptedException e )
                {
                    // swallow the interrupt as it's only possible from either a background
                    // operation and, thus, doesn't apply to this loop or the instance
                    // is being closed in which case the while test will get it
                }
            }
        }
        catch ( Exception e )
        {
            log.error("Exception caught in background handler", e);
        }
    }
    /**
     * Dispatches each child item to the executor and blocks until every child has
     * been accounted for. The semaphore receives at least one permit per child:
     * from each worker's finally block, from each skip (foreign node / delayed
     * item), or in bulk when the loop breaks early (interrupt or stale cache version).
     */
    private void processChildren(List<String> children, long currentVersion) throws Exception
    {
        final Semaphore processedLatch = new Semaphore(0);
        final boolean isUsingLockSafety = (lockPath != null);
        int min = minItemsBeforeRefresh;
        for ( final String itemNode : children )
        {
            if ( Thread.currentThread().isInterrupted() )
            {
                // Abandon the batch; release all permits so the final acquire can't hang.
                processedLatch.release(children.size());
                break;
            }
            if ( !itemNode.startsWith(QUEUE_ITEM_NAME) )
            {
                log.warn("Foreign node in queue path: " + itemNode);
                processedLatch.release();
                continue;
            }
            // After the first minItemsBeforeRefresh items, check whether the queue
            // changed since this batch was read and, if so, stop and re-read.
            if ( min-- <= 0 )
            {
                if ( refreshOnWatch && (currentVersion != childrenCache.getData().version) )
                {
                    processedLatch.release(children.size());
                    break;
                }
            }
            if ( getDelay(itemNode) > 0 )
            {
                // Not yet due - skip in this pass.
                processedLatch.release();
                continue;
            }
            executor.execute
            (
                new Runnable()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            if ( isUsingLockSafety )
                            {
                                processWithLockSafety(itemNode, ProcessType.NORMAL);
                            }
                            else
                            {
                                processNormally(itemNode, ProcessType.NORMAL);
                            }
                        }
                        catch ( Exception e )
                        {
                            ThreadUtils.checkInterrupted(e);
                            log.error("Error processing message at " + itemNode, e);
                        }
                        finally
                        {
                            // Always release exactly once per dispatched worker.
                            processedLatch.release();
                        }
                    }
                }
            );
        }
        // Over-releasing on the break paths is harmless: only children.size() permits
        // are acquired here.
        processedLatch.acquire(children.size());
    }
    // Outcome of processing one queue node's payload: NORMAL = handled (or dropped
    // as corrupt), REQUEUE = a consumer failed and the item should be put back.
    private enum ProcessMessageBytesCode
    {
        NORMAL,
        REQUEUE
    }
private ProcessMessageBytesCode processMessageBytes(String itemNode, byte[] bytes) throws Exception
{
ProcessMessageBytesCode resultCode = ProcessMessageBytesCode.NORMAL;
MultiItem<T> items;
try
{
items = ItemSerializer.deserialize(bytes, serializer);
}
catch ( Throwable e )
{
ThreadUtils.checkInterrupted(e);
log.error("Corrupted queue item: " + itemNode, e);
return resultCode;
}
for(;;)
{
T item = items.nextItem();
if ( item == null )
{
break;
}
try
{
consumer.consumeMessage(item);
}
catch ( Throwable e )
{
ThreadUtils.checkInterrupted(e);
log.error("Exception processing queue item: " + itemNode, e);
if ( errorMode.get() == ErrorMode.REQUEUE )
{
resultCode = ProcessMessageBytesCode.REQUEUE;
break;
}
}
}
return resultCode;
}
    /**
     * Handles an item node without taking a lock, using optimistic concurrency:
     * the delete carries the stat version observed at read time, so a concurrent
     * consumer makes the delete fail (BadVersion/NoNode) instead of allowing a
     * double-consume.
     *
     * @return true if this process handled the node; false if another process got it
     */
    private boolean processNormally(String itemNode, ProcessType type) throws Exception
    {
        try
        {
            String itemPath = ZKPaths.makePath(queuePath, itemNode);
            Stat stat = new Stat();
            byte[] bytes = null;
            if ( type == ProcessType.NORMAL )
            {
                bytes = client.getData().storingStatIn(stat).forPath(itemPath);
            }
            // NOTE: for REMOVE the data is never read, so stat keeps its default
            // version (0) - presumably item nodes are never updated after creation,
            // so version 0 still matches; confirm against write paths.
            if ( client.getState() == CuratorFrameworkState.STARTED )
            {
                client.delete().withVersion(stat.getVersion()).forPath(itemPath);
            }
            if ( type == ProcessType.NORMAL )
            {
                // Consume only after the delete succeeded, i.e. after we "won" the item.
                processMessageBytes(itemNode, bytes);
            }
            return true;
        }
        catch ( KeeperException.NodeExistsException ignore )
        {
            // another process got it
        }
        catch ( KeeperException.NoNodeException ignore )
        {
            // another process got it
        }
        catch ( KeeperException.BadVersionException ignore )
        {
            // another process got it
        }
        return false;
    }
    /**
     * Handles an item node while holding an ephemeral lock node, so the message
     * survives a consumer crash: the item node is deleted only after consumption.
     * When the consumer requests a re-queue, the delete of the old node and the
     * creation of its replacement happen in a single transaction.
     *
     * @return true if this process acquired the lock and handled the node; false
     *         if another process got it first
     */
    @VisibleForTesting
    protected boolean processWithLockSafety(String itemNode, ProcessType type) throws Exception
    {
        String lockNodePath = ZKPaths.makePath(lockPath, itemNode);
        boolean lockCreated = false;
        try
        {
            // Ephemeral lock: competitors fail with NodeExists; a crashed consumer's
            // lock disappears with its session.
            client.create().withMode(CreateMode.EPHEMERAL).forPath(lockNodePath);
            lockCreated = true;
            String itemPath = ZKPaths.makePath(queuePath, itemNode);
            boolean requeue = false;
            byte[] bytes = null;
            if ( type == ProcessType.NORMAL )
            {
                bytes = client.getData().forPath(itemPath);
                requeue = (processMessageBytes(itemNode, bytes) == ProcessMessageBytesCode.REQUEUE);
            }
            if ( requeue )
            {
                // Atomically replace the failed item with a fresh sequential node.
                client.inTransaction()
                    .delete().forPath(itemPath)
                .and()
                    .create().withMode(CreateMode.PERSISTENT_SEQUENTIAL).forPath(makeRequeueItemPath(itemPath), bytes)
                .and()
                .commit();
            }
            else
            {
                client.delete().forPath(itemPath);
            }
            return true;
        }
        catch ( KeeperException.NodeExistsException ignore )
        {
            // another process got it
        }
        catch ( KeeperException.NoNodeException ignore )
        {
            // another process got it
        }
        catch ( KeeperException.BadVersionException ignore )
        {
            // another process got it
        }
        finally
        {
            if ( lockCreated )
            {
                // guaranteed(): Curator retries this delete across connection problems.
                client.delete().guaranteed().forPath(lockNodePath);
            }
        }
        return false;
    }
    // Path used when re-queueing an item after a consumer failure. The original
    // item path is intentionally ignored here (a fresh sequential node is created);
    // presumably subclasses override this to preserve metadata encoded in the path
    // - TODO confirm intent of the unused parameter.
    protected String makeRequeueItemPath(String itemPath)
    {
        return makeItemPath();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import java.io.IOException;
import java.util.Locale;
/**
* Defines what values to pick in the case a document contains multiple values for a particular field.
*/
public enum MultiValueMode implements Writeable<MultiValueMode> {
    /**
     * Pick the sum of all the values.
     */
    SUM {
        @Override
        protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                long total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                return total;
            } else {
                // No values for this doc: report the caller-supplied missing value.
                return missingValue;
            }
        }
        @Override
        protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
            // Sums values across all inner docs in [startDoc, endDoc) supplied by docItr.
            try {
                int totalCount = 0;
                long totalValue = 0;
                for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
                    values.setDocument(doc);
                    final int count = values.count();
                    for (int index = 0; index < count; ++index) {
                        totalValue += values.valueAt(index);
                    }
                    totalCount += count;
                }
                // If no inner doc carried a value the sum is undefined: use missingValue.
                return totalCount > 0 ? totalValue : missingValue;
            } catch (IOException ioException) {
                throw new RuntimeException(ioException);
            }
        }
        @Override
        protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                double total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                return total;
            } else {
                return missingValue;
            }
        }
        @Override
        protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
            try {
                int totalCount = 0;
                double totalValue = 0;
                for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
                    values.setDocument(doc);
                    final int count = values.count();
                    for (int index = 0; index < count; ++index) {
                        totalValue += values.valueAt(index);
                    }
                    totalCount += count;
                }
                return totalCount > 0 ? totalValue : missingValue;
            } catch (IOException ioException) {
                throw new RuntimeException(ioException);
            }
        }
        @Override
        protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                double total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                return total;
            } else {
                return missingValue;
            }
        }
    },
    /**
     * Pick the average of all the values.
     */
    AVG {
        @Override
        protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                long total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                // Integer average is computed in double space and rounded to the nearest long.
                return count > 1 ? Math.round((double)total/(double)count) : total;
            } else {
                return missingValue;
            }
        }
        @Override
        protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
            try {
                int totalCount = 0;
                long totalValue = 0;
                for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
                    values.setDocument(doc);
                    final int count = values.count();
                    for (int index = 0; index < count; ++index) {
                        totalValue += values.valueAt(index);
                    }
                    totalCount += count;
                }
                if (totalCount < 1) {
                    // No inner doc carried a value.
                    return missingValue;
                }
                return totalCount > 1 ? Math.round((double)totalValue/(double)totalCount) : totalValue;
            } catch (IOException ioException) {
                throw new RuntimeException(ioException);
            }
        }
        @Override
        protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                double total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                return total/count;
            } else {
                return missingValue;
            }
        }
        @Override
        protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
            try {
                int totalCount = 0;
                double totalValue = 0;
                for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
                    values.setDocument(doc);
                    final int count = values.count();
                    for (int index = 0; index < count; ++index) {
                        totalValue += values.valueAt(index);
                    }
                    totalCount += count;
                }
                if (totalCount < 1) {
                    return missingValue;
                }
                return totalValue/totalCount;
            } catch (IOException ioException) {
                throw new RuntimeException(ioException);
            }
        }
        @Override
        protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) {
            values.setDocument(doc);
            final int count = values.count();
            if (count > 0) {
                double total = 0;
                for (int index = 0; index < count; ++index) {
                    total += values.valueAt(index);
                }
                return total/count;
            } else {
                return missingValue;
            }
        }
    },
    /**
     * Pick the median of the values.
     */
    MEDIAN {
        // Both overloads read the median by index, relying on the ascending order
        // provided by the sorted values interfaces.
        @Override
        protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
            values.setDocument(doc);
            int count = values.count();
            if (count > 0) {
                if (count % 2 == 0) {
                    // Even count: average of the two middle values, rounded to long.
                    count /= 2;
                    return Math.round((values.valueAt(count - 1) + values.valueAt(count))/2.0);
                } else {
                    // Odd count: the single middle value.
                    count /= 2;
                    return values.valueAt(count);
                }
            } else {
                return missingValue;
            }
        }
        @Override
        protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
            values.setDocument(doc);
            int count = values.count();
            if (count > 0) {
                if (count % 2 == 0) {
                    count /= 2;
                    return (values.valueAt(count - 1) + values.valueAt(count))/2.0;
                } else {
                    count /= 2;
                    return values.valueAt(count);
                }
            } else {
                return missingValue;
            }
        }
    },
/**
* Pick the lowest value.
*/
MIN {
@Override
protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
values.setDocument(doc);
final int count = values.count();
return count > 0 ? values.valueAt(0) : missingValue;
}
@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int totalCount = 0;
long minValue = Long.MAX_VALUE;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
values.setDocument(doc);
final int count = values.count();
if (count > 0) {
minValue = Math.min(minValue, values.valueAt(0));
}
totalCount += count;
}
return totalCount > 0 ? minValue : missingValue;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
values.setDocument(doc);
int count = values.count();
return count > 0 ? values.valueAt(0) : missingValue;
}
@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int totalCount = 0;
double minValue = Double.MAX_VALUE;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
values.setDocument(doc);
final int count = values.count();
if (count > 0) {
minValue = Math.min(minValue, values.valueAt(0));
}
totalCount += count;
}
return totalCount > 0 ? minValue : missingValue;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) {
values.setDocument(doc);
final int count = values.count();
return count > 0 ? values.valueAt(0) : missingValue;
}
@Override
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
BytesRefBuilder value = null;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
final BytesRef innerValue = values.get(doc);
if (innerValue != null) {
if (value == null) {
builder.copyBytes(innerValue);
value = builder;
} else {
final BytesRef min = value.get().compareTo(innerValue) <= 0 ? value.get() : innerValue;
if (min == innerValue) {
value.copyBytes(min);
}
}
}
}
return value == null ? null : value.get();
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected int pick(RandomAccessOrds values, int doc) {
values.setDocument(doc);
return values.cardinality() > 0 ? (int)values.ordAt(0) : -1;
}
@Override
protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int ord = -1;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
final int innerOrd = values.getOrd(doc);
if (innerOrd != -1) {
ord = ord == -1 ? innerOrd : Math.min(ord, innerOrd);
}
}
return ord;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) {
values.setDocument(doc);
int count = values.count();
double min = Double.MAX_VALUE;
for (int index = 0; index < count; ++index) {
min = Math.min(values.valueAt(index), min);
}
return count > 0 ? min : missingValue;
}
},
/**
* Pick the highest value.
*/
MAX {
@Override
protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
values.setDocument(doc);
final int count = values.count();
return count > 0 ? values.valueAt(count - 1) : missingValue;
}
@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int totalCount = 0;
long maxValue = Long.MIN_VALUE;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
values.setDocument(doc);
final int count = values.count();
if (count > 0) {
maxValue = Math.max(maxValue, values.valueAt(count - 1));
}
totalCount += count;
}
return totalCount > 0 ? maxValue : missingValue;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
values.setDocument(doc);
final int count = values.count();
return count > 0 ? values.valueAt(count - 1) : missingValue;
}
@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int totalCount = 0;
double maxValue = Double.MIN_VALUE;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
values.setDocument(doc);
final int count = values.count();
if (count > 0) {
maxValue = Math.max(maxValue, values.valueAt(count - 1));
}
totalCount += count;
}
return totalCount > 0 ? maxValue : missingValue;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) {
values.setDocument(doc);
final int count = values.count();
return count > 0 ? values.valueAt(count - 1) : missingValue;
}
@Override
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
BytesRefBuilder value = null;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
final BytesRef innerValue = values.get(doc);
if (innerValue != null) {
if (value == null) {
builder.copyBytes(innerValue);
value = builder;
} else {
final BytesRef max = value.get().compareTo(innerValue) > 0 ? value.get() : innerValue;
if (max == innerValue) {
value.copyBytes(max);
}
}
}
}
return value == null ? null : value.get();
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected int pick(RandomAccessOrds values, int doc) {
values.setDocument(doc);
final int count = values.cardinality();
return count > 0 ? (int)values.ordAt(count - 1) : -1;
}
@Override
protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) {
try {
int ord = -1;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
final int innerOrd = values.getOrd(doc);
if (innerOrd != -1) {
ord = Math.max(ord, innerOrd);
}
}
return ord;
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
@Override
protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) {
values.setDocument(doc);
int count = values.count();
double max = Double.MIN_VALUE;
for (int index = 0; index < count; ++index) {
max = Math.max(values.valueAt(index), max);
}
return count > 0 ? max : missingValue;
}
};
/**
* A case insensitive version of {@link #valueOf(String)}
*
* @throws IllegalArgumentException if the given string doesn't match a sort mode or is <code>null</code>.
*/
public static MultiValueMode fromString(String sortMode) {
try {
return valueOf(sortMode.toUpperCase(Locale.ROOT));
} catch (Throwable t) {
throw new IllegalArgumentException("Illegal sort mode: " + sortMode);
}
}
    /**
     * Return a {@link NumericDocValues} instance that can be used to sort documents
     * with this mode and the provided values. When a document has no value,
     * <code>missingValue</code> is returned.
     *
     * Allowed Modes: SUM, AVG, MEDIAN, MIN, MAX
     */
    public NumericDocValues select(final SortedNumericDocValues values, final long missingValue) {
        final NumericDocValues singleton = DocValues.unwrapSingleton(values);
        if (singleton != null) {
            // Single-valued field: no mode logic needed, only missing-value substitution.
            final Bits docsWithField = DocValues.unwrapSingletonBits(values);
            if (docsWithField == null || missingValue == 0) {
                // Either every doc has a value, or missingValue coincides with the
                // default (0) that the singleton already returns for missing docs.
                return singleton;
            } else {
                return new NumericDocValues() {
                    @Override
                    public long get(int docID) {
                        final long value = singleton.get(docID);
                        // 0 is also the default for missing docs, so consult the bitset.
                        if (value == 0 && docsWithField.get(docID) == false) {
                            return missingValue;
                        }
                        return value;
                    }
                };
            }
        } else {
            return new NumericDocValues() {
                @Override
                public long get(int docID) {
                    return pick(values, missingValue, docID);
                }
            };
        }
    }
    // Per-document long reduction; overridden by modes that support it (see enum constants).
    protected long pick(SortedNumericDocValues values, long missingValue, int doc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link NumericDocValues} instance that can be used to sort root documents
     * with this mode, the provided values and filters for root/inner documents.
     *
     * For every root document, the values of its inner documents will be aggregated.
     * If none of the inner documents has a value, then <code>missingValue</code> is returned.
     *
     * Allowed Modes: SUM, AVG, MIN, MAX
     *
     * NOTE: Calling the returned instance on docs that are not root docs is illegal
     * The returned instance can only be evaluate the current and upcoming docs
     */
    public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet rootDocs, final DocIdSet innerDocSet, int maxDoc) throws IOException {
        if (rootDocs == null || innerDocSet == null) {
            // No nesting information: behave as if no doc has any value.
            return select(DocValues.emptySortedNumeric(maxDoc), missingValue);
        }
        final DocIdSetIterator innerDocs = innerDocSet.iterator();
        if (innerDocs == null) {
            return select(DocValues.emptySortedNumeric(maxDoc), missingValue);
        }
        return new NumericDocValues() {
            // Cache of the last evaluated root doc; valid because callers may only
            // move forward (enforced by the asserts below).
            int lastSeenRootDoc = 0;
            long lastEmittedValue = missingValue;
            @Override
            public long get(int rootDoc) {
                assert rootDocs.get(rootDoc) : "can only sort root documents";
                assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs";
                if (rootDoc == lastSeenRootDoc) {
                    return lastEmittedValue;
                }
                try {
                    // Inner docs of rootDoc lie strictly between the previous root and rootDoc.
                    final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1);
                    final int firstNestedDoc;
                    if (innerDocs.docID() > prevRootDoc) {
                        firstNestedDoc = innerDocs.docID();
                    } else {
                        firstNestedDoc = innerDocs.advance(prevRootDoc + 1);
                    }
                    lastSeenRootDoc = rootDoc;
                    lastEmittedValue = pick(values, missingValue, innerDocs, firstNestedDoc, rootDoc);
                    return lastEmittedValue;
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }
    // Range reduction over inner docs; overridden by modes that support nested sorting.
    protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link NumericDoubleValues} instance that can be used to sort documents
     * with this mode and the provided values. When a document has no value,
     * <code>missingValue</code> is returned.
     *
     * Allowed Modes: SUM, AVG, MEDIAN, MIN, MAX
     */
    public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue) {
        // Double counterpart of select(SortedNumericDocValues, long); same structure.
        final NumericDoubleValues singleton = FieldData.unwrapSingleton(values);
        if (singleton != null) {
            final Bits docsWithField = FieldData.unwrapSingletonBits(values);
            if (docsWithField == null || missingValue == 0) {
                return singleton;
            } else {
                return new NumericDoubleValues() {
                    @Override
                    public double get(int docID) {
                        final double value = singleton.get(docID);
                        // 0 is also the default for missing docs, so consult the bitset.
                        if (value == 0 && docsWithField.get(docID) == false) {
                            return missingValue;
                        }
                        return value;
                    }
                };
            }
        } else {
            return new NumericDoubleValues() {
                @Override
                public double get(int docID) {
                    return pick(values, missingValue, docID);
                }
            };
        }
    }
    // Per-document double reduction; overridden by modes that support it.
    protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link NumericDoubleValues} instance that can be used to sort root documents
     * with this mode, the provided values and filters for root/inner documents.
     *
     * For every root document, the values of its inner documents will be aggregated.
     * If none of the inner documents has a value, then <code>missingValue</code> is returned.
     *
     * Allowed Modes: SUM, AVG, MIN, MAX
     *
     * NOTE: Calling the returned instance on docs that are not root docs is illegal
     * The returned instance can only be evaluate the current and upcoming docs
     */
    public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet rootDocs, final DocIdSet innerDocSet, int maxDoc) throws IOException {
        if (rootDocs == null || innerDocSet == null) {
            return select(FieldData.emptySortedNumericDoubles(maxDoc), missingValue);
        }
        final DocIdSetIterator innerDocs = innerDocSet.iterator();
        if (innerDocs == null) {
            return select(FieldData.emptySortedNumericDoubles(maxDoc), missingValue);
        }
        return new NumericDoubleValues() {
            // Forward-only cache of the last evaluated root doc (see asserts below).
            int lastSeenRootDoc = 0;
            double lastEmittedValue = missingValue;
            @Override
            public double get(int rootDoc) {
                assert rootDocs.get(rootDoc) : "can only sort root documents";
                assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs";
                if (rootDoc == lastSeenRootDoc) {
                    return lastEmittedValue;
                }
                try {
                    // Inner docs of rootDoc lie strictly between the previous root and rootDoc.
                    final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1);
                    final int firstNestedDoc;
                    if (innerDocs.docID() > prevRootDoc) {
                        firstNestedDoc = innerDocs.docID();
                    } else {
                        firstNestedDoc = innerDocs.advance(prevRootDoc + 1);
                    }
                    lastSeenRootDoc = rootDoc;
                    lastEmittedValue = pick(values, missingValue, innerDocs, firstNestedDoc, rootDoc);
                    return lastEmittedValue;
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }
    // Range reduction over inner docs; overridden by modes that support nested sorting.
    protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link BinaryDocValues} instance that can be used to sort documents
     * with this mode and the provided values. When a document has no value,
     * <code>missingValue</code> is returned.
     *
     * Allowed Modes: MIN, MAX
     */
    public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue) {
        final BinaryDocValues singleton = FieldData.unwrapSingleton(values);
        if (singleton != null) {
            final Bits docsWithField = FieldData.unwrapSingletonBits(values);
            if (docsWithField == null) {
                return singleton;
            } else {
                return new BinaryDocValues() {
                    @Override
                    public BytesRef get(int docID) {
                        final BytesRef value = singleton.get(docID);
                        // An empty term is also what a missing doc yields, so consult the bitset.
                        if (value.length == 0 && docsWithField.get(docID) == false) {
                            return missingValue;
                        }
                        return value;
                    }
                };
            }
        } else {
            return new BinaryDocValues() {
                @Override
                public BytesRef get(int docID) {
                    return pick(values, missingValue, docID);
                }
            };
        }
    }
    // Per-document binary reduction; only MIN and MAX override this.
    protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link BinaryDocValues} instance that can be used to sort root documents
     * with this mode, the provided values and filters for root/inner documents.
     *
     * For every root document, the values of its inner documents will be aggregated.
     * If none of the inner documents has a value, then <code>missingValue</code> is returned.
     *
     * Allowed Modes: MIN, MAX
     *
     * NOTE: Calling the returned instance on docs that are not root docs is illegal
     * The returned instance can only be evaluate the current and upcoming docs
     */
    public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet rootDocs, final DocIdSet innerDocSet, int maxDoc) throws IOException {
        if (rootDocs == null || innerDocSet == null) {
            return select(FieldData.emptySortedBinary(maxDoc), missingValue);
        }
        final DocIdSetIterator innerDocs = innerDocSet.iterator();
        if (innerDocs == null) {
            return select(FieldData.emptySortedBinary(maxDoc), missingValue);
        }
        // null as the per-doc missing value lets pick(...) signal "no inner value"
        // with null; missingValue is substituted afterwards (below).
        final BinaryDocValues selectedValues = select(values, null);
        return new BinaryDocValues() {
            // Scratch buffer reused across root docs by pick(...).
            final BytesRefBuilder builder = new BytesRefBuilder();
            int lastSeenRootDoc = 0;
            BytesRef lastEmittedValue = missingValue;
            @Override
            public BytesRef get(int rootDoc) {
                assert rootDocs.get(rootDoc) : "can only sort root documents";
                assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs";
                if (rootDoc == lastSeenRootDoc) {
                    return lastEmittedValue;
                }
                try {
                    // Inner docs of rootDoc lie strictly between the previous root and rootDoc.
                    final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1);
                    final int firstNestedDoc;
                    if (innerDocs.docID() > prevRootDoc) {
                        firstNestedDoc = innerDocs.docID();
                    } else {
                        firstNestedDoc = innerDocs.advance(prevRootDoc + 1);
                    }
                    lastSeenRootDoc = rootDoc;
                    lastEmittedValue = pick(selectedValues, builder, innerDocs, firstNestedDoc, rootDoc);
                    if (lastEmittedValue == null) {
                        // No inner doc carried a value.
                        lastEmittedValue = missingValue;
                    }
                    return lastEmittedValue;
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }
    // Range reduction over inner docs; only MIN and MAX override this.
    protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link SortedDocValues} instance that can be used to sort documents
     * with this mode and the provided values.
     *
     * Allowed Modes: MIN, MAX
     */
    public SortedDocValues select(final RandomAccessOrds values) {
        // SortedDocValues addresses ords with int, so the term count must fit in an int.
        if (values.getValueCount() >= Integer.MAX_VALUE) {
            throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE-1) + " unique terms are unsupported");
        }
        final SortedDocValues singleton = DocValues.unwrapSingleton(values);
        if (singleton != null) {
            return singleton;
        } else {
            return new SortedDocValues() {
                @Override
                public int getOrd(int docID) {
                    return pick(values, docID);
                }
                @Override
                public BytesRef lookupOrd(int ord) {
                    return values.lookupOrd(ord);
                }
                @Override
                public int getValueCount() {
                    // Safe cast: checked against Integer.MAX_VALUE above.
                    return (int)values.getValueCount();
                }
            };
        }
    }
    // Per-document ordinal reduction; only MIN and MAX override this.
    protected int pick(RandomAccessOrds values, int doc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link SortedDocValues} instance that can be used to sort root documents
     * with this mode, the provided values and filters for root/inner documents.
     *
     * For every root document, the values of its inner documents will be aggregated.
     *
     * Allowed Modes: MIN, MAX
     *
     * NOTE: Calling the returned instance on docs that are not root docs is illegal
     * The returned instance can only be evaluate the current and upcoming docs
     */
    public SortedDocValues select(final RandomAccessOrds values, final BitSet rootDocs, final DocIdSet innerDocSet) throws IOException {
        if (rootDocs == null || innerDocSet == null) {
            return select(DocValues.emptySortedSet());
        }
        final DocIdSetIterator innerDocs = innerDocSet.iterator();
        if (innerDocs == null) {
            return select(DocValues.emptySortedSet());
        }
        final SortedDocValues selectedValues = select(values);
        return new SortedDocValues() {
            // Forward-only cache of the last evaluated root doc (see asserts below).
            int lastSeenRootDoc = 0;
            int lastEmittedOrd = -1;
            @Override
            public BytesRef lookupOrd(int ord) {
                return selectedValues.lookupOrd(ord);
            }
            @Override
            public int getValueCount() {
                return selectedValues.getValueCount();
            }
            @Override
            public int getOrd(int rootDoc) {
                assert rootDocs.get(rootDoc) : "can only sort root documents";
                assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs";
                if (rootDoc == lastSeenRootDoc) {
                    return lastEmittedOrd;
                }
                try {
                    // Inner docs of rootDoc lie strictly between the previous root and rootDoc.
                    final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1);
                    final int firstNestedDoc;
                    if (innerDocs.docID() > prevRootDoc) {
                        firstNestedDoc = innerDocs.docID();
                    } else {
                        firstNestedDoc = innerDocs.advance(prevRootDoc + 1);
                    }
                    lastSeenRootDoc = rootDoc;
                    return lastEmittedOrd = pick(selectedValues, innerDocs, firstNestedDoc, rootDoc);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }
    // Range ordinal reduction over inner docs; only MIN and MAX override this.
    protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
    /**
     * Return a {@link NumericDoubleValues} instance that can be used to sort documents
     * with this mode and the provided values. When a document has no value,
     * <code>missingValue</code> is returned.
     *
     * Allowed Modes: SUM, AVG, MIN, MAX
     */
    public NumericDoubleValues select(final UnsortedNumericDoubleValues values, final double missingValue) {
        // No singleton fast path here: unsorted values expose no singleton/bitset view.
        return new NumericDoubleValues() {
            @Override
            public double get(int docID) {
                return pick(values, missingValue, docID);
            }
        };
    }
    // Per-document reduction over unsorted values; MEDIAN is unsupported because it
    // needs sorted input (see the mode overrides).
    protected double pick(UnsortedNumericDoubleValues values, final double missingValue, int doc) {
        throw new IllegalArgumentException("Unsupported sort mode: " + this);
    }
/**
* Interface allowing custom value generators to be used in MultiValueMode.
*/
public interface UnsortedNumericDoubleValues {
int count();
void setDocument(int docId);
double valueAt(int index);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
public static MultiValueMode readMultiValueModeFrom(StreamInput in) throws IOException {
return MultiValueMode.AVG.readFrom(in);
}
/**
 * Deserialize a mode previously written by {@code writeTo}: reads a vInt
 * ordinal and maps it back to the corresponding enum constant.
 *
 * @throws IOException if the stream contains an ordinal outside the enum range
 */
@Override
public MultiValueMode readFrom(StreamInput in) throws IOException {
    final int ordinal = in.readVInt();
    final MultiValueMode[] modes = values();
    if (ordinal >= 0 && ordinal < modes.length) {
        return modes[ordinal];
    }
    throw new IOException("Unknown MultiValueMode ordinal [" + ordinal + "]");
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.javascript.rhino.Node;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
/**
* An object that optimizes the order of compiler passes.
*
* @author nicksantos@google.com (Nick Santos)
* @author dimvar@google.com (Dimitris Vardoulakis)
*/
class PhaseOptimizer implements CompilerPass {
  private static final Logger logger =
      Logger.getLogger(PhaseOptimizer.class.getName());
  private final AbstractCompiler compiler;
  private final PerformanceTracker tracker;
  // The ordered sequence of passes to run; entries are NamedPass or Loop.
  private final List<CompilerPass> passes;
  // True while a fixed-point Loop is executing; gates scope-change tracking.
  private boolean inLoop;
  // Optional pass run after every pass to validate the AST; null when disabled.
  private PassFactory sanityCheck;
  private boolean printAstHashcodes = false;
  private double progress = 0.0;
  private double progressStep = 0.0;
  private final ProgressRange progressRange;
  // These fields are used during optimization loops.
  // They are declared here for two reasons:
  // 1) Loop and ScopedChangeHandler can communicate via shared state
  // 2) Compiler talks to PhaseOptimizer, not Loop or ScopedChangeHandler
  private NamedPass currentPass;
  // For each pass, remember the time at the end of the pass's last run.
  private Map<NamedPass, Integer> lastRuns;
  // The scope (function node or root) most recently entered via setScope.
  private Node currentScope;
  // Starts at 0, increases as "interesting" things happen.
  // Nothing happens at time START_TIME, the first pass starts at time 1.
  // The correctness of scope-change tracking relies on Node/getIntProp
  // returning 0 if the custom attribute on a node hasn't been set.
  private int timestamp;
  // The time of the last change made to the program by any pass.
  private int lastChange;
  private static final int START_TIME = 0;
  private final Node jsRoot;
  // Compiler/reportChangeToScope must call reportCodeChange to update all
  // change handlers. This flag prevents double update in ScopedChangeHandler.
  private boolean crossScopeReporting;
  // Used for sanity checks between loopable passes
  private Node lastAst;
  private Map<Node, Node> mtoc; // Stands for "main to clone"
  /**
   * When processing loopable passes in order, the PhaseOptimizer can be in one
   * of these two states.
   * <p>
   * This enum is used by Loop/process only, but enum types can't be local.
   */
  enum State {
    RUN_PASSES_NOT_RUN_IN_PREV_ITER,
    RUN_PASSES_THAT_CHANGED_STH_IN_PREV_ITER
  }
  // NOTE(dimvar): There used to be some code that tried various orderings of
  // loopable passes and picked the fastest one. This code became stale
  // gradually and I decided to remove it. It was also never tried after the
  // new pass scheduler was written. If we need to revisit this order in the
  // future, we should write new code to do it.
  @VisibleForTesting
  static final List<String> OPTIMAL_ORDER = ImmutableList.of(
      "deadAssignmentsElimination",
      "inlineFunctions",
      "removeUnusedPrototypeProperties",
      "removeUnreachableCode",
      "removeUnusedVars",
      "minimizeExitPoints",
      "inlineVariables",
      "collapseObjectLiterals",
      "peepholeOptimizations");
  static final int MAX_LOOPS = 100;
  static final String OPTIMIZE_LOOP_ERROR =
      "Fixed point loop exceeded the maximum number of iterations.";
  /**
   * @param comp the compiler that owns/creates this.
   * @param tracker an optional performance tracker
   * @param range the progress range for the process function or null
   *   if progress should not be reported.
   */
  PhaseOptimizer(
      AbstractCompiler comp, PerformanceTracker tracker, ProgressRange range) {
    this.compiler = comp;
    this.jsRoot = comp.getJsRoot();
    this.tracker = tracker;
    this.passes = Lists.newArrayList();
    this.progressRange = range;
    this.inLoop = false;
    this.crossScopeReporting = false;
    // Both clocks start at START_TIME; the first pass runs at time 1.
    this.timestamp = this.lastChange = START_TIME;
  }
  /**
   * Add the passes generated by the given factories to the compile sequence.
   * <p>
   * Automatically pulls multi-run passes into fixed point loops. If there
   * are 1 or more multi-run passes in a row, they will run together in
   * the same fixed point loop. The passes will run until they are finished
   * making changes.
   * <p>
   * The PhaseOptimizer is free to tweak the order and frequency of multi-run
   * passes in a fixed-point loop.
   */
  void consume(List<PassFactory> factories) {
    Loop currentLoop = new Loop();
    boolean isCurrentLoopPopulated = false;
    for (PassFactory factory : factories) {
      if (factory.isOneTimePass()) {
        // A one-time pass terminates the current run of loopable passes.
        if (isCurrentLoopPopulated) {
          passes.add(currentLoop);
          currentLoop = new Loop();
          isCurrentLoopPopulated = false;
        }
        addOneTimePass(factory);
      } else {
        currentLoop.addLoopedPass(factory);
        isCurrentLoopPopulated = true;
      }
    }
    // Flush a trailing run of loopable passes.
    if (isCurrentLoopPopulated) {
      passes.add(currentLoop);
    }
  }
  /**
   * Add the pass generated by the given factory to the compile sequence.
   * This pass will be run once.
   */
  @VisibleForTesting
  void addOneTimePass(PassFactory factory) {
    passes.add(new NamedPass(factory));
  }
  /**
   * Add a loop to the compile sequence. This loop will continue running
   * until the AST stops changing.
   * @return The loop structure. Pass suppliers should be added to the loop.
   */
  Loop addFixedPointLoop() {
    Loop loop = new Loop();
    passes.add(loop);
    return loop;
  }
  /**
   * Adds a sanity checker to be run after every pass. Intended for development.
   */
  void setSanityCheck(PassFactory sanityCheck) {
    this.sanityCheck = sanityCheck;
    setSanityCheckState();
  }
  /**
   * Snapshots the AST (clone plus main-to-clone node map) so the sanity check
   * can verify scope-change tracking. Only meaningful inside a loop.
   */
  private void setSanityCheckState() {
    if (inLoop) {
      lastAst = jsRoot.cloneTree();
      mtoc = NodeUtil.mapMainToClone(jsRoot, lastAst);
    }
  }
  /**
   * Sets the hashcode of the AST to be logged every pass.
   * Intended for development.
   */
  void setPrintAstHashcodes(boolean printAstHashcodes) {
    this.printAstHashcodes = printAstHashcodes;
  }
  /**
   * Run all the passes in the optimizer.
   */
  @Override
  public void process(Node externs, Node root) {
    progress = 0.0;
    progressStep = 0.0;
    if (progressRange != null) {
      // Divide the range evenly among the top-level passes/loops.
      progressStep = (progressRange.maxValue - progressRange.initialValue)
          / passes.size();
      progress = progressRange.initialValue;
    }
    for (CompilerPass pass : passes) {
      pass.process(externs, root);
      // Stop early if a pass produced errors that halt compilation.
      if (hasHaltingErrors()) {
        return;
      }
    }
  }
  /** Logs and records the AST source hashcode when enabled via setPrintAstHashcodes. */
  private void maybePrintAstHashcodes(String passName, Node root) {
    if (printAstHashcodes) {
      String hashCodeMsg = "AST hashCode after " + passName + ": " +
          compiler.toSource(root).hashCode();
      System.err.println(hashCodeMsg);
      compiler.addToDebugLog(hashCodeMsg);
    }
  }
  /**
   * Runs the sanity check if it is available.
   */
  private void maybeSanityCheck(Node externs, Node root) {
    if (sanityCheck != null) {
      sanityCheck.create(compiler).process(externs, root);
      // The cross-module passes are loopable and ran together, but do not
      // participate in the other optimization loops, and are not relevant to
      // tracking changed scopes.
      if (inLoop &&
          !currentPass.name.equals(Compiler.CROSS_MODULE_CODE_MOTION_NAME) &&
          !currentPass.name.equals(Compiler.CROSS_MODULE_METHOD_MOTION_NAME)) {
        NodeUtil.verifyScopeChanges(mtoc, jsRoot, true, compiler);
      }
    }
  }
  private boolean hasHaltingErrors() {
    return compiler.hasHaltingErrors();
  }
  /**
   * A single compiler pass.
   */
  class NamedPass implements CompilerPass {
    final String name;
    private final PassFactory factory;
    private Tracer tracer;
    NamedPass(PassFactory factory) {
      this.name = factory.getName();
      this.factory = factory;
    }
    /**
     * Runs the underlying pass once, wrapped in tracing, progress reporting,
     * optional AST hashcode logging, and the optional sanity check.
     */
    @Override
    public void process(Node externs, Node root) {
      logger.fine(name);
      if (sanityCheck != null) {
        // Before running the pass, clone the AST so you can sanity-check the
        // changed AST against the clone after the pass finishes.
        setSanityCheckState();
      }
      if (tracker != null) {
        tracker.recordPassStart(name, factory.isOneTimePass());
      }
      tracer = new Tracer("JSCompiler");
      compiler.beforePass(name);
      // Delay the creation of the actual pass until *after* all previous passes
      // have been processed.
      // Some precondition checks rely on this, eg, in CoalesceVariableNames.
      factory.create(compiler).process(externs, root);
      compiler.afterPass(name);
      try {
        if (progressRange == null) {
          // -1 signals "no percentage available" to the progress reporter.
          compiler.setProgress(-1, name);
        } else {
          progress += progressStep;
          compiler.setProgress(progress, name);
        }
        if (tracker != null) {
          tracker.recordPassStop(name, tracer.stop());
        }
        maybePrintAstHashcodes(name, root);
        maybeSanityCheck(externs, root);
      } catch (IllegalStateException e) {
        // TODO(johnlenz): Remove this once the normalization checks report
        // errors instead of exceptions.
        throw new RuntimeException("Sanity check failed for " + name, e);
      }
    }
  }
  /** Records the scope containing n as the current scope (loops only). */
  void setScope(Node n) {
    // NodeTraversal causes setScope calls outside loops; ignore them.
    if (inLoop) {
      // Find the top-level node in the scope.
      currentScope = n.isFunction() ? n : getEnclosingScope(n);
    }
  }
  /** Whether the scope of n changed since the current pass last ran. */
  boolean hasScopeChanged(Node n) {
    // Outside loops we don't track changed scopes, so we visit them all.
    if (!inLoop) {
      return true;
    }
    int timeOfLastRun = lastRuns.get(currentPass);
    // A pass looks at all functions when it first runs
    return timeOfLastRun == START_TIME
        || n.getChangeTime() > timeOfLastRun;
  }
  /** Walks up the AST to the nearest enclosing function, or to the root. */
  private Node getEnclosingScope(Node n) {
    while (n != jsRoot && n.getParent() != null) {
      n = n.getParent();
      if (n.isFunction()) {
        return n;
      }
    }
    return n;
  }
  /** Stamps the scope enclosing n as changed at the current time. */
  void reportChangeToEnclosingScope(Node n) {
    lastChange = timestamp;
    getEnclosingScope(n).setChangeTime(timestamp);
    // Every code change happens at a different time
    timestamp++;
  }
  /**
   * Records that the currently-running pass may report cross-scope changes.
   * When this happens, we don't want to falsely report the current scope as
   * changed when reportChangeToScope is called from Compiler.
   */
  void startCrossScopeReporting() {
    crossScopeReporting = true;
  }
  /** The currently-running pass won't report cross-scope changes. */
  void endCrossScopeReporting() {
    crossScopeReporting = false;
  }
  /**
   * A change handler that marks scopes as changed when reportChange is called.
   */
  private class ScopedChangeHandler implements CodeChangeHandler {
    private int lastCodeChangeQuery;
    ScopedChangeHandler() {
      this.lastCodeChangeQuery = timestamp;
    }
    /** Marks the current scope as changed, unless cross-scope reporting is active. */
    @Override
    public void reportChange() {
      if (crossScopeReporting) {
        // This call was caused by Compiler/reportChangeToEnclosingScope,
        // do nothing.
        return;
      }
      lastChange = timestamp;
      currentScope.setChangeTime(timestamp);
      // Every code change happens at a different time
      timestamp++;
    }
    /** Whether any change was reported since the previous call to this method. */
    private boolean hasCodeChangedSinceLastCall() {
      boolean result = lastChange > lastCodeChangeQuery;
      lastCodeChangeQuery = timestamp;
      // The next call to the method will happen at a different time
      timestamp++;
      return result;
    }
  }
  /**
   * A compound pass that contains atomic passes and runs them until they reach
   * a fixed point.
   * <p>
   * Notice that this is a non-static class, because it includes the closure
   * of PhaseOptimizer.
   */
  @VisibleForTesting
  class Loop implements CompilerPass {
    private final List<NamedPass> myPasses = Lists.newArrayList();
    private final Set<String> myNames = Sets.newHashSet();
    private ScopedChangeHandler scopeHandler;
    /** Adds a loopable pass; each pass name may appear at most once per loop. */
    void addLoopedPass(PassFactory factory) {
      String name = factory.getName();
      Preconditions.checkArgument(!myNames.contains(name),
          "Already a pass with name '%s' in this loop", name);
      myNames.add(name);
      myPasses.add(new NamedPass(factory));
    }
    /**
     * Runs the loop's passes to a fixed point, using a two-state schedule:
     * first run passes skipped in the previous iteration, then re-run only
     * passes that changed something, until an iteration makes no changes.
     * Gives up with an internal error after MAX_LOOPS iterations.
     */
    @Override
    public void process(Node externs, Node root) {
      Preconditions.checkState(!inLoop, "Nested loops are forbidden");
      inLoop = true;
      optimizePasses();
      // Set up function-change tracking
      scopeHandler = new ScopedChangeHandler();
      compiler.addChangeHandler(scopeHandler);
      setScope(root);
      // lastRuns is initialized before each loop. This way, when a pass is run
      // in the 2nd loop for the 1st time, it looks at all scopes.
      lastRuns = new HashMap<>();
      for (NamedPass pass : myPasses) {
        lastRuns.put(pass, START_TIME);
      }
      // Contains a pass iff it made changes the last time it was run.
      Set<NamedPass> madeChanges = Sets.newHashSet();
      // Contains a pass iff it was run during the last inner loop.
      Set<NamedPass> runInPrevIter = Sets.newHashSet();
      State state = State.RUN_PASSES_NOT_RUN_IN_PREV_ITER;
      boolean lastIterMadeChanges;
      int count = 0;
      try {
        while (true) {
          if (count++ > MAX_LOOPS) {
            compiler.throwInternalError(OPTIMIZE_LOOP_ERROR, null);
          }
          lastIterMadeChanges = false;
          for (NamedPass pass : myPasses) {
            if ((state == State.RUN_PASSES_NOT_RUN_IN_PREV_ITER
                && !runInPrevIter.contains(pass))
                || (state == State.RUN_PASSES_THAT_CHANGED_STH_IN_PREV_ITER
                && madeChanges.contains(pass))) {
              timestamp++;
              currentPass = pass;
              pass.process(externs, root);
              runInPrevIter.add(pass);
              lastRuns.put(pass, timestamp);
              if (hasHaltingErrors()) {
                return;
              } else if (scopeHandler.hasCodeChangedSinceLastCall()) {
                madeChanges.add(pass);
                lastIterMadeChanges = true;
              } else {
                madeChanges.remove(pass);
              }
            } else {
              runInPrevIter.remove(pass);
            }
          }
          if (state == State.RUN_PASSES_NOT_RUN_IN_PREV_ITER) {
            if (lastIterMadeChanges) {
              state = State.RUN_PASSES_THAT_CHANGED_STH_IN_PREV_ITER;
            } else {
              // Fixed point reached: nothing changed in a full sweep.
              return;
            }
          } else { // state == State.RUN_PASSES_THAT_CHANGED_STH_IN_PREV_ITER
            if (!lastIterMadeChanges) {
              state = State.RUN_PASSES_NOT_RUN_IN_PREV_ITER;
            }
          }
        }
      } finally {
        inLoop = false;
        compiler.removeChangeHandler(scopeHandler);
      }
    }
    /** Re-arrange the passes in an optimal order. */
    private void optimizePasses() {
      // It's important that this ordering is deterministic, so that
      // multiple compiles with the same input produce exactly the same
      // results.
      //
      // To do this, grab any passes we recognize, and move them to the end
      // in an "optimal" order.
      List<NamedPass> optimalPasses = Lists.newArrayList();
      for (String passInOptimalOrder : OPTIMAL_ORDER) {
        for (NamedPass loopablePass : myPasses) {
          if (loopablePass.name.equals(passInOptimalOrder)) {
            optimalPasses.add(loopablePass);
            break;
          }
        }
      }
      myPasses.removeAll(optimalPasses);
      myPasses.addAll(optimalPasses);
    }
  }
  /**
   * An object used when running many NamedPass loopable passes as a Loop pass,
   * to keep track of how far along we are.
   */
  static class ProgressRange {
    public final double initialValue;
    public final double maxValue;
    public ProgressRange(double initialValue, double maxValue) {
      this.initialValue = initialValue;
      this.maxValue = maxValue;
    }
  }
}
| |
package com.yunhuwifi.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.LinearGradient;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader.TileMode;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
public final class ImageTools {
public static Bitmap drawableToBitmap(Drawable drawable) {
int w = drawable.getIntrinsicWidth();
int h = drawable.getIntrinsicHeight();
Bitmap.Config config = drawable.getOpacity() != PixelFormat.OPAQUE ? Bitmap.Config.ARGB_8888
: Bitmap.Config.RGB_565;
Bitmap bitmap = Bitmap.createBitmap(w, h, config);
Canvas canvas = new Canvas(bitmap);
drawable.setBounds(0, 0, w, h);
drawable.draw(canvas);
return bitmap;
}
/**
* Bitmap to drawable
*
* @param bitmap
* @return
*/
public static Drawable bitmapToDrawable(Bitmap bitmap) {
return new BitmapDrawable(bitmap);
}
/**
* Input stream to bitmap
*
* @param inputStream
* @return
* @throws Exception
*/
public static Bitmap inputStreamToBitmap(InputStream inputStream)
throws Exception {
return BitmapFactory.decodeStream(inputStream);
}
/**
* Byte transfer to bitmap
*
* @param byteArray
* @return
*/
public static Bitmap byteToBitmap(byte[] byteArray) {
if (byteArray.length != 0) {
return BitmapFactory
.decodeByteArray(byteArray, 0, byteArray.length);
} else {
return null;
}
}
/**
* Byte transfer to drawable
*
* @param byteArray
* @return
*/
public static Drawable byteToDrawable(byte[] byteArray) {
ByteArrayInputStream ins = null;
if (byteArray != null) {
ins = new ByteArrayInputStream(byteArray);
}
return Drawable.createFromStream(ins, null);
}
/**
* Bitmap transfer to bytes
*
* @param byteArray
* @return
*/
public static byte[] bitmapToBytes(Bitmap bm) {
byte[] bytes = null;
if (bm != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, baos);
bytes = baos.toByteArray();
}
return bytes;
}
/**
* Drawable transfer to bytes
*
* @param drawable
* @return
*/
public static byte[] drawableToBytes(Drawable drawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable;
Bitmap bitmap = bitmapDrawable.getBitmap();
byte[] bytes = bitmapToBytes(bitmap);
;
return bytes;
}
/**
* Base64 to byte[]
// */
// public static byte[] base64ToBytes(String base64) throws IOException {
// byte[] bytes = Base64.decode(base64);
// return bytes;
// }
//
// /**
// * Byte[] to base64
// */
// public static String bytesTobase64(byte[] bytes) {
// String base64 = Base64.encode(bytes);
// return base64;
// }
/**
* Create reflection images
*
* @param bitmap
* @return
*/
public static Bitmap createReflectionImageWithOrigin(Bitmap bitmap) {
final int reflectionGap = 4;
int w = bitmap.getWidth();
int h = bitmap.getHeight();
Matrix matrix = new Matrix();
matrix.preScale(1, -1);
Bitmap reflectionImage = Bitmap.createBitmap(bitmap, 0, h / 2, w,
h / 2, matrix, false);
Bitmap bitmapWithReflection = Bitmap.createBitmap(w, (h + h / 2),
Config.ARGB_8888);
Canvas canvas = new Canvas(bitmapWithReflection);
canvas.drawBitmap(bitmap, 0, 0, null);
Paint deafalutPaint = new Paint();
canvas.drawRect(0, h, w, h + reflectionGap, deafalutPaint);
canvas.drawBitmap(reflectionImage, 0, h + reflectionGap, null);
Paint paint = new Paint();
LinearGradient shader = new LinearGradient(0, bitmap.getHeight(), 0,
bitmapWithReflection.getHeight() + reflectionGap, 0x70ffffff,
0x00ffffff, TileMode.CLAMP);
paint.setShader(shader);
// Set the Transfer mode to be porter duff and destination in
paint.setXfermode(new PorterDuffXfermode(Mode.DST_IN));
// Draw a rectangle using the paint with our linear gradient
canvas.drawRect(0, h, w, bitmapWithReflection.getHeight()
+ reflectionGap, paint);
return bitmapWithReflection;
}
/**
* Get rounded corner images
*
* @param bitmap
* @param roundPx
* 5 10
* @return
*/
public static Bitmap getRoundedCornerBitmap(Bitmap bitmap, float roundPx) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
Bitmap output = Bitmap.createBitmap(w, h, Config.ARGB_8888);
Canvas canvas = new Canvas(output);
final int color = 0xff424242;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, w, h);
final RectF rectF = new RectF(rect);
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
paint.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
canvas.drawBitmap(bitmap, rect, rect, paint);
return output;
}
/**
* Resize the bitmap
*
* @param bitmap
* @param width
* @param height
* @return
*/
public static Bitmap zoomBitmap(Bitmap bitmap, int width, int height) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
Matrix matrix = new Matrix();
float scaleWidth = ((float) width / w);
float scaleHeight = ((float) height / h);
matrix.postScale(scaleWidth, scaleHeight);
Bitmap newbmp = Bitmap.createBitmap(bitmap, 0, 0, w, h, matrix, true);
return newbmp;
}
/**
* Resize the drawable
* @param drawable
* @param w
* @param h
* @return
*/
public static Drawable zoomDrawable(Drawable drawable, int w, int h) {
int width = drawable.getIntrinsicWidth();
int height = drawable.getIntrinsicHeight();
Bitmap oldbmp = drawableToBitmap(drawable);
Matrix matrix = new Matrix();
float sx = ((float) w / width);
float sy = ((float) h / height);
matrix.postScale(sx, sy);
Bitmap newbmp = Bitmap.createBitmap(oldbmp, 0, 0, width, height,
matrix, true);
return new BitmapDrawable(newbmp);
}
/**
* Get images from SD card by path and the name of image
* @param photoName
* @return
*/
public static Bitmap getPhotoFromSDCard(String path,String photoName){
Bitmap photoBitmap = BitmapFactory.decodeFile(path + "/" +photoName +".png");
if (photoBitmap == null) {
return null;
}else {
return photoBitmap;
}
}
/**
* Check the SD card
* @return
*/
public static boolean checkSDCardAvailable(){
return android.os.Environment.getExternalStorageState().equals(android.os.Environment.MEDIA_MOUNTED);
}
/**
* Get image from SD card by path and the name of image
* @param fileName
* @return
*/
public static boolean findPhotoFromSDCard(String path,String photoName){
boolean flag = false;
if (checkSDCardAvailable()) {
File dir = new File(path);
if (dir.exists()) {
File folders = new File(path);
File photoFile[] = folders.listFiles();
for (int i = 0; i < photoFile.length; i++) {
String fileName = photoFile[i].getName().split("\\.")[0];
if (fileName.equals(photoName)) {
flag = true;
}
}
}else {
flag = false;
}
// File file = new File(path + "/" + photoName + ".jpg" );
// if (file.exists()) {
// flag = true;
// }else {
// flag = false;
// }
}else {
flag = false;
}
return flag;
}
/**
* Save image to the SD card
* @param photoBitmap
* @param photoName
* @param path
*/
public static void savePhotoToSDCard(Bitmap photoBitmap,String path,String photoName){
if (checkSDCardAvailable()) {
File dir = new File(path);
if (!dir.exists()){
dir.mkdirs();
}
File photoFile = new File(path , photoName + ".png");
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(photoFile);
if (photoBitmap != null) {
if (photoBitmap.compress(Bitmap.CompressFormat.PNG, 100, fileOutputStream)) {
fileOutputStream.flush();
// fileOutputStream.close();
}
}
} catch (FileNotFoundException e) {
photoFile.delete();
e.printStackTrace();
} catch (IOException e) {
photoFile.delete();
e.printStackTrace();
} finally{
try {
fileOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
/**
* Delete the image from SD card
* @param context
* @param path
* file:///sdcard/temp.jpg
*/
public static void deleteAllPhoto(String path){
if (checkSDCardAvailable()) {
File folder = new File(path);
File[] files = folder.listFiles();
for (int i = 0; i < files.length; i++) {
files[i].delete();
}
}
}
public static void deletePhotoAtPathAndName(String path,String fileName){
if (checkSDCardAvailable()) {
File folder = new File(path);
File[] files = folder.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].getName().split("\\.")[0].equals(fileName)) {
files[i].delete();
}
}
}
}
}
| |
package com.medievallords.carbyne.profiles;
import com.gmail.filoghost.holographicdisplays.api.Hologram;
import com.gmail.filoghost.holographicdisplays.api.HologramsAPI;
import com.medievallords.carbyne.Carbyne;
import com.medievallords.carbyne.customevents.ProfileCreatedEvent;
import com.medievallords.carbyne.economy.objects.Account;
import com.medievallords.carbyne.utils.Cooldowns;
import com.medievallords.carbyne.utils.PlayerHealth;
import com.medievallords.carbyne.utils.StaticClasses;
import com.medievallords.carbyne.utils.nametag.NametagManager;
import com.medievallords.carbyne.utils.serialization.InventorySerialization;
import com.medievallords.carbyne.utils.tabbed.item.TextTabItem;
import com.medievallords.carbyne.utils.tabbed.tablist.TableTabList;
import com.palmergames.bukkit.towny.event.PlayerChangePlotEvent;
import com.palmergames.bukkit.towny.exceptions.NotRegisteredException;
import com.palmergames.bukkit.towny.object.Resident;
import com.palmergames.bukkit.towny.object.Town;
import com.palmergames.bukkit.towny.object.TownBlock;
import com.palmergames.bukkit.towny.object.TownyUniverse;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.scheduler.BukkitRunnable;
import java.util.HashMap;
import java.util.UUID;
import java.util.logging.Level;
/**
* Created by Calvin on 3/22/2017
* for the Carbyne project.
*/
public class ProfileListeners implements Listener {
private HashMap<UUID, Profile.PlayerTabRunnable> playerTabs = new HashMap<>();
String tablistHeader, tablistFooter;
public ProfileListeners() {
if (Carbyne.getInstance().getConfig().getString("TablistHeader") != null)
tablistHeader = ChatColor.translateAlternateColorCodes('&', Carbyne.getInstance().getConfig().getString("TablistHeader"));
if (Carbyne.getInstance().getConfig().getString("TablistFooter") != null)
tablistFooter = ChatColor.translateAlternateColorCodes('&', Carbyne.getInstance().getConfig().getString("TablistFooter"));
handleSkills();
}
private void handleSkills() {
new BukkitRunnable() {
@Override
public void run() {
try {
for (Player player : Bukkit.getOnlinePlayers()) {
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(player.getUniqueId());
playerHealth.runTickGeneral(player);
}
} catch (Exception e) {
System.out.println("Error player health warzone-------------");
e.printStackTrace();
}
}
}.runTaskTimerAsynchronously(Carbyne.getInstance(), 20, 20);
}
@EventHandler(priority = EventPriority.HIGH)
public void onJoin(PlayerJoinEvent event) {
Player player = event.getPlayer();
if (player == null)
return;
player.setAllowFlight(true);
player.setFlying(false);
Account account = Account.getAccount(player.getUniqueId());
if (account != null)
account.setAccountHolder(player.getName());
else
Account.createAccount(player.getUniqueId(), player.getName());
if (!StaticClasses.profileManager.hasProfile(player.getUniqueId())) {
StaticClasses.profileManager.createProfile(player);
ProfileCreatedEvent profileCreatedEvent = new ProfileCreatedEvent(player, StaticClasses.profileManager.getProfile(player.getUniqueId()));
Bukkit.getPluginManager().callEvent(profileCreatedEvent);
}
Profile profile = StaticClasses.profileManager.getProfile(player.getUniqueId());
double maxHealth = StaticClasses.gearManager.calculateMaxHealth(event.getPlayer(), false);
PlayerHealth playerHealth = new PlayerHealth(maxHealth, maxHealth, profile.isSkillsToggled());
playerHealth.setMaxHealth(maxHealth);
playerHealth.setHealth(maxHealth, player);
PlayerHealth.players.put(event.getPlayer().getUniqueId(), playerHealth);
if (!profile.getUsername().equalsIgnoreCase(player.getName())) {
try {
//Bukkit.broadcastMessage("Resident Found: " + TownyUniverse.getDataSource().getResident(profile.getUsername()).getName());
Resident resident = TownyUniverse.getDataSource().getResident(profile.getUsername());
TownyUniverse.getDataSource().renamePlayer(resident, player.getName());
if (resident.hasTown()) {
Town town = resident.getTown();
TownyUniverse.getDataSource().saveTown(town);
}
//Bukkit.broadcastMessage("Resident Replacement: " + TownyUniverse.getDataSource().getResident(player.getName()).getName());
} catch (NotRegisteredException ignored) {
} catch (Exception shouldNeverHappen) {
Carbyne.getInstance().getLogger().log(Level.SEVERE, "EXCEPTION OCCURRED IN TOWNY NAME UPDATER: ");
shouldNeverHappen.printStackTrace();
}
profile.setUsername(player.getName());
}
NametagManager.setup(player);
TownBlock townBlock = TownyUniverse.getTownBlock(player.getLocation());
if (townBlock == null)
profile.setPvpTimePaused(false);
else if (!townBlock.getPermissions().pvp)
profile.setPvpTimePaused(true);
else
profile.setPvpTimePaused(false);
if (profile.isShowTab()) {
TableTabList tab = Carbyne.getInstance().getTabbed().newTableTabList(event.getPlayer());
tab.setHeader(tablistHeader);
tab.setFooter(tablistFooter);
for (int i = 0; i < 4; i++)
for (int l = 0; l < 20; l++)
tab.set(i, l, new TextTabItem("", 1));
Profile.PlayerTabRunnable runnable = new Profile.PlayerTabRunnable(event.getPlayer(), profile, Account.getAccount(event.getPlayer().getUniqueId()), tab);
runnable.runTaskTimerAsynchronously(Carbyne.getInstance(), 5L, 20);
playerTabs.put(event.getPlayer().getUniqueId(), runnable);
}
Hologram holo = HologramsAPI.createHologram(Carbyne.getInstance(), new Location(Bukkit.getWorld("world"), -716.5, 108, 307.5));
holo.getVisibilityManager().showTo(player);
holo.getVisibilityManager().setVisibleByDefault(false);
StaticClasses.dailyBonusManager.getPlayerHolograms().put(player.getUniqueId(), holo);
}
@EventHandler
public void onQuit(PlayerQuitEvent event) {
PlayerHealth.players.remove(event.getPlayer().getUniqueId());
if (playerTabs.containsKey(event.getPlayer().getUniqueId()) && playerTabs.get(event.getPlayer().getUniqueId()) != null) {
playerTabs.get(event.getPlayer().getUniqueId()).cancel();
playerTabs.remove(event.getPlayer().getUniqueId());
}
StaticClasses.dailyBonusManager.getPlayerHolograms().get(event.getPlayer().getUniqueId()).delete();
StaticClasses.dailyBonusManager.getPlayerHolograms().remove(event.getPlayer().getUniqueId());
NametagManager.remove(event.getPlayer());
}
// @EventHandler
// public void onKick(PlayerKickEvent event) {
// PlayerHealth.players.remove(event.getPlayer().getUniqueId());
//
// if (playerTabs.containsKey(event.getPlayer().getUniqueId()) && playerTabs.get(event.getPlayer().getUniqueId()) != null) {
// playerTabs.get(event.getPlayer().getUniqueId()).cancel();
// playerTabs.remove(event.getPlayer().getUniqueId());
// }
// }
@EventHandler
public void onDeath(PlayerDeathEvent event) {
Player player = event.getEntity();
Profile profile = StaticClasses.profileManager.getProfile(player.getUniqueId());
profile.setPreviousInventoryContentString(InventorySerialization.serializePlayerInventoryAsString(player.getInventory()));
profile.setKillStreak(0);
if (StaticClasses.gearManager.isInFullCarbyne(player)) {
if (player.getKiller() != null) {
Player killer = player.getKiller();
Profile killerProfile = StaticClasses.profileManager.getProfile(killer.getUniqueId());
if (Cooldowns.tryCooldown(player.getUniqueId(), killer.getUniqueId().toString() + ":carbynedeath", 300000))
profile.setCarbyneDeaths(profile.getCarbyneDeaths() + 1);
if (Cooldowns.tryCooldown(killer.getUniqueId(), player.getUniqueId().toString() + ":carbynekill", 300000))
killerProfile.setCarbyneKills(killerProfile.getCarbyneKills() + 1);
} else
profile.setCarbyneDeaths(profile.getCarbyneDeaths() + 1);
} else {
if (player.getKiller() != null) {
Player killer = player.getKiller();
Profile killerProfile = StaticClasses.profileManager.getProfile(killer.getUniqueId());
if (Cooldowns.tryCooldown(player.getUniqueId(), killer.getUniqueId().toString() + ":death", 300000))
profile.setDeaths(profile.getDeaths() + 1);
if (Cooldowns.tryCooldown(killer.getUniqueId(), player.getUniqueId().toString() + ":kill", 300000))
killerProfile.setKills(killerProfile.getKills() + 1);
} else
profile.setDeaths(profile.getDeaths() + 1);
}
if (player.getKiller() != null) {
Profile killerProfile = StaticClasses.profileManager.getProfile(player.getKiller().getUniqueId());
if (Cooldowns.tryCooldown(player.getKiller().getUniqueId(), player.getUniqueId().toString() + ":killstreak", 300000))
killerProfile.setKillStreak(killerProfile.getKillStreak() + 1);
}
}
@EventHandler
public void plotChange(PlayerChangePlotEvent event) {
Player player = event.getPlayer();
Profile profile = StaticClasses.profileManager.getProfile(player.getUniqueId());
try {
if (!event.getTo().getTownBlock().getPermissions().pvp)
profile.setPvpTimePaused(true);
else
profile.setPvpTimePaused(false);
} catch (NotRegisteredException e) {
profile.setPvpTimePaused(false);
}
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeEditor.printing;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.daemon.LineMarkerInfo;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.impl.file.PsiDirectoryFactory;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.Gray;
import com.intellij.ui.JBColor;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
public class HTMLTextPainter {
private int myOffset = 0;
private final EditorHighlighter myHighlighter;
private final String myText;
private final String myFileName;
private int mySegmentEnd;
private final PsiFile myPsiFile;
private final Document myDocument;
private int lineCount;
private int myFirstLineNumber;
private final boolean myPrintLineNumbers;
private int myColumn;
private final List<LineMarkerInfo<PsiElement>> myMethodSeparators = new ArrayList<>();
private int myCurrentMethodSeparator;
private final Project myProject;
private final HtmlStyleManager htmlStyleManager;
public HTMLTextPainter(@NotNull PsiFile psiFile, @NotNull Project project, boolean printLineNumbers) {
this(psiFile, project, new HtmlStyleManager(false), printLineNumbers, true);
}
public HTMLTextPainter(@NotNull PsiFile psiFile, @NotNull Project project, @NotNull HtmlStyleManager htmlStyleManager, boolean printLineNumbers, boolean useMethodSeparators) {
myProject = project;
myPsiFile = psiFile;
this.htmlStyleManager = htmlStyleManager;
myPrintLineNumbers = printLineNumbers;
myHighlighter = HighlighterFactory.createHighlighter(project, psiFile.getVirtualFile());
myText = psiFile.getText();
myHighlighter.setText(myText);
mySegmentEnd = myText.length();
myFileName = psiFile.getVirtualFile().getPresentableUrl();
myDocument = PsiDocumentManager.getInstance(project).getDocument(psiFile);
if (useMethodSeparators && myDocument != null) {
myMethodSeparators.addAll(FileSeparatorProvider.getFileSeparators(psiFile, myDocument));
}
myCurrentMethodSeparator = 0;
}
@NotNull
public PsiFile getPsiFile() {
return myPsiFile;
}
public void setSegment(int segmentStart, int segmentEnd, int firstLineNumber) {
myOffset = segmentStart;
mySegmentEnd = segmentEnd;
myFirstLineNumber = firstLineNumber;
}
public void paint(@Nullable TreeMap refMap, @NotNull Writer writer, boolean isStandalone) throws IOException {
HighlighterIterator hIterator = myHighlighter.createIterator(myOffset);
if (hIterator.atEnd()) {
return;
}
lineCount = myFirstLineNumber;
TextAttributes prevAttributes = null;
Iterator refKeys = null;
int refOffset = -1;
PsiReference ref = null;
if (refMap != null) {
refKeys = refMap.keySet().iterator();
if (refKeys.hasNext()) {
Integer key = (Integer)refKeys.next();
ref = (PsiReference)refMap.get(key);
refOffset = key.intValue();
}
}
int referenceEnd = -1;
if (isStandalone) {
writeHeader(writer, isStandalone ? new File(myFileName).getName() : null);
}
else {
ensureStyles();
}
writer.write("<pre>");
if (myFirstLineNumber == 0) {
writeLineNumber(writer);
}
String closeTag = null;
getMethodSeparator(hIterator.getStart());
while (!hIterator.atEnd()) {
int hStart = hIterator.getStart();
int hEnd = hIterator.getEnd();
if (hEnd > mySegmentEnd) {
break;
}
// write whitespace as is
for (; hStart < hEnd; hStart++) {
char c = myText.charAt(hStart);
if (Character.isWhitespace(c)) {
if (closeTag != null && c == '\n') {
writer.write(closeTag);
closeTag = null;
}
writer.write(c);
}
else {
break;
}
}
if (hStart == hEnd) {
hIterator.advance();
continue;
}
if (refOffset > 0 && hStart <= refOffset && hEnd > refOffset) {
referenceEnd = writeReferenceTag(writer, ref);
}
TextAttributes textAttributes = hIterator.getTextAttributes();
if (htmlStyleManager.isDefaultAttributes(textAttributes)) {
textAttributes = null;
}
if (!equals(prevAttributes, textAttributes) && referenceEnd < 0) {
if (closeTag != null) {
writer.write(closeTag);
closeTag = null;
}
if (textAttributes != null) {
htmlStyleManager.writeTextStyle(writer, textAttributes);
closeTag = "</span>";
}
prevAttributes = textAttributes;
}
writeString(writer, myText, hStart, hEnd - hStart, myPsiFile);
if (referenceEnd > 0 && hEnd >= referenceEnd) {
writer.write("</a>");
referenceEnd = -1;
if (refKeys.hasNext()) {
Integer key = (Integer)refKeys.next();
ref = (PsiReference)refMap.get(key);
refOffset = key.intValue();
}
}
hIterator.advance();
}
if (closeTag != null) {
writer.write(closeTag);
}
writer.write("</pre>\n");
if (isStandalone) {
writer.write("</body>\n");
writer.write("</html>");
}
}
protected void ensureStyles() {
htmlStyleManager.ensureStyles(myHighlighter.createIterator(myOffset), myMethodSeparators);
}
@Nullable
private LineMarkerInfo getMethodSeparator(int offset) {
if (myDocument == null) {
return null;
}
int line = myDocument.getLineNumber(Math.max(0, Math.min(myDocument.getTextLength(), offset)));
LineMarkerInfo marker = null;
LineMarkerInfo tmpMarker;
while (myCurrentMethodSeparator < myMethodSeparators.size() &&
(tmpMarker = myMethodSeparators.get(myCurrentMethodSeparator)) != null &&
FileSeparatorProvider.getDisplayLine(tmpMarker, myDocument) <= line) {
marker = tmpMarker;
myCurrentMethodSeparator++;
}
return marker;
}
private int writeReferenceTag(Writer writer, PsiReference ref) throws IOException {
PsiFile refFile = Objects.requireNonNull(ref.resolve()).getContainingFile();
PsiDirectoryFactory psiDirectoryFactory = PsiDirectoryFactory.getInstance(myProject);
String refPackageName = psiDirectoryFactory.getQualifiedName(refFile.getContainingDirectory(), false);
String psiPackageName = psiDirectoryFactory.getQualifiedName(myPsiFile.getContainingDirectory(), false);
StringBuilder fileName = new StringBuilder();
if (!psiPackageName.equals(refPackageName)) {
StringTokenizer tokens = new StringTokenizer(psiPackageName, ".");
while(tokens.hasMoreTokens()) {
tokens.nextToken();
fileName.append("../");
}
StringTokenizer refTokens = new StringTokenizer(refPackageName, ".");
while(refTokens.hasMoreTokens()) {
String token = refTokens.nextToken();
fileName.append(token);
fileName.append('/');
}
}
fileName.append(ExportToHTMLManager.getHTMLFileName(refFile));
//noinspection HardCodedStringLiteral
writer.write("<a href=\""+fileName+"\">");
return ref.getElement().getTextRange().getEndOffset();
}
@SuppressWarnings({"HardCodedStringLiteral"})
private void writeString(Writer writer, CharSequence charArray, int start, int length, @NotNull PsiFile psiFile) throws IOException {
for (int i = start; i < start + length; i++) {
char c = charArray.charAt(i);
if (c == '<') {
writeChar(writer, "<");
}
else if (c == '>') {
writeChar(writer, ">");
}
else if (c == '&') {
writeChar(writer, "&");
}
else if (c == '\"') {
writeChar(writer, """);
}
else if (c == '\t') {
int tabSize = CodeStyle.getIndentOptions(psiFile).TAB_SIZE;
if (tabSize <= 0) tabSize = 1;
int nSpaces = tabSize - myColumn % tabSize;
for (int j = 0; j < nSpaces; j++) {
writeChar(writer, " ");
}
}
else if (c == '\n' || c == '\r') {
if (c == '\r' && i + 1 < start + length && charArray.charAt(i + 1) == '\n') {
//noinspection AssignmentToForLoopParameter
i++;
}
else if (c == '\n') {
writeChar(writer, " ");
}
LineMarkerInfo marker = getMethodSeparator(i + 1);
if (marker == null) {
writer.write('\n');
}
else {
writer.write("<hr class=\"" + htmlStyleManager.getSeparatorClassName(marker.separatorColor) + "\">");
}
writeLineNumber(writer);
}
else {
writeChar(writer, String.valueOf(c));
}
}
}
private void writeChar(Writer writer, String s) throws IOException {
writer.write(s);
myColumn++;
}
private void writeLineNumber(@NonNls Writer writer) throws IOException {
myColumn = 0;
lineCount++;
if (myPrintLineNumbers) {
writer.write("<a name=\"l" + lineCount + "\">");
// String numberCloseTag = writeFontTag(writer, ourLineNumberAttributes);
writer.write("<span class=\"ln\">");
String s = Integer.toString(lineCount);
writer.write(s);
int extraSpaces = 4 - s.length();
do {
writer.write(' ');
} while (extraSpaces-- > 0);
writer.write("</span></a>");
}
}
private void writeHeader(@NonNls Writer writer, @Nullable String title) throws IOException {
writer.write("<html>\n");
writer.write("<head>\n");
writer.write("<title>" + title + "</title>\n");
writer.write("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n");
ensureStyles();
htmlStyleManager.writeStyleTag(writer, myPrintLineNumbers);
writer.write("</head>\n");
EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
writer.write("<body bgcolor=\"" + ColorUtil.toHtmlColor(scheme.getDefaultBackground()) + "\">\n");
writer.write("<table CELLSPACING=0 CELLPADDING=5 COLS=1 WIDTH=\"100%\" BGCOLOR=\"#" + ColorUtil.toHex(new JBColor(Gray.xC0, Gray.x60)) + "\" >\n");
writer.write("<tr><td><center>\n");
writer.write("<font face=\"Arial, Helvetica\" color=\"#000000\">\n");
writer.write(title + "</font>\n");
writer.write("</center></td></tr></table>\n");
}
private static boolean equals(TextAttributes attributes1, TextAttributes attributes2) {
if (attributes2 == null) {
return attributes1 == null;
}
if(attributes1 == null) {
return false;
}
if(!Comparing.equal(attributes1.getForegroundColor(), attributes2.getForegroundColor())) {
return false;
}
if(attributes1.getFontType() != attributes2.getFontType()) {
return false;
}
if(!Comparing.equal(attributes1.getBackgroundColor(), attributes2.getBackgroundColor())) {
return false;
}
if(!Comparing.equal(attributes1.getEffectColor(), attributes2.getEffectColor())) {
return false;
}
return true;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.