gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright (C) 2014 Lable (info@lable.nl)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.lable.oss.uniqueid.bytes;

import java.nio.ByteBuffer;

import static org.lable.oss.uniqueid.ParameterUtil.assertNotNullEightBytes;

/**
 * Composes and deconstructs the special eight byte identifiers generated by this library.
 * <p>
 * The eight byte ID is composed as follows:
 *
 * <pre>TTTTTTTT TTTTTTTT TTTTTTTT TTTTTTTT TTTTTTTT TTSSSSSS GGGMGGGG GGGGCCCC</pre>
 *
 * <ul>
 * <li><code>T</code>: Timestamp (in milliseconds, bit order depends on mode)
 * <li><code>S</code>: Sequence counter
 * <li><code>M</code>: Mode
 * <li><code>G</code>: Generator ID
 * <li><code>C</code>: Cluster ID
 * </ul>
 *
 * Because only 42 bits are assigned to represent the timestamp in the generated ID, the timestamp used must take place
 * between the Unix epoch (1970-01-01T00:00:00.000 UTC) and 2109.
 */
public class IDBuilder {

    /**
     * Perform all the byte mangling needed to create the eight byte ID.
     *
     * @param blueprint Blueprint containing all needed data to work with.
     * @return The 8-byte ID.
     */
    public static byte[] build(Blueprint blueprint) {
        // First 42 bits are the timestamp.
        // [0] TTTTTTTT [1] TTTTTTTT [2] TTTTTTTT [3] TTTTTTTT [4] TTTTTTTT [5] TT......
        ByteBuffer bb = ByteBuffer.allocate(8);
        switch (blueprint.getMode()) {
            case SPREAD:
                // SPREAD mode stores the timestamp bit-reversed, so IDs generated close
                // together in time differ in their most significant bytes.
                long reverseTimestamp = Long.reverse(blueprint.getTimestamp());
                bb.putLong(reverseTimestamp);
                break;
            case TIME_SEQUENTIAL:
                // TIME_SEQUENTIAL mode left-aligns the 42 timestamp bits (8 * 8 - 42 = 22),
                // so generated IDs sort chronologically as unsigned byte strings.
                long timestamp = blueprint.getTimestamp();
                bb.putLong(timestamp << 22);
                break;
        }
        byte[] tsBytes = bb.array();

        // Last 6 bits of byte 6 are for the sequence counter. The first two bits are from the timestamp.
        // [5] TTSSSSSS
        // NOTE(review): the sequence is OR-ed in unmasked; this presumably relies on
        // Blueprint guaranteeing the sequence fits in 6 bits — confirm against Blueprint.
        int or = tsBytes[5] | (byte) blueprint.getSequence();
        tsBytes[5] = (byte) or;

        // Last two bytes. The mode flag, generator ID, and cluster ID.
        // [6] GGGMGGGG [7] GGGGCCCC
        // Top three bits of the generator ID land in bits 15-13 of the 16-bit tail.
        int flagGeneratorCluster = (blueprint.getGeneratorId() << 5) & 0xE000;
        // Lower eight generator ID bits occupy bits 11-4.
        flagGeneratorCluster += (blueprint.getGeneratorId() & 0x00FF) << 4;
        // Cluster ID fills the lowest nibble (bits 3-0).
        flagGeneratorCluster += blueprint.getClusterId();
        // The mode flag sits at bit 12 (the 'M' bit of byte 6).
        flagGeneratorCluster += blueprint.getMode().getModeMask() << 12;
        // Split the assembled 16-bit value over the final two bytes of the ID.
        tsBytes[7] = (byte) flagGeneratorCluster;
        flagGeneratorCluster >>>= 8;
        tsBytes[6] = (byte) flagGeneratorCluster;

        return tsBytes;
    }

    /**
     * Decompose a generated ID into its {@link Blueprint}.
     *
     * @param id Eight byte ID to parse.
     * @return A blueprint containing the four ID components.
     */
    public static Blueprint parse(byte[] id) {
        assertNotNullEightBytes(id);

        int sequence = parseSequenceIdNoChecks(id);
        int generatorId = parseGeneratorIdNoChecks(id);
        int clusterId = parseClusterIdNoChecks(id);
        long timestamp = parseTimestampNoChecks(id);
        Mode mode = parseModeNoChecks(id);

        return new Blueprint(timestamp, sequence, generatorId, clusterId, mode);
    }

    /**
     * Find the sequence number in an identifier.
     *
     * @param id Identifier.
     * @return The sequence number, if {@code id} is a byte array with length eight.
     */
    public static int parseSequenceId(byte[] id) {
        assertNotNullEightBytes(id);
        return parseSequenceIdNoChecks(id);
    }

    /**
     * Find the generator id in an identifier.
     *
     * @param id Identifier.
     * @return The generator id, if {@code id} is a byte array with length eight.
     */
    public static int parseGeneratorId(byte[] id) {
        assertNotNullEightBytes(id);
        return parseGeneratorIdNoChecks(id);
    }

    /**
     * Find the cluster id in an identifier.
     *
     * @param id Identifier.
     * @return The cluster id, if {@code id} is a byte array with length eight.
     */
    public static int parseClusterId(byte[] id) {
        assertNotNullEightBytes(id);
        return parseClusterIdNoChecks(id);
    }

    /**
     * Find the timestamp in an identifier.
     *
     * @param id Identifier.
     * @return The timestamp, if {@code id} is a byte array with length eight.
     */
    public static long parseTimestamp(byte[] id) {
        assertNotNullEightBytes(id);
        return parseTimestampNoChecks(id);
    }

    /**
     * Find the ID mode used to construct the identifier.
     *
     * @param id Identifier.
     * @return The {@link Mode}, if {@code id} is a byte array with length eight.
     */
    public static Mode parseMode(byte[] id) {
        assertNotNullEightBytes(id);
        return parseModeNoChecks(id);
    }

    // The private methods skip the null and length check on the id, because the method calling them took care of that.

    private static int parseSequenceIdNoChecks(byte[] id) {
        // [5] ..SSSSSS
        // Mask off the two timestamp bits that share this byte.
        return id[5] & 0x3F;
    }

    private static int parseGeneratorIdNoChecks(byte[] id) {
        // [6] GGG.GGGG [7] GGGG....
        // Reassemble the generator ID from its three fragments:
        //   id[6] bits 7-5 -> generator bits 10-8  (<< 3, masked to 0x0700)
        //   id[6] bits 3-0 -> generator bits  7-4  (<< 4, masked to 0xF0)
        //   id[7] bits 7-4 -> generator bits  3-0  (>> 4, masked to 0x0F)
        // The masks also discard the sign-extension introduced by byte-to-int promotion.
        return (id[7] >> 4 & 0x0F) | (id[6] << 3 & 0x0700) | (id[6] << 4 & 0xF0);
    }

    private static int parseClusterIdNoChecks(byte[] id) {
        // [7] ....CCCC
        return id[7] & 0x0F;
    }

    private static long parseTimestampNoChecks(byte[] id) {
        // The bit layout of the timestamp depends on the mode flag, so read that first.
        Mode mode = parseModeNoChecks(id);
        switch (mode) {
            case TIME_SEQUENTIAL:
                return parseTimestampNoChecksTime(id);
            case SPREAD:
            default:
                return parseTimestampNoChecksSpread(id);
        }
    }

    private static long parseTimestampNoChecksSpread(byte[] id) {
        byte[] copy = id.clone();
        // Clear everything but the first 42 bits for the timestamp.
        // [0] TTTTTTTT [1] TTTTTTTT [2] TTTTTTTT [3] TTTTTTTT [4] TTTTTTTT [5] TT......
        copy[5] = (byte) (copy[5] & 0xC0);
        copy[6] = 0;
        copy[7] = 0;
        ByteBuffer bb = ByteBuffer.wrap(copy);
        // Undo the bit-reversal applied by build() in SPREAD mode.
        return Long.reverse(bb.getLong());
    }

    private static long parseTimestampNoChecksTime(byte[] id) {
        byte[] copy = id.clone();
        ByteBuffer bb = ByteBuffer.wrap(copy);
        long ts = bb.getLong();
        // The timestamp occupies the top 42 bits; unsigned-shift out the lower 22
        // (sequence, generator, mode, and cluster bits).
        ts >>>= 22;
        return ts;
    }

    private static Mode parseModeNoChecks(byte[] id) {
        // [6] ...M....
        // The mode flag is bit 4 of byte 6.
        int modeMask = id[6] >> 4 & 0x01;
        return Mode.fromModeMask(modeMask);
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/geometry.proto package com.google.cloud.vision.v1p4beta1; /** * * * <pre> * A bounding polygon for the detected image annotation. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.BoundingPoly} */ public final class BoundingPoly extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.BoundingPoly) BoundingPolyOrBuilder { private static final long serialVersionUID = 0L; // Use BoundingPoly.newBuilder() to construct. private BoundingPoly(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BoundingPoly() { vertices_ = java.util.Collections.emptyList(); normalizedVertices_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BoundingPoly( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { vertices_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.Vertex>(); mutable_bitField0_ |= 0x00000001; } vertices_.add( input.readMessage( com.google.cloud.vision.v1p4beta1.Vertex.parser(), extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { normalizedVertices_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.NormalizedVertex>(); mutable_bitField0_ |= 0x00000002; } 
normalizedVertices_.add( input.readMessage( com.google.cloud.vision.v1p4beta1.NormalizedVertex.parser(), extensionRegistry)); break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { vertices_ = java.util.Collections.unmodifiableList(vertices_); } if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { normalizedVertices_ = java.util.Collections.unmodifiableList(normalizedVertices_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.GeometryProto .internal_static_google_cloud_vision_v1p4beta1_BoundingPoly_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.GeometryProto .internal_static_google_cloud_vision_v1p4beta1_BoundingPoly_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.BoundingPoly.class, com.google.cloud.vision.v1p4beta1.BoundingPoly.Builder.class); } public static final int VERTICES_FIELD_NUMBER = 1; private java.util.List<com.google.cloud.vision.v1p4beta1.Vertex> vertices_; /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.Vertex> getVerticesList() { return vertices_; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public java.util.List<? 
extends com.google.cloud.vision.v1p4beta1.VertexOrBuilder> getVerticesOrBuilderList() { return vertices_; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public int getVerticesCount() { return vertices_.size(); } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.Vertex getVertices(int index) { return vertices_.get(index); } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.VertexOrBuilder getVerticesOrBuilder(int index) { return vertices_.get(index); } public static final int NORMALIZED_VERTICES_FIELD_NUMBER = 2; private java.util.List<com.google.cloud.vision.v1p4beta1.NormalizedVertex> normalizedVertices_; /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.NormalizedVertex> getNormalizedVerticesList() { return normalizedVertices_; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2;</code> */ public java.util.List<? extends com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder> getNormalizedVerticesOrBuilderList() { return normalizedVertices_; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2;</code> */ public int getNormalizedVerticesCount() { return normalizedVertices_.size(); } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2;</code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertex getNormalizedVertices(int index) { return normalizedVertices_.get(index); } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2;</code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder getNormalizedVerticesOrBuilder( int index) { return normalizedVertices_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < vertices_.size(); i++) { output.writeMessage(1, vertices_.get(i)); } for (int i = 0; i < normalizedVertices_.size(); i++) { output.writeMessage(2, normalizedVertices_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < vertices_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, vertices_.get(i)); } for (int i = 0; i < normalizedVertices_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, normalizedVertices_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p4beta1.BoundingPoly)) { return super.equals(obj); } com.google.cloud.vision.v1p4beta1.BoundingPoly other = (com.google.cloud.vision.v1p4beta1.BoundingPoly) obj; boolean result = true; 
result = result && getVerticesList().equals(other.getVerticesList()); result = result && getNormalizedVerticesList().equals(other.getNormalizedVerticesList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getVerticesCount() > 0) { hash = (37 * hash) + VERTICES_FIELD_NUMBER; hash = (53 * hash) + getVerticesList().hashCode(); } if (getNormalizedVerticesCount() > 0) { hash = (37 * hash) + NORMALIZED_VERTICES_FIELD_NUMBER; hash = (53 * hash) + getNormalizedVerticesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.vision.v1p4beta1.BoundingPoly 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A bounding polygon for the detected image annotation. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.BoundingPoly} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.BoundingPoly) com.google.cloud.vision.v1p4beta1.BoundingPolyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.GeometryProto .internal_static_google_cloud_vision_v1p4beta1_BoundingPoly_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.GeometryProto .internal_static_google_cloud_vision_v1p4beta1_BoundingPoly_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.BoundingPoly.class, com.google.cloud.vision.v1p4beta1.BoundingPoly.Builder.class); } // Construct using com.google.cloud.vision.v1p4beta1.BoundingPoly.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getVerticesFieldBuilder(); getNormalizedVerticesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (verticesBuilder_ == null) { vertices_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { verticesBuilder_.clear(); } if (normalizedVerticesBuilder_ == null) { normalizedVertices_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { normalizedVerticesBuilder_.clear(); } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p4beta1.GeometryProto .internal_static_google_cloud_vision_v1p4beta1_BoundingPoly_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.BoundingPoly getDefaultInstanceForType() { return com.google.cloud.vision.v1p4beta1.BoundingPoly.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p4beta1.BoundingPoly build() { com.google.cloud.vision.v1p4beta1.BoundingPoly result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.BoundingPoly buildPartial() { com.google.cloud.vision.v1p4beta1.BoundingPoly result = new com.google.cloud.vision.v1p4beta1.BoundingPoly(this); int from_bitField0_ = bitField0_; if (verticesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { vertices_ = java.util.Collections.unmodifiableList(vertices_); bitField0_ = (bitField0_ & ~0x00000001); } result.vertices_ = vertices_; } else { result.vertices_ = verticesBuilder_.build(); } if (normalizedVerticesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { normalizedVertices_ = java.util.Collections.unmodifiableList(normalizedVertices_); bitField0_ = (bitField0_ & ~0x00000002); } result.normalizedVertices_ = normalizedVertices_; } else { result.normalizedVertices_ = normalizedVerticesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p4beta1.BoundingPoly) { return mergeFrom((com.google.cloud.vision.v1p4beta1.BoundingPoly) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.BoundingPoly other) { if (other == com.google.cloud.vision.v1p4beta1.BoundingPoly.getDefaultInstance()) return this; if (verticesBuilder_ == null) { if (!other.vertices_.isEmpty()) { if (vertices_.isEmpty()) { vertices_ = other.vertices_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureVerticesIsMutable(); vertices_.addAll(other.vertices_); } onChanged(); } } else { if (!other.vertices_.isEmpty()) { if (verticesBuilder_.isEmpty()) { verticesBuilder_.dispose(); verticesBuilder_ = null; vertices_ = other.vertices_; bitField0_ = (bitField0_ & ~0x00000001); verticesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getVerticesFieldBuilder() : null; } else { verticesBuilder_.addAllMessages(other.vertices_); } } } if (normalizedVerticesBuilder_ == null) { if (!other.normalizedVertices_.isEmpty()) { if (normalizedVertices_.isEmpty()) { normalizedVertices_ = other.normalizedVertices_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureNormalizedVerticesIsMutable(); normalizedVertices_.addAll(other.normalizedVertices_); } onChanged(); } } else { if (!other.normalizedVertices_.isEmpty()) { if (normalizedVerticesBuilder_.isEmpty()) { normalizedVerticesBuilder_.dispose(); normalizedVerticesBuilder_ = null; normalizedVertices_ = other.normalizedVertices_; bitField0_ = (bitField0_ & ~0x00000002); normalizedVerticesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNormalizedVerticesFieldBuilder() : null; } else { normalizedVerticesBuilder_.addAllMessages(other.normalizedVertices_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.vision.v1p4beta1.BoundingPoly parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.vision.v1p4beta1.BoundingPoly) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.cloud.vision.v1p4beta1.Vertex> vertices_ = java.util.Collections.emptyList(); private void ensureVerticesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { vertices_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.Vertex>(vertices_); bitField0_ |= 0x00000001; } } 
private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Vertex, com.google.cloud.vision.v1p4beta1.Vertex.Builder, com.google.cloud.vision.v1p4beta1.VertexOrBuilder> verticesBuilder_; /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.Vertex> getVerticesList() { if (verticesBuilder_ == null) { return java.util.Collections.unmodifiableList(vertices_); } else { return verticesBuilder_.getMessageList(); } } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public int getVerticesCount() { if (verticesBuilder_ == null) { return vertices_.size(); } else { return verticesBuilder_.getCount(); } } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.Vertex getVertices(int index) { if (verticesBuilder_ == null) { return vertices_.get(index); } else { return verticesBuilder_.getMessage(index); } } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder setVertices(int index, com.google.cloud.vision.v1p4beta1.Vertex value) { if (verticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVerticesIsMutable(); vertices_.set(index, value); onChanged(); } else { verticesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The bounding polygon vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder setVertices( int index, com.google.cloud.vision.v1p4beta1.Vertex.Builder builderForValue) { if (verticesBuilder_ == null) { ensureVerticesIsMutable(); vertices_.set(index, builderForValue.build()); onChanged(); } else { verticesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder addVertices(com.google.cloud.vision.v1p4beta1.Vertex value) { if (verticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVerticesIsMutable(); vertices_.add(value); onChanged(); } else { verticesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder addVertices(int index, com.google.cloud.vision.v1p4beta1.Vertex value) { if (verticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVerticesIsMutable(); vertices_.add(index, value); onChanged(); } else { verticesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder addVertices(com.google.cloud.vision.v1p4beta1.Vertex.Builder builderForValue) { if (verticesBuilder_ == null) { ensureVerticesIsMutable(); vertices_.add(builderForValue.build()); onChanged(); } else { verticesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder addVertices( int index, com.google.cloud.vision.v1p4beta1.Vertex.Builder builderForValue) { if (verticesBuilder_ == null) { ensureVerticesIsMutable(); vertices_.add(index, builderForValue.build()); onChanged(); } else { verticesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder addAllVertices( java.lang.Iterable<? extends com.google.cloud.vision.v1p4beta1.Vertex> values) { if (verticesBuilder_ == null) { ensureVerticesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, vertices_); onChanged(); } else { verticesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder clearVertices() { if (verticesBuilder_ == null) { vertices_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { verticesBuilder_.clear(); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public Builder removeVertices(int index) { if (verticesBuilder_ == null) { ensureVerticesIsMutable(); vertices_.remove(index); onChanged(); } else { verticesBuilder_.remove(index); } return this; } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.Vertex.Builder getVerticesBuilder(int index) { return getVerticesFieldBuilder().getBuilder(index); } /** * * * <pre> * The bounding polygon vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.VertexOrBuilder getVerticesOrBuilder(int index) { if (verticesBuilder_ == null) { return vertices_.get(index); } else { return verticesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public java.util.List<? extends com.google.cloud.vision.v1p4beta1.VertexOrBuilder> getVerticesOrBuilderList() { if (verticesBuilder_ != null) { return verticesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(vertices_); } } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.Vertex.Builder addVerticesBuilder() { return getVerticesFieldBuilder() .addBuilder(com.google.cloud.vision.v1p4beta1.Vertex.getDefaultInstance()); } /** * * * <pre> * The bounding polygon vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public com.google.cloud.vision.v1p4beta1.Vertex.Builder addVerticesBuilder(int index) { return getVerticesFieldBuilder() .addBuilder(index, com.google.cloud.vision.v1p4beta1.Vertex.getDefaultInstance()); } /** * * * <pre> * The bounding polygon vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Vertex vertices = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.Vertex.Builder> getVerticesBuilderList() { return getVerticesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Vertex, com.google.cloud.vision.v1p4beta1.Vertex.Builder, com.google.cloud.vision.v1p4beta1.VertexOrBuilder> getVerticesFieldBuilder() { if (verticesBuilder_ == null) { verticesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Vertex, com.google.cloud.vision.v1p4beta1.Vertex.Builder, com.google.cloud.vision.v1p4beta1.VertexOrBuilder>( vertices_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); vertices_ = null; } return verticesBuilder_; } private java.util.List<com.google.cloud.vision.v1p4beta1.NormalizedVertex> normalizedVertices_ = java.util.Collections.emptyList(); private void ensureNormalizedVerticesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { normalizedVertices_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.NormalizedVertex>( normalizedVertices_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.NormalizedVertex, com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder, com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder> normalizedVerticesBuilder_; /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.NormalizedVertex> getNormalizedVerticesList() { if (normalizedVerticesBuilder_ == null) { return java.util.Collections.unmodifiableList(normalizedVertices_); } else { return normalizedVerticesBuilder_.getMessageList(); } } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public int getNormalizedVerticesCount() { if (normalizedVerticesBuilder_ == null) { return normalizedVertices_.size(); } else { return normalizedVerticesBuilder_.getCount(); } } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertex getNormalizedVertices(int index) { if (normalizedVerticesBuilder_ == null) { return normalizedVertices_.get(index); } else { return normalizedVerticesBuilder_.getMessage(index); } } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder setNormalizedVertices( int index, com.google.cloud.vision.v1p4beta1.NormalizedVertex value) { if (normalizedVerticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNormalizedVerticesIsMutable(); normalizedVertices_.set(index, value); onChanged(); } else { normalizedVerticesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder setNormalizedVertices( int index, com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder builderForValue) { if (normalizedVerticesBuilder_ == null) { ensureNormalizedVerticesIsMutable(); normalizedVertices_.set(index, builderForValue.build()); onChanged(); } else { normalizedVerticesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder addNormalizedVertices(com.google.cloud.vision.v1p4beta1.NormalizedVertex value) { if (normalizedVerticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNormalizedVerticesIsMutable(); normalizedVertices_.add(value); onChanged(); } else { normalizedVerticesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder addNormalizedVertices( int index, com.google.cloud.vision.v1p4beta1.NormalizedVertex value) { if (normalizedVerticesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNormalizedVerticesIsMutable(); normalizedVertices_.add(index, value); onChanged(); } else { normalizedVerticesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder addNormalizedVertices( com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder builderForValue) { if (normalizedVerticesBuilder_ == null) { ensureNormalizedVerticesIsMutable(); normalizedVertices_.add(builderForValue.build()); onChanged(); } else { normalizedVerticesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder addNormalizedVertices( int index, com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder builderForValue) { if (normalizedVerticesBuilder_ == null) { ensureNormalizedVerticesIsMutable(); normalizedVertices_.add(index, builderForValue.build()); onChanged(); } else { normalizedVerticesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder addAllNormalizedVertices( java.lang.Iterable<? extends com.google.cloud.vision.v1p4beta1.NormalizedVertex> values) { if (normalizedVerticesBuilder_ == null) { ensureNormalizedVerticesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, normalizedVertices_); onChanged(); } else { normalizedVerticesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder clearNormalizedVertices() { if (normalizedVerticesBuilder_ == null) { normalizedVertices_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { normalizedVerticesBuilder_.clear(); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public Builder removeNormalizedVertices(int index) { if (normalizedVerticesBuilder_ == null) { ensureNormalizedVerticesIsMutable(); normalizedVertices_.remove(index); onChanged(); } else { normalizedVerticesBuilder_.remove(index); } return this; } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder getNormalizedVerticesBuilder( int index) { return getNormalizedVerticesFieldBuilder().getBuilder(index); } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder getNormalizedVerticesOrBuilder(int index) { if (normalizedVerticesBuilder_ == null) { return normalizedVertices_.get(index); } else { return normalizedVerticesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public java.util.List<? extends com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder> getNormalizedVerticesOrBuilderList() { if (normalizedVerticesBuilder_ != null) { return normalizedVerticesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(normalizedVertices_); } } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder addNormalizedVerticesBuilder() { return getNormalizedVerticesFieldBuilder() .addBuilder(com.google.cloud.vision.v1p4beta1.NormalizedVertex.getDefaultInstance()); } /** * * * <pre> * The bounding polygon normalized vertices. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder addNormalizedVerticesBuilder( int index) { return getNormalizedVerticesFieldBuilder() .addBuilder( index, com.google.cloud.vision.v1p4beta1.NormalizedVertex.getDefaultInstance()); } /** * * * <pre> * The bounding polygon normalized vertices. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.NormalizedVertex normalized_vertices = 2; * </code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder> getNormalizedVerticesBuilderList() { return getNormalizedVerticesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.NormalizedVertex, com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder, com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder> getNormalizedVerticesFieldBuilder() { if (normalizedVerticesBuilder_ == null) { normalizedVerticesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.NormalizedVertex, com.google.cloud.vision.v1p4beta1.NormalizedVertex.Builder, com.google.cloud.vision.v1p4beta1.NormalizedVertexOrBuilder>( normalizedVertices_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); normalizedVertices_ = null; } return normalizedVerticesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.BoundingPoly) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.BoundingPoly) private static final 
com.google.cloud.vision.v1p4beta1.BoundingPoly DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.BoundingPoly(); } public static com.google.cloud.vision.v1p4beta1.BoundingPoly getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BoundingPoly> PARSER = new com.google.protobuf.AbstractParser<BoundingPoly>() { @java.lang.Override public BoundingPoly parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BoundingPoly(input, extensionRegistry); } }; public static com.google.protobuf.Parser<BoundingPoly> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BoundingPoly> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.BoundingPoly getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security.token.delegation;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import javax.crypto.SecretKey;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.Time;

import com.google.common.base.Preconditions;

/**
 * Manages the lifecycle of delegation tokens: it rolls master keys on a
 * schedule, creates and verifies token passwords derived from those keys,
 * renews and cancels tokens, and evicts expired tokens/keys via a background
 * {@link ExpiredTokenRemover} daemon thread.
 * <p>
 * Thread-safety: mutable state ({@link #currentTokens}, {@link #allKeys},
 * {@link #currentId}, {@link #delegationTokenSequenceNumber},
 * {@code currentKey}) is guarded by this object's monitor; {@link #running}
 * is {@code volatile} so the remover thread observes shutdown promptly.
 * Subclasses (e.g. ZooKeeper-backed implementations) may externalize storage
 * by overriding the protected accessor/store hooks below.
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "Hive"})
@InterfaceStability.Evolving
public abstract
class AbstractDelegationTokenSecretManager<TokenIdent
extends AbstractDelegationTokenIdentifier>
   extends SecretManager<TokenIdent> {
  private static final Log LOG = LogFactory
      .getLog(AbstractDelegationTokenSecretManager.class);

  /**
   * Cache of currently valid tokens, mapping from DelegationTokenIdentifier
   * to DelegationTokenInformation. Protected by this object lock.
   */
  protected final Map<TokenIdent, DelegationTokenInformation> currentTokens
      = new HashMap<TokenIdent, DelegationTokenInformation>();

  /**
   * Sequence number to create DelegationTokenIdentifier.
   * Protected by this object lock.
   */
  protected int delegationTokenSequenceNumber = 0;

  /**
   * Access to allKeys is protected by this object lock
   */
  protected final Map<Integer, DelegationKey> allKeys
      = new HashMap<Integer, DelegationKey>();

  /**
   * Access to currentId is protected by this object lock.
   */
  protected int currentId = 0;

  /**
   * Access to currentKey is protected by this object lock
   */
  private DelegationKey currentKey;

  // Intervals/lifetimes are all in milliseconds (see constructor javadoc).
  private long keyUpdateInterval;
  private long tokenMaxLifetime;
  private long tokenRemoverScanInterval;
  private long tokenRenewInterval;

  /**
   * Whether to store a token's tracking ID in its TokenInformation.
   * Can be overridden by a subclass.
   */
  protected boolean storeTokenTrackingId;

  // Background daemon that rolls master keys and evicts expired tokens.
  private Thread tokenRemoverThread;
  protected volatile boolean running;

  /**
   * If the delegation token update thread holds this lock, it will
   * not get interrupted.
   */
  protected Object noInterruptsLock = new Object();

  /**
   * Create a secret manager
   * @param delegationKeyUpdateInterval the number of milliseconds for rolling
   *        new secret keys.
   * @param delegationTokenMaxLifetime the maximum lifetime of the delegation
   *        tokens in milliseconds
   * @param delegationTokenRenewInterval how often the tokens must be renewed
   *        in milliseconds
   * @param delegationTokenRemoverScanInterval how often the tokens are scanned
   *        for expired tokens in milliseconds
   */
  public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval,
      long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
      long delegationTokenRemoverScanInterval) {
    this.keyUpdateInterval = delegationKeyUpdateInterval;
    this.tokenMaxLifetime = delegationTokenMaxLifetime;
    this.tokenRenewInterval = delegationTokenRenewInterval;
    this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
    this.storeTokenTrackingId = false;
  }

  /** should be called before this object is used */
  public void startThreads() throws IOException {
    Preconditions.checkState(!running);
    // Generate the first master key before accepting any token requests.
    updateCurrentKey();
    synchronized (this) {
      running = true;
      tokenRemoverThread = new Daemon(new ExpiredTokenRemover());
      tokenRemoverThread.start();
    }
  }

  /**
   * Reset all data structures and mutable state.
   */
  public synchronized void reset() {
    setCurrentKeyId(0);
    allKeys.clear();
    setDelegationTokenSeqNum(0);
    currentTokens.clear();
  }

  /**
   * Add a previously used master key to cache (when NN restarts),
   * should be called before activate().
   * Also advances the current key id past the recovered key's id so newly
   * generated keys never collide with recovered ones.
   */
  public synchronized void addKey(DelegationKey key) throws IOException {
    if (running) // a safety check
      throw new IOException("Can't add delegation key to a running SecretManager.");
    if (key.getKeyId() > getCurrentKeyId()) {
      setCurrentKeyId(key.getKeyId());
    }
    allKeys.put(key.getKeyId(), key);
  }

  /** @return a snapshot array of all cached master keys. */
  public synchronized DelegationKey[] getAllKeys() {
    return allKeys.values().toArray(new DelegationKey[0]);
  }

  // The following hooks are no-ops by default; subclasses override the ones
  // relevant to their persistence model (markers note the known overriders).

  // HDFS
  protected void logUpdateMasterKey(DelegationKey key) throws IOException {
    return;
  }

  // HDFS
  protected void logExpireToken(TokenIdent ident) throws IOException {
    return;
  }

  // RM
  protected void storeNewMasterKey(DelegationKey key) throws IOException {
    return;
  }

  // RM
  protected void removeStoredMasterKey(DelegationKey key) {
    return;
  }

  // RM
  protected void storeNewToken(TokenIdent ident, long renewDate) throws IOException{
    return;
  }

  // RM
  protected void removeStoredToken(TokenIdent ident) throws IOException {
  }

  // RM
  protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOException {
    return;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized int getCurrentKeyId() {
    return currentId;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized int incrementCurrentKeyId() {
    return ++currentId;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized void setCurrentKeyId(int keyId) {
    currentId = keyId;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized int getDelegationTokenSeqNum() {
    return delegationTokenSequenceNumber;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized int incrementDelegationTokenSeqNum() {
    return ++delegationTokenSequenceNumber;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected synchronized void setDelegationTokenSeqNum(int seqNum) {
    delegationTokenSequenceNumber = seqNum;
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected DelegationKey getDelegationKey(int keyId) {
    return allKeys.get(keyId);
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected void storeDelegationKey(DelegationKey key) throws IOException {
    allKeys.put(key.getKeyId(), key);
    storeNewMasterKey(key);
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected void updateDelegationKey(DelegationKey key) throws IOException {
    allKeys.put(key.getKeyId(), key);
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
    return currentTokens.get(ident);
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected void storeToken(TokenIdent ident,
      DelegationTokenInformation tokenInfo) throws IOException {
    currentTokens.put(ident, tokenInfo);
    storeNewToken(ident, tokenInfo.getRenewDate());
  }

  /**
   * For subclasses externalizing the storage, for example Zookeeper
   * based implementations
   */
  protected void updateToken(TokenIdent ident,
      DelegationTokenInformation tokenInfo) throws IOException {
    currentTokens.put(ident, tokenInfo);
    updateStoredToken(ident, tokenInfo.getRenewDate());
  }

  /**
   * This method is intended to be used for recovering persisted delegation
   * tokens
   * This method must be called before this secret manager is activated (before
   * startThreads() is called)
   * @param identifier identifier read from persistent storage
   * @param renewDate token renew time
   * @throws IOException if the manager is already running or the token was
   *         already recovered
   */
  public synchronized void addPersistedDelegationToken(
      TokenIdent identifier, long renewDate) throws IOException {
    if (running) {
      // a safety check
      throw new IOException(
          "Can't add persisted delegation token to a running SecretManager.");
    }
    int keyId = identifier.getMasterKeyId();
    DelegationKey dKey = allKeys.get(keyId);
    if (dKey == null) {
      // Without the originating master key the password cannot be recomputed;
      // the token is dropped (with a warning) rather than failing recovery.
      LOG.warn("No KEY found for persisted identifier " + identifier.toString());
      return;
    }
    byte[] password = createPassword(identifier.getBytes(), dKey.getKey());
    // Keep the sequence number ahead of every recovered token's number.
    if (identifier.getSequenceNumber() > getDelegationTokenSeqNum()) {
      setDelegationTokenSeqNum(identifier.getSequenceNumber());
    }
    if (getTokenInfo(identifier) == null) {
      currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
          password, getTrackingIdIfEnabled(identifier)));
    } else {
      throw new IOException("Same delegation token being added twice.");
    }
  }

  /**
   * Update the current master key
   * This is called once by startThreads before tokenRemoverThread is created,
   * and only by tokenRemoverThread afterwards.
   */
  private void updateCurrentKey() throws IOException {
    LOG.info("Updating the current master key for generating delegation tokens");
    /* Create a new currentKey with an estimated expiry date. */
    int newCurrentId;
    synchronized (this) {
      newCurrentId = incrementCurrentKeyId();
    }
    // Expiry covers one update interval plus the max lifetime of any token
    // signed with this key, so tokens remain verifiable until they expire.
    DelegationKey newKey = new DelegationKey(newCurrentId, System
        .currentTimeMillis() + keyUpdateInterval + tokenMaxLifetime,
        generateSecret());
    //Log must be invoked outside the lock on 'this'
    logUpdateMasterKey(newKey);
    synchronized (this) {
      currentKey = newKey;
      storeDelegationKey(currentKey);
    }
  }

  /**
   * Update the current master key for generating delegation tokens
   * It should be called only by tokenRemoverThread.
   */
  void rollMasterKey() throws IOException {
    synchronized (this) {
      removeExpiredKeys();
      /* set final expiry date for retiring currentKey */
      currentKey.setExpiryDate(Time.now() + tokenMaxLifetime);
      /*
       * currentKey might have been removed by removeExpiredKeys(), if
       * updateMasterKey() isn't called at expected interval. Add it back to
       * allKeys just in case.
       */
      updateDelegationKey(currentKey);
    }
    // Generate and install the replacement key (outside the lock above;
    // updateCurrentKey() takes the monitor itself where needed).
    updateCurrentKey();
  }

  /** Drop expired master keys from the cache and from external storage. */
  private synchronized void removeExpiredKeys() {
    long now = Time.now();
    for (Iterator<Map.Entry<Integer, DelegationKey>> it = allKeys.entrySet()
        .iterator(); it.hasNext();) {
      Map.Entry<Integer, DelegationKey> e = it.next();
      if (e.getValue().getExpiryDate() < now) {
        it.remove();
        // ensure the tokens generated by this current key can be recovered
        // with this current key after this current key is rolled
        if(!e.getValue().equals(currentKey))
          removeStoredMasterKey(e.getValue());
      }
    }
  }

  /**
   * Fill in the identifier's issue/max dates, key id and sequence number,
   * then derive the token password from the current master key and cache
   * the resulting token.
   */
  @Override
  protected synchronized byte[] createPassword(TokenIdent identifier) {
    int sequenceNum;
    long now = Time.now();
    sequenceNum = incrementDelegationTokenSeqNum();
    identifier.setIssueDate(now);
    identifier.setMaxDate(now + tokenMaxLifetime);
    identifier.setMasterKeyId(currentKey.getKeyId());
    identifier.setSequenceNumber(sequenceNum);
    LOG.info("Creating password for identifier: " + identifier
        + ", currentKey: " + currentKey.getKeyId());
    byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
    DelegationTokenInformation tokenInfo = new DelegationTokenInformation(now
        + tokenRenewInterval, password, getTrackingIdIfEnabled(identifier));
    try {
      storeToken(identifier, tokenInfo);
    } catch (IOException ioe) {
      // NOTE(review): store failure is logged but the password is still
      // returned, so the caller gets a token that external storage may not
      // know about — presumably a deliberate availability trade-off; confirm.
      LOG.error("Could not store token !!", ioe);
    }
    return password;
  }

  /**
   * Find the DelegationTokenInformation for the given token id, and verify that
   * if the token is expired. Note that this method should be called with
   * acquiring the secret manager's monitor.
   */
  protected DelegationTokenInformation checkToken(TokenIdent identifier)
      throws InvalidToken {
    assert Thread.holdsLock(this);
    DelegationTokenInformation info = getTokenInfo(identifier);
    if (info == null) {
      throw new InvalidToken("token (" + identifier.toString()
          + ") can't be found in cache");
    }
    if (info.getRenewDate() < Time.now()) {
      throw new InvalidToken("token (" + identifier.toString() + ") is expired");
    }
    return info;
  }

  @Override
  public synchronized byte[] retrievePassword(TokenIdent identifier)
      throws InvalidToken {
    return checkToken(identifier).getPassword();
  }

  /** @return the tracking id for {@code ident}, or null when disabled. */
  protected String getTrackingIdIfEnabled(TokenIdent ident) {
    if (storeTokenTrackingId) {
      return ident.getTrackingId();
    }
    return null;
  }

  /** @return the cached tracking id for the token, or null if unknown. */
  public synchronized String getTokenTrackingId(TokenIdent identifier) {
    DelegationTokenInformation info = getTokenInfo(identifier);
    if (info == null) {
      return null;
    }
    return info.getTrackingId();
  }

  /**
   * Verifies that the given identifier and password are valid and match.
   * @param identifier Token identifier.
   * @param password Password in the token.
   * @throws InvalidToken if the token is unknown, expired, or the password
   *         does not match
   */
  public synchronized void verifyToken(TokenIdent identifier, byte[] password)
      throws InvalidToken {
    byte[] storedPassword = retrievePassword(identifier);
    if (!Arrays.equals(password, storedPassword)) {
      throw new InvalidToken("token (" + identifier
          + ") is invalid, password doesn't match");
    }
  }

  /**
   * Renew a delegation token.
   * @param token the token to renew
   * @param renewer the full principal name of the user doing the renewal
   * @return the new expiration time
   * @throws InvalidToken if the token is invalid
   * @throws AccessControlException if the user can't renew token
   */
  public synchronized long renewToken(Token<TokenIdent> token,
      String renewer) throws InvalidToken, IOException {
    ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
    DataInputStream in = new DataInputStream(buf);
    TokenIdent id = createIdentifier();
    id.readFields(in);
    LOG.info("Token renewal for identifier: " + id
        + "; total currentTokens " +  currentTokens.size());

    long now = Time.now();
    if (id.getMaxDate() < now) {
      throw new InvalidToken(renewer + " tried to renew an expired token");
    }
    if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
      throw new AccessControlException(renewer +
          " tried to renew a token without a renewer");
    }
    if (!id.getRenewer().toString().equals(renewer)) {
      throw new AccessControlException(renewer
          + " tries to renew a token with renewer " + id.getRenewer());
    }
    DelegationKey key = getDelegationKey(id.getMasterKeyId());
    if (key == null) {
      throw new InvalidToken("Unable to find master key for keyId="
          + id.getMasterKeyId()
          + " from cache. Failed to renew an unexpired token"
          + " with sequenceNumber=" + id.getSequenceNumber());
    }
    // Recompute the password from the signing key to prove the caller holds
    // a genuine token, not just its identifier bytes.
    byte[] password = createPassword(token.getIdentifier(), key.getKey());
    if (!Arrays.equals(password, token.getPassword())) {
      throw new AccessControlException(renewer
          + " is trying to renew a token with wrong password");
    }
    // New expiry is one renew interval out, capped by the token's max date.
    long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
    String trackingId = getTrackingIdIfEnabled(id);
    DelegationTokenInformation info = new DelegationTokenInformation(renewTime,
        password, trackingId);

    if (getTokenInfo(id) == null) {
      throw new InvalidToken("Renewal request for unknown token");
    }
    updateToken(id, info);
    return renewTime;
  }

  /**
   * Cancel a token by removing it from cache.
   * Only the token's owner, or its designated renewer (matched by Kerberos
   * short name), may cancel it.
   * @return Identifier of the canceled token
   * @throws InvalidToken for invalid token
   * @throws AccessControlException if the user isn't allowed to cancel
   */
  public synchronized TokenIdent cancelToken(Token<TokenIdent> token,
      String canceller) throws IOException {
    ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
    DataInputStream in = new DataInputStream(buf);
    TokenIdent id = createIdentifier();
    id.readFields(in);
    LOG.info("Token cancelation requested for identifier: "+id);

    if (id.getUser() == null) {
      throw new InvalidToken("Token with no owner");
    }
    String owner = id.getUser().getUserName();
    Text renewer = id.getRenewer();
    HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
    String cancelerShortName = cancelerKrbName.getShortName();
    if (!canceller.equals(owner)
        && (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
            .equals(renewer.toString()))) {
      throw new AccessControlException(canceller
          + " is not authorized to cancel the token");
    }
    DelegationTokenInformation info = currentTokens.remove(id);
    if (info == null) {
      throw new InvalidToken("Token not found");
    }
    removeStoredToken(id);
    return id;
  }

  /**
   * Convert the byte[] to a secret key
   * @param key the byte[] to create the secret key from
   * @return the secret key
   */
  public static SecretKey createSecretKey(byte[] key) {
    return SecretManager.createSecretKey(key);
  }

  /** Class to encapsulate a token's renew date and password.
   */
  @InterfaceStability.Evolving
  public static class DelegationTokenInformation {
    long renewDate;
    byte[] password;
    String trackingId;

    public DelegationTokenInformation(long renewDate, byte[] password) {
      this(renewDate, password, null);
    }

    public DelegationTokenInformation(long renewDate, byte[] password,
        String trackingId) {
      this.renewDate = renewDate;
      this.password = password;
      this.trackingId = trackingId;
    }
    /** returns renew date */
    public long getRenewDate() {
      return renewDate;
    }
    /** returns password */
    byte[] getPassword() {
      return password;
    }
    /** returns tracking id */
    public String getTrackingId() {
      return trackingId;
    }
  }

  /** Remove expired delegation tokens from cache */
  private void removeExpiredToken() throws IOException {
    long now = Time.now();
    Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
    synchronized (this) {
      Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
          currentTokens.entrySet().iterator();
      while (i.hasNext()) {
        Map.Entry<TokenIdent, DelegationTokenInformation> entry = i.next();
        long renewDate = entry.getValue().getRenewDate();
        if (renewDate < now) {
          expiredTokens.add(entry.getKey());
          i.remove();
        }
      }
    }
    // don't hold lock on 'this' to avoid edit log updates blocking token ops
    for (TokenIdent ident : expiredTokens) {
      logExpireToken(ident);
      removeStoredToken(ident);
    }
  }

  public void stopThreads() {
    if (LOG.isDebugEnabled())
      LOG.debug("Stopping expired delegation token remover thread");
    running = false;

    if (tokenRemoverThread != null) {
      // Interrupt under noInterruptsLock so the remover is never interrupted
      // mid-critical-section (see noInterruptsLock javadoc).
      synchronized (noInterruptsLock) {
        tokenRemoverThread.interrupt();
      }
      try {
        tokenRemoverThread.join();
      } catch (InterruptedException e) {
        throw new RuntimeException(
            "Unable to join on token removal thread", e);
      }
    }
  }

  /**
   * is secretMgr running
   * @return true if secret mgr is running
   */
  public synchronized boolean isRunning() {
    return running;
  }

  /**
   * Daemon loop: rolls the master key every keyUpdateInterval and purges
   * expired tokens every tokenRemoverScanInterval, polling at most every
   * five seconds. Any unexpected throwable terminates the process.
   */
  private class ExpiredTokenRemover extends Thread {
    private long lastMasterKeyUpdate;
    private long lastTokenCacheCleanup;

    @Override
    public void run() {
      LOG.info("Starting expired delegation token remover thread, "
          + "tokenRemoverScanInterval=" + tokenRemoverScanInterval
          / (60 * 1000) + " min(s)");
      try {
        while (running) {
          long now = Time.now();
          if (lastMasterKeyUpdate + keyUpdateInterval < now) {
            try {
              rollMasterKey();
              lastMasterKeyUpdate = now;
            } catch (IOException e) {
              // Keep running; key rolling will be retried on the next pass.
              LOG.error("Master key updating failed: ", e);
            }
          }
          if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {
            removeExpiredToken();
            lastTokenCacheCleanup = now;
          }
          try {
            Thread.sleep(Math.min(5000, keyUpdateInterval)); // 5 seconds
          } catch (InterruptedException ie) {
            // Interrupt is the shutdown signal from stopThreads(); the
            // 'running' flag is rechecked by the loop condition.
            LOG.error("ExpiredTokenRemover received " + ie);
          }
        }
      } catch (Throwable t) {
        LOG.error("ExpiredTokenRemover thread received unexpected exception",
            t);
        Runtime.getRuntime().exit(-1);
      }
    }
  }

  /**
   * Decode the token identifier. The subclass can customize the way to decode
   * the token identifier.
   *
   * @param token the token where to extract the identifier
   * @return the delegation token identifier
   * @throws IOException
   */
  public TokenIdent decodeTokenIdentifier(Token<TokenIdent> token) throws IOException {
    return token.decodeIdentifier();
  }
}
/* * Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.mb.platform.tests.clustering; import java.net.InetSocketAddress; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Parameters; import org.testng.annotations.Test; import org.wso2.carbon.andes.stub.AndesAdminServiceBrokerManagerAdminException; import org.wso2.carbon.authenticator.stub.LoginAuthenticationExceptionException; import org.wso2.carbon.automation.engine.context.TestUserMode; import org.wso2.carbon.integration.common.utils.exceptions.AutomationUtilException; import org.wso2.mb.integration.common.clients.AndesClient; import org.wso2.mb.integration.common.clients.configurations.AndesJMSConsumerClientConfiguration; import org.wso2.mb.integration.common.clients.configurations.AndesJMSPublisherClientConfiguration; import org.wso2.mb.integration.common.clients.exceptions.AndesClientException; import org.wso2.mb.integration.common.clients.operations.clients.AndesAdminClient; import org.wso2.mb.integration.common.clients.operations.utils.AndesClientConstants; import org.wso2.mb.integration.common.clients.exceptions.AndesClientConfigurationException; import org.wso2.mb.integration.common.clients.operations.utils.AndesClientUtils; import org.wso2.mb.integration.common.clients.operations.utils.ExchangeType; import 
org.wso2.mb.integration.common.clients.operations.utils.JMSMessageType; import org.wso2.mb.platform.common.utils.DataAccessUtil; import org.wso2.mb.platform.common.utils.MBPlatformBaseTest; import org.wso2.mb.platform.common.utils.exceptions.DataAccessUtilException; import org.xml.sax.SAXException; import javax.jms.JMSException; import javax.naming.NamingException; import javax.xml.stream.XMLStreamException; import javax.xml.xpath.XPathExpressionException; import java.io.IOException; import java.net.URISyntaxException; import java.rmi.RemoteException; /** * This class includes test cases to test different types of messages (e.g. byte, map, object, * stream) which can be sent to a topic. */ public class DifferentMessageTypesQueueTestCase extends MBPlatformBaseTest { private DataAccessUtil dataAccessUtil = new DataAccessUtil(); /** * Prepare environment for tests. * * @throws LoginAuthenticationExceptionException * @throws IOException * @throws XPathExpressionException * @throws URISyntaxException * @throws SAXException * @throws XMLStreamException */ @BeforeClass(alwaysRun = true) public void init() throws LoginAuthenticationExceptionException, IOException, XPathExpressionException, URISyntaxException, SAXException, XMLStreamException, AutomationUtilException { super.initCluster(TestUserMode.SUPER_TENANT_ADMIN); super.initAndesAdminClients(); } /** * Publish byte messages to a queue in a single node and receive from the same node with one * subscriber * * @param messageCount number of message to send and receive * @throws XPathExpressionException * @throws AndesClientConfigurationException * @throws NamingException * @throws JMSException * @throws IOException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "single publisher single subscriber byte messages", enabled = true) @Parameters({"messageCount"}) public void testByteMessageSingleSubSinglePub(long messageCount) throws XPathExpressionException, AndesClientConfigurationException, 
NamingException, JMSException, IOException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.BYTE, 1, "byteMessageQueue1", messageCount); } /** * Publish byte messages to a queue in a single node and receive from the same node with * multiple publishers and subscribe to that queue using multiple subscribers * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "multiple publisher multiple subscriber byte " + "messages", enabled = true) @Parameters({"messageCount"}) public void testByteMessageMultipleSubMultiplePub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.BYTE, 10, "byteMessageQueue2", messageCount); } /** * Publish map messages to a queue in a single node and receive from the same node with one * subscriber * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "single publisher single subscriber map messages", enabled = true) @Parameters({"messageCount"}) public void testMapMessageSingleSubSinglePub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.MAP, 1, "mapMessageQueue1", messageCount); } /** * Publish map messages to a queue in a single node and receive from the same node with * multiple publishers and subscribe to 
that queue using multiple subscribers * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "multiple publisher multiple subscriber map " + "messages", enabled = true) @Parameters({"messageCount"}) public void testMapMessageMultiplePubMultipleSub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.MAP, 10, "mapMessageQueue2", messageCount); } /** * Publish Object messages to a queue in a single node and receive from the same node with one * subscriber * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "single publisher single subscriber object messages", enabled = true) @Parameters({"messageCount"}) public void testObjectMessageSingleSubSinglePub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.OBJECT, 1, "objectMessageQueue1", messageCount); } /** * Publish object messages to a queue in a single node and receive from the same node with * multiple publishers and subscribe to that queue using multiple subscribers * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = 
"wso2.mb", description = "multiple publisher multiple subscriber object " + "messages", enabled = true) @Parameters({"messageCount"}) public void testObjectMessageMultiplePubMultipleSub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.OBJECT, 10, "objectMessageQueue2", messageCount); } /** * Publish stream messages to a queue in a single node and receive from the same node with one * subscriber * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "single publisher single subscriber stream messages", enabled = true) @Parameters({"messageCount"}) public void testStreamMessageSingleSubSinglePub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.STREAM, 1, "streamMessageQueue1", messageCount); } /** * Publish stream messages to a queue in a single node and receive from the same node with * multiple publishers and subscribe to that queue using multiple subscribers * * @param messageCount number of message to send and receive * @throws IOException * @throws JMSException * @throws AndesClientConfigurationException * @throws XPathExpressionException * @throws NamingException * @throws AndesClientException */ @Test(groups = "wso2.mb", description = "multiple publisher multiple subscriber stream " + "messages", enabled = true) @Parameters({"messageCount"}) public void testStreamMessageMultiplePubMultipleSub(long messageCount) throws IOException, JMSException, AndesClientConfigurationException, 
XPathExpressionException, NamingException, AndesClientException, DataAccessUtilException { this.runMessageTypeTestCase(JMSMessageType.STREAM, 10, "streamMessageQueue2", messageCount); } /** * Cleanup after running tests. * * @throws AndesAdminServiceBrokerManagerAdminException * @throws RemoteException */ @AfterClass(alwaysRun = true) public void destroy() throws AndesAdminServiceBrokerManagerAdminException, RemoteException { String randomInstanceKey = getRandomMBInstance(); AndesAdminClient tempAndesAdminClient = getAndesAdminClientWithKey(randomInstanceKey); if (tempAndesAdminClient.getQueueByName("byteMessageQueue1") != null) { tempAndesAdminClient.deleteQueue("byteMessageQueue1"); } if (tempAndesAdminClient.getQueueByName("byteMessageQueue2") != null) { tempAndesAdminClient.deleteQueue("byteMessageQueue2"); } if (tempAndesAdminClient.getQueueByName("mapMessageQueue1") != null) { tempAndesAdminClient.deleteQueue("mapMessageQueue1"); } if (tempAndesAdminClient.getQueueByName("mapMessageQueue2") != null) { tempAndesAdminClient.deleteQueue("mapMessageQueue2"); } if (tempAndesAdminClient.getQueueByName("objectMessageQueue1") != null) { tempAndesAdminClient.deleteQueue("objectMessageQueue1"); } if (tempAndesAdminClient.getQueueByName("objectMessageQueue2") != null) { tempAndesAdminClient.deleteQueue("objectMessageQueue2"); } if (tempAndesAdminClient.getQueueByName("streamMessageQueue1") != null) { tempAndesAdminClient.deleteQueue("streamMessageQueue1"); } if (tempAndesAdminClient.getQueueByName("streamMessageQueue2") != null) { tempAndesAdminClient.deleteQueue("streamMessageQueue2"); } } /** * Runs a topic send and receive test case * * @param messageType The message type to be used when publishing * @param numberOfPublishers The number of publishers * @param destinationName The destination name for sender and receiver * @param messageCount Number of message to send and receive * @throws XPathExpressionException * @throws AndesClientConfigurationException * @throws 
NamingException * @throws JMSException * @throws IOException * @throws AndesClientException */ private void runMessageTypeTestCase(JMSMessageType messageType, int numberOfPublishers, String destinationName, long messageCount) throws XPathExpressionException, AndesClientConfigurationException, NamingException, JMSException, IOException, AndesClientException, DataAccessUtilException { // Number of messages send long sendCount = messageCount; long printDivider = 10L; InetSocketAddress brokerAddress = getRandomAMQPBrokerAddress(); // Creating a consumer client configuration AndesJMSConsumerClientConfiguration consumerConfig = new AndesJMSConsumerClientConfiguration(brokerAddress.getHostName(), brokerAddress.getPort(), ExchangeType.QUEUE, destinationName); consumerConfig.setMaximumMessagesToReceived(sendCount * numberOfPublishers); consumerConfig.setPrintsPerMessageCount(sendCount / printDivider); // Creating publisher client configuration AndesJMSPublisherClientConfiguration publisherConfig = new AndesJMSPublisherClientConfiguration(brokerAddress.getHostName(), brokerAddress.getPort(), ExchangeType.QUEUE, destinationName); publisherConfig.setNumberOfMessagesToSend(sendCount); publisherConfig.setPrintsPerMessageCount(sendCount / printDivider); publisherConfig.setJMSMessageType(messageType); // Creating clients AndesClient consumerClient = new AndesClient(consumerConfig, true); consumerClient.startClient(); AndesClient publisherClient = new AndesClient(publisherConfig, numberOfPublishers, true); publisherClient.startClient(); AndesClientUtils.waitForMessagesAndShutdown(consumerClient, AndesClientConstants.DEFAULT_RUN_TIME); // Evaluating Assert.assertEquals(publisherClient.getSentMessageCount(), sendCount * numberOfPublishers, "Message sending failed."); Assert.assertEquals(consumerClient.getReceivedMessageCount(), sendCount * numberOfPublishers, "Message receiving failed."); // Evaluate messages left in database 
Assert.assertEquals(dataAccessUtil.getMessageCountForQueue(destinationName), 0, "Messages left in database"); // Evaluate slots left in database Assert.assertEquals(dataAccessUtil.getAssignedSlotCountForQueue(destinationName), 0, "Slots left in database"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.igfs; import org.apache.commons.io.IOUtils; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteFileSystem; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.igfs.IgfsDirectoryNotEmptyException; import org.apache.ignite.igfs.IgfsException; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; import org.apache.ignite.igfs.IgfsInputStream; import org.apache.ignite.igfs.IgfsOutputStream; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.cache.GridCacheAdapter; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import 
org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.testframework.GridTestUtils; import org.jetbrains.annotations.Nullable; import javax.cache.Cache; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheMode.REPLICATED; /** * Tests for {@link IgfsProcessor}. */ public class IgfsProcessorSelfTest extends IgfsCommonAbstractTest { /** Test IP finder. */ private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true); /** Random numbers generator. */ protected final SecureRandom rnd = new SecureRandom(); /** File system. */ protected IgniteFileSystem igfs; /** Meta cache. */ private GridCacheAdapter<Object, Object> metaCache; /** Meta cache name. 
*/ private String metaCacheName; /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { IgniteEx grid = grid(0); igfs = grid.fileSystem(igfsName()); FileSystemConfiguration[] cfgs = grid.configuration().getFileSystemConfiguration(); assert cfgs.length == 1; metaCacheName = cfgs[0].getMetaCacheConfiguration().getName(); metaCache = ((IgniteKernal)grid).internalCache(metaCacheName); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); igfs.clear(); } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { startGrids(nodesCount()); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { stopAllGrids(); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); TcpDiscoverySpi discoSpi = new TcpDiscoverySpi(); discoSpi.setIpFinder(IP_FINDER); cfg.setDiscoverySpi(discoSpi); FileSystemConfiguration igfsCfg = new FileSystemConfiguration(); igfsCfg.setMetaCacheConfiguration(cacheConfiguration("meta")); igfsCfg.setDataCacheConfiguration(cacheConfiguration("data")); igfsCfg.setName("igfs"); cfg.setFileSystemConfiguration(igfsCfg); return cfg; } /** {@inheritDoc} */ protected CacheConfiguration cacheConfiguration(String cacheName) { CacheConfiguration cacheCfg = defaultCacheConfiguration(); cacheCfg.setName(cacheName); if ("meta".equals(cacheName)) cacheCfg.setCacheMode(REPLICATED); else { cacheCfg.setCacheMode(PARTITIONED); cacheCfg.setNearConfiguration(null); cacheCfg.setBackups(0); cacheCfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(128)); } cacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); cacheCfg.setAtomicityMode(TRANSACTIONAL); return cacheCfg; } /** @return Test nodes count. */ public int nodesCount() { return 1; } /** @return FS name. 
*/ public String igfsName() { return "igfs"; } /** @throws Exception If failed. */ public void testigfsEnabled() throws Exception { IgniteFileSystem igfs = grid(0).fileSystem(igfsName()); assertNotNull(igfs); } /** * Test properties management in meta-cache. * * @throws Exception If failed. */ public void testUpdateProperties() throws Exception { IgfsPath p = path("/tmp/my"); igfs.mkdirs(p); Map<String, String> oldProps = igfs.info(p).properties(); igfs.update(p, F.asMap("a", "1")); igfs.update(p, F.asMap("b", "2")); assertEquals("1", igfs.info(p).property("a")); assertEquals("2", igfs.info(p).property("b")); igfs.update(p, F.asMap("b", "3")); Map<String, String> expProps = new HashMap<>(oldProps); expProps.put("a", "1"); expProps.put("b", "3"); assertEquals("3", igfs.info(p).property("b")); assertEquals(expProps, igfs.info(p).properties()); assertEquals("5", igfs.info(p).property("c", "5")); assertUpdatePropertiesFails(null, null, NullPointerException.class, "Ouch! Argument cannot be null"); assertUpdatePropertiesFails(p, null, NullPointerException.class, "Ouch! Argument cannot be null"); assertUpdatePropertiesFails(null, F.asMap("x", "9"), NullPointerException.class, "Ouch! Argument cannot be null"); assertUpdatePropertiesFails(p, Collections.<String, String>emptyMap(), IllegalArgumentException.class, "Ouch! Argument is invalid"); } /** @throws Exception If failed. */ public void testCreate() throws Exception { IgfsPath path = path("/file"); try (IgfsOutputStream os = igfs.create(path, false)) { assert os != null; IgfsFileImpl info = (IgfsFileImpl)igfs.info(path); for (int i = 0; i < nodesCount(); i++) { IgfsEntryInfo fileInfo = (IgfsEntryInfo)grid(i).cachex(metaCacheName).localPeek(info.fileId(), ONHEAP_PEEK_MODES, null); assertNotNull(fileInfo); assertNotNull(fileInfo.listing()); } } finally { igfs.delete(path("/"), true); } } /** * Test make directories. * * @throws Exception In case of any exception. 
*/ public void testMakeListDeleteDirs() throws Exception { assertListDir("/"); igfs.mkdirs(path("/ab/cd/ef")); assertListDir("/", "ab"); assertListDir("/ab", "cd"); assertListDir("/ab/cd", "ef"); igfs.mkdirs(path("/ab/ef")); igfs.mkdirs(path("/cd/ef")); igfs.mkdirs(path("/cd/gh")); igfs.mkdirs(path("/ef")); igfs.mkdirs(path("/ef/1")); igfs.mkdirs(path("/ef/2")); igfs.mkdirs(path("/ef/3")); assertListDir("/", "ef", "ab", "cd"); assertListDir("/ab", "cd", "ef"); assertListDir("/ab/cd", "ef"); assertListDir("/ab/cd/ef"); assertListDir("/cd", "ef", "gh"); assertListDir("/cd/ef"); assertListDir("/ef", "1", "2", "3"); igfs.delete(path("/ef/2"), false); assertListDir("/", "ef", "ab", "cd"); assertListDir("/ef", "1", "3"); // Delete should return false for non-existing paths. assertFalse(igfs.delete(path("/ef/2"), false)); assertListDir("/", "ef", "ab", "cd"); assertListDir("/ef", "1", "3"); GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.delete(path("/ef"), false); return null; } }, IgfsDirectoryNotEmptyException.class, null); assertListDir("/", "ef", "ab", "cd"); assertListDir("/ef", "1", "3"); igfs.delete(path("/ef"), true); assertListDir("/", "ab", "cd"); } /** * Test make directories in multi-threaded environment. * * @throws Exception In case of any exception. */ @SuppressWarnings("TooBroadScope") public void testMakeListDeleteDirsMultithreaded() throws Exception { assertListDir("/"); final int max = 2 * 1000; final int threads = 50; final AtomicInteger cnt = new AtomicInteger(); info("Create directories: " + max); GridTestUtils.runMultiThreaded(new Callable<Object>() { @Override public Object call() throws Exception { for (int cur = cnt.incrementAndGet(); cur < max; cur = cnt.incrementAndGet()) igfs.mkdirs(path(cur)); return null; } }, threads, "grid-test-make-directories"); info("Validate directories were created."); cnt.set(0); // Reset counter. 
GridTestUtils.runMultiThreaded(new Callable<Object>() { @Override public Object call() throws Exception { for (int cur = cnt.incrementAndGet(); cur < max; cur = cnt.incrementAndGet()) { IgfsFile info = igfs.info(path(cur)); assertNotNull("Expects file exist: " + cur, info); assertTrue("Expects file is a directory: " + cur, info.isDirectory()); } return null; } }, threads, "grid-test-check-directories-exist"); info("Validate directories removing."); cnt.set(0); // Reset counter. GridTestUtils.runMultiThreaded(new Callable<Object>() { @Override public Object call() throws Exception { for (int cur = cnt.incrementAndGet(); cur < max; cur = cnt.incrementAndGet()) igfs.delete(path(cur), true); return null; } }, threads, "grid-test-delete-directories"); } /** @throws Exception If failed. */ public void testBasicOps() throws Exception { // Create directories. igfs.mkdirs(path("/A/B1/C1")); for (Object key : metaCache.keySet()) info("Entry in cache [key=" + key + ", val=" + metaCache.get(key) + ']'); igfs.mkdirs(path("/A/B1/C2")); igfs.mkdirs(path("/A/B1/C3")); igfs.mkdirs(path("/A/B2/C1")); igfs.mkdirs(path("/A/B2/C2")); igfs.mkdirs(path("/A1/B1/C1")); igfs.mkdirs(path("/A1/B1/C2")); igfs.mkdirs(path("/A1/B1/C3")); igfs.mkdirs(path("/A2/B2/C1")); igfs.mkdirs(path("/A2/B2/C2")); for (Object key : metaCache.keySet()) info("Entry in cache [key=" + key + ", val=" + metaCache.get(key) + ']'); // Check existence. assert igfs.exists(path("/A/B1/C1")); // List items. 
Collection<IgfsPath> paths = igfs.listPaths(path("/")); assert paths.size() == 3 : "Unexpected paths: " + paths; paths = igfs.listPaths(path("/A")); assert paths.size() == 2 : "Unexpected paths: " + paths; paths = igfs.listPaths(path("/A/B1")); assert paths.size() == 3 : "Unexpected paths: " + paths; igfs.delete(path("/A1/B1/C1"), false); assertNull(igfs.info(path("/A1/B1/C1"))); igfs.delete(path("/A1/B1/C2"), false); assertNull(igfs.info(path("/A1/B1/C2"))); igfs.delete(path("/A1/B1/C3"), false); assertNull(igfs.info(path("/A1/B1/C3"))); assertTrue(F.isEmpty(igfs.listPaths(path("/A1/B1")))); igfs.delete(path("/A2/B2"), true); assertNull(igfs.info(path("/A2/B2"))); assertTrue(F.isEmpty(igfs.listPaths(path("/A2")))); assertEquals(Arrays.asList(path("/A"), path("/A1"), path("/A2")), sorted(igfs.listPaths(path("/")))); // Delete root when it is not empty: igfs.delete(path("/"), true); igfs.delete(path("/"), false); igfs.delete(path("/A"), true); igfs.delete(path("/A1"), true); igfs.delete(path("/A2"), true); assertTrue(F.isEmpty(igfs.listPaths(path("/")))); // Delete root when it is empty: igfs.delete(path("/"), false); igfs.delete(path("/"), true); assertTrue(F.isEmpty(igfs.listPaths(path("/")))); for (Cache.Entry<Object, Object> e : metaCache) info("Entry in cache [key=" + e.getKey() + ", val=" + e.getValue() + ']'); } /** * Ensure correct size calculation. * * @throws Exception If failed. 
*/ public void testSize() throws Exception { IgfsPath dir1 = path("/dir1"); IgfsPath subDir1 = path("/dir1/subdir1"); IgfsPath dir2 = path("/dir2"); IgfsPath fileDir1 = path("/dir1/file"); IgfsPath fileSubdir1 = path("/dir1/subdir1/file"); IgfsPath fileDir2 = path("/dir2/file"); IgfsOutputStream os = igfs.create(fileDir1, false); os.write(new byte[1000]); os.close(); os = igfs.create(fileSubdir1, false); os.write(new byte[2000]); os.close(); os = igfs.create(fileDir2, false); os.write(new byte[4000]); os.close(); assert igfs.size(fileDir1) == 1000; assert igfs.size(fileSubdir1) == 2000; assert igfs.size(fileDir2) == 4000; assert igfs.size(dir1) == 3000; assert igfs.size(subDir1) == 2000; assert igfs.size(dir2) == 4000; } /** * Convert collection into sorted list. * * @param col Unsorted collection. * @return Sorted collection. */ private <T extends Comparable<T>> List<T> sorted(Collection<T> col) { List<T> list = new ArrayList<>(col); Collections.sort(list); return list; } /** @throws Exception If failed. */ public void testRename() throws Exception { // Create directories. igfs.mkdirs(path("/A/B1/C1")); for (Object key : metaCache.keySet()) info("Entry in cache [key=" + key + ", val=" + metaCache.get(key) + ']'); // Move under itself. GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(path("/A/B1/C1"), path("/A/B1/C1/C2")); return null; } }, IgfsException.class, null); // Move under itself. GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(path("/A/B1/C1"), path("/A/B1/C1/D/C2")); return null; } }, IgfsException.class, null); // Move under itself. 
GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(path("/A/B1/C1"), path("/A/B1/C1/D/E/C2")); return null; } }, IgfsException.class, null); /// // F6 > Enter > Tab x N times // "I like to move it, move it..." // Collection<IgniteBiTuple<String, String>> chain = Arrays.asList( F.t("/A/B1/C1", "/A/B1/C2"), F.t("/A/B1", "/A/B2"), F.t("/A", "/Q"), //F.t("/Q/B2/C2", "/C3"), F.t("/Q/B2/C2", "/Q/B2/C1"), F.t("/Q/B2", "/Q/B1"), F.t("/Q", "/A"), //F.t("/C3", "/A/B1/C1") F.t("/A/B1/C1", "/"), F.t("/C1", "/A/B1") ); final IgfsPath root = path("/"); for (IgniteBiTuple<String, String> e : chain) { final IgfsPath p1 = path(e.get1()); final IgfsPath p2 = path(e.get2()); assertTrue("Entry: " + e, igfs.exists(p1)); igfs.rename(p1, p2); assertFalse("Entry: " + e, igfs.exists(p1)); assertTrue("Entry: " + e, igfs.exists(p2)); // Test root rename. GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(root, p1); return null; } }, IgfsException.class, null); // Test root rename. GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(p1, root); return null; } }, IgfsException.class, null); // Test root rename. if (!root.equals(p2)) { GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.rename(root, p2); return null; } }, IgfsException.class, null); } // Test same rename. igfs.rename(p1, p1); igfs.rename(p2, p2); } // List items. assertEquals(Arrays.asList(path("/A")), sorted(igfs.listPaths(root))); assertEquals(Arrays.asList(path("/A/B1")), sorted(igfs.listPaths(path("/A")))); assertEquals(Arrays.asList(path("/A/B1/C1")), sorted(igfs.listPaths(path("/A/B1")))); String text = "Test long number: " + rnd.nextLong(); // Create file. 
assertEquals(text, create("/A/a", false, text)); // Validate renamed during reading. try (IgfsInputStream in0 = igfs.open(path("/A/a"))) { // Rename file. igfs.rename(path("/A/a"), path("/b")); assertEquals(text, IOUtils.toString(in0, UTF_8)); } // Validate after renamed. assertOpenFails("/A/a", "File not found"); assertEquals(text, read("/b")); // Cleanup. igfs.clear(); assertTrue(F.isEmpty(igfs.listPaths(root))); } /** * @param path Path. * @return IGFS path. */ private IgfsPath path(String path) { assert path != null; return new IgfsPath(path); } /** * @param i Path index. * @return IGFS path. */ private IgfsPath path(long i) { //return path(String.format("/%d", i)); return path(String.format("/%d/q/%d/%d", i % 10, (i / 10) % 10, i)); } /** @throws Exception If failed. */ public void testCreateOpenAppend() throws Exception { // Error - path points to root directory. assertCreateFails("/", false); // Create directories. igfs.mkdirs(path("/A/B1/C1")); // Error - path points to directory. for (String path : Arrays.asList("/A", "/A/B1", "/A/B1/C1")) { assertCreateFails(path, false); assertCreateFails(path, true); assertAppendFails(path, false); assertAppendFails(path, true); assertOpenFails(path, "Failed to open file (not a file)"); } String text1 = "Test long number #1: " + rnd.nextLong(); String text2 = "Test long number #2: " + rnd.nextLong(); // Error - parent does not exist. for (String path : Arrays.asList("/A/a", "/A/B1/a", "/A/B1/C1/a")) { // Error - file doesn't exist. assertOpenFails(path, "File not found"); assertAppendFails(path, false); // Create new and write. assertEquals(text1, create(path, false, text1)); // Error - file already exists. assertCreateFails(path, false); // Overwrite existent. assertEquals(text2, create(path, true, text2)); // Append text. assertEquals(text2 + text1, append(path, false, text1)); // Append text. assertEquals(text2 + text1 + text2, append(path, true, text2)); // Delete this file. 
igfs.delete(path(path), true); // Error - file doesn't exist. assertOpenFails(path, "File not found"); assertAppendFails(path, false); // Create with append. assertEquals(text1, append(path, true, text1)); // Append. for (String full = text1, cur = ""; full.length() < 10000; cur = ", long=" + rnd.nextLong()) assertEquals(full += cur, append(path, rnd.nextBoolean(), cur)); igfs.delete(path(path), false); } } /** @throws Exception If failed. */ @SuppressWarnings("BusyWait") public void testDeleteCacheConsistency() throws Exception { IgfsPath path = new IgfsPath("/someFile"); String metaCacheName = grid(0).igfsx("igfs").configuration().getMetaCacheConfiguration().getName(); String dataCacheName = grid(0).igfsx("igfs").configuration().getDataCacheConfiguration().getName(); try (IgfsOutputStream out = igfs.create(path, true)) { out.write(new byte[10 * 1024 * 1024]); } IgniteUuid fileId = U.field(igfs.info(path), "fileId"); GridCacheAdapter<IgniteUuid, IgfsEntryInfo> metaCache = ((IgniteKernal)grid(0)).internalCache(metaCacheName); GridCacheAdapter<IgfsBlockKey, byte[]> dataCache = ((IgniteKernal)grid(0)).internalCache(dataCacheName); IgfsEntryInfo info = metaCache.get(fileId); assertNotNull(info); assertTrue(info.isFile()); assertNotNull(metaCache.get(info.id())); IgfsDataManager dataMgr = ((IgfsEx)igfs).context().data(); for (int i = 0; i < info.blocksCount(); i++) assertNotNull(dataCache.get(dataMgr.blockKey(i, info))); igfs.delete(path, true); for (int i = 0; i < 25; i++) { if (metaCache.get(info.id()) == null) break; U.sleep(100); } assertNull(metaCache.get(info.id())); for (int i = 0; i < 10; i++) { boolean doBreak = true; for (int j = 0; j < info.blocksCount(); j++) { if (dataCache.get(dataMgr.blockKey(i, info)) != null) { doBreak = false; break; } } if (doBreak) break; else Thread.sleep(100); } for (int i = 0; i < info.blocksCount(); i++) assertNull(dataCache.get(new IgfsBlockKey(info.id(), null, false, i))); } /** @throws Exception If failed. 
     */
    public void testCreateAppendLongData1() throws Exception {
        // Args: chunk size (smaller than buffer), buffer size, chunk count.
        checkCreateAppendLongData(123, 1024, 100);
    }

    /** @throws Exception If failed. */
    public void testCreateAppendLongData2() throws Exception {
        // Chunk larger than the buffer.
        checkCreateAppendLongData(123 + 1024, 1024, 100);
    }

    /** @throws Exception If failed. */
    public void testCreateAppendLongData3() throws Exception {
        checkCreateAppendLongData(123, 1024, 1000);
    }

    /** @throws Exception If failed. */
    public void testCreateAppendLongData4() throws Exception {
        checkCreateAppendLongData(123 + 1024, 1024, 1000);
    }

    /**
     * Test format operation on non-empty file system.
     *
     * @throws Exception If failed.
     */
    public void testFormatNonEmpty() throws Exception {
        String dirPath = "/A/B/C";

        igfs.mkdirs(path(dirPath));

        String filePath = "/someFile";

        create(filePath, false, "Some text.");

        igfs.clear();

        assert !igfs.exists(path(dirPath));
        assert !igfs.exists(path(filePath));

        // Wait until only the root and trash entries remain in the meta cache.
        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                int metaSize = 0;

                for (Object metaId : grid(0).cachex(igfs.configuration().getMetaCacheConfiguration().getName())
                    .keySet()) {
                    if (!IgfsUtils.isRootOrTrashId((IgniteUuid)metaId))
                        metaSize++;
                }

                return metaSize == 0;
            }
        }, 5000);
    }

    /**
     * Test format operation on empty file system.
     *
     * @throws Exception If failed.
     */
    public void testFormatEmpty() throws Exception {
        igfs.clear();
    }

    /**
     * @param chunkSize Chunk size.
     * @param bufSize Buffer size.
     * @param cnt Count.
     * @throws Exception If failed.
*/ private void checkCreateAppendLongData(int chunkSize, int bufSize, int cnt) throws Exception { IgfsPath path = new IgfsPath("/someFile"); byte[] buf = new byte[chunkSize]; for (int i = 0; i < buf.length; i++) buf[i] = (byte)(i * i); IgfsOutputStream os = igfs.create(path, bufSize, true, null, 0, 1024, null); try { for (int i = 0; i < cnt; i++) os.write(buf); os.flush(); } finally { os.close(); } os = igfs.append(path, chunkSize, false, null); try { for (int i = 0; i < cnt; i++) os.write(buf); os.flush(); } finally { os.close(); } byte[] readBuf = new byte[chunkSize]; try (IgfsInputStream in = igfs.open(path)) { long pos = 0; for (int k = 0; k < 2 * cnt; k++) { in.readFully(pos, readBuf); for (int i = 0; i < readBuf.length; i++) assertEquals(buf[i], readBuf[i]); pos += readBuf.length; } } } /** * Create file and write specified text to. * * @param path File path to create. * @param overwrite Overwrite file if it already exists. * @param text Text to write into file. * @return Content of this file. * @throws IgniteCheckedException In case of error. */ private String create(String path, boolean overwrite, String text) throws Exception { try (IgfsOutputStream out = igfs.create(path(path), overwrite)) { IOUtils.write(text, out, UTF_8); } assertNotNull(igfs.info(path(path))); return read(path); } /** * Appent text to the file. * * @param path File path to create. * @param create Create file if it doesn't exist yet. * @param text Text to append to file. * @return Content of this file. * @throws IgniteCheckedException In case of error. */ private String append(String path, boolean create, String text) throws Exception { try (IgfsOutputStream out = igfs.append(path(path), create)) { IOUtils.write(text, out, UTF_8); } assertNotNull(igfs.info(path(path))); return read(path); } /** * Read content of the file. * * @param path File path to read. * @return Content of this file. * @throws IgniteCheckedException In case of error. 
*/ private String read(String path) throws Exception { try (IgfsInputStream in = igfs.open(path(path))) { return IOUtils.toString(in, UTF_8); } } /** * Test expected failures for 'update properties' operation. * * @param path Path to the file. * @param props File properties to set. * @param msg Failure message if expected exception was not thrown. */ private void assertUpdatePropertiesFails(@Nullable final IgfsPath path, @Nullable final Map<String, String> props, Class<? extends Throwable> cls, @Nullable String msg) { GridTestUtils.assertThrows(log, new Callable() { @Override public Object call() throws Exception { return igfs.update(path, props); } }, cls, msg); } /** * Test expected failures for 'create' operation. * * @param path File path to create. * @param overwrite Overwrite file if it already exists. Note: you cannot overwrite an existent directory. */ private void assertCreateFails(final String path, final boolean overwrite) { GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.create(path(path), overwrite); return false; } }, IgfsException.class, null); } /** * Test expected failures for 'append' operation. * * @param path File path to append. * @param create Create file if it doesn't exist yet. */ private void assertAppendFails(final String path, final boolean create) { GridTestUtils.assertThrowsInherited(log, new Callable<Object>() { @Override public Object call() throws Exception { igfs.append(path(path), create); return false; } }, IgfsException.class, null); } /** * Test expected failures for 'open' operation. * * @param path File path to read. * @param msg Failure message if expected exception was not thrown. 
     */
    private void assertOpenFails(final String path, @Nullable String msg) {
        GridTestUtils.assertThrowsInherited(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                igfs.open(path(path));

                return false;
            }
        }, IgniteException.class, msg);
    }

    /**
     * Validate directory listing.
     *
     * @param path Directory path to validate listing for.
     * @param item List of directory items.
     */
    private void assertListDir(String path, String... item) {
        Collection<IgfsFile> files = igfs.listFiles(new IgfsPath(path));

        List<String> names = new ArrayList<>(item.length);

        for (IgfsFile file : files)
            names.add(file.path().name());

        // NOTE: sorts the caller's varargs array in place; comparison is order-insensitive.
        Arrays.sort(item);
        Collections.sort(names);

        assertEquals(Arrays.asList(item), names);
    }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.config;

import java.util.ArrayList;
import java.util.List;

import static com.hazelcast.util.Preconditions.checkFalse;
import static com.hazelcast.util.Preconditions.checkHasText;
import static com.hazelcast.util.Preconditions.checkNotNegative;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static com.hazelcast.util.Preconditions.checkPositive;

/**
 * Contains configuration for {@code QueryCache}.
 *
 * @since 3.5
 */
public class QueryCacheConfig {

    /**
     * By default, after reaching this minimum size, node immediately sends buffered events to {@code QueryCache}.
     */
    public static final int DEFAULT_BATCH_SIZE = 1;

    /**
     * By default, only buffer last {@value #DEFAULT_BUFFER_SIZE} events fired from a partition.
     */
    public static final int DEFAULT_BUFFER_SIZE = 16;

    /**
     * Default number of seconds which an event waits in the buffer of a node before being sent to {@code QueryCache}.
     */
    public static final int DEFAULT_DELAY_SECONDS = 0;

    /**
     * By default, also cache values of entries besides keys.
     */
    public static final boolean DEFAULT_INCLUDE_VALUE = true;

    /**
     * By default, execute an initial population query prior to creation of the {@code QueryCache}.
     */
    public static final boolean DEFAULT_POPULATE = true;

    /**
     * Default value of coalesce property.
     */
    public static final boolean DEFAULT_COALESCE = false;

    /**
     * By default, hold values of entries in {@code QueryCache} as binary.
     */
    public static final InMemoryFormat DEFAULT_IN_MEMORY_FORMAT = InMemoryFormat.BINARY;

    /**
     * After reaching this minimum size, node immediately sends buffered events to {@code QueryCache}.
     */
    private int batchSize = DEFAULT_BATCH_SIZE;

    /**
     * Maximum number of events which can be stored in a buffer of partition.
     */
    private int bufferSize = DEFAULT_BUFFER_SIZE;

    /**
     * The minimum number of delay seconds which an event waits in the buffer of node.
     */
    private int delaySeconds = DEFAULT_DELAY_SECONDS;

    /**
     * Flag to enable/disable value caching.
     */
    private boolean includeValue = DEFAULT_INCLUDE_VALUE;

    /**
     * Flag to enable/disable initial population of the {@code QueryCache}.
     */
    private boolean populate = DEFAULT_POPULATE;

    /**
     * Flag to enable/disable coalescing.
     *
     * @see #setCoalesce
     */
    private boolean coalesce = DEFAULT_COALESCE;

    /**
     * Memory format of values of entries in {@code QueryCache}.
     */
    private InMemoryFormat inMemoryFormat = DEFAULT_IN_MEMORY_FORMAT;

    /**
     * The name of {@code QueryCache}.
     */
    private String name;

    /**
     * The predicate to filter events which will be applied to the {@code QueryCache}.
*/ private PredicateConfig predicateConfig = new PredicateConfig(); private EvictionConfig evictionConfig = new EvictionConfig(); private List<EntryListenerConfig> entryListenerConfigs; private List<MapIndexConfig> indexConfigs; private QueryCacheConfigReadOnly readOnly; public QueryCacheConfig() { } public QueryCacheConfig(String name) { setName(name); } public QueryCacheConfig(QueryCacheConfig other) { this.batchSize = other.batchSize; this.bufferSize = other.bufferSize; this.delaySeconds = other.delaySeconds; this.includeValue = other.includeValue; this.populate = other.populate; this.coalesce = other.coalesce; this.inMemoryFormat = other.inMemoryFormat; this.name = other.name; this.predicateConfig = other.predicateConfig; this.evictionConfig = other.evictionConfig; this.entryListenerConfigs = other.entryListenerConfigs; this.indexConfigs = other.indexConfigs; } /** * Gets immutable version of this configuration. * * @return Immutable version of this configuration. * @deprecated this method will be removed in 4.0; it is meant for internal usage only. */ public QueryCacheConfigReadOnly getAsReadOnly() { if (readOnly == null) { readOnly = new QueryCacheConfigReadOnly(this); } return readOnly; } /** * Returns the name of {@code QueryCache}. * * @return the name of {@code QueryCache}. */ public String getName() { return name; } /** * Sets the name of {@code QueryCache}. * * @param name the name of {@code QueryCache}. * @return this {@code QueryCacheConfig} instance. */ public QueryCacheConfig setName(String name) { checkHasText(name, "name"); this.name = name; return this; } /** * Returns the predicate of {@code QueryCache}. * * @return the predicate of {@code QueryCache}. */ public PredicateConfig getPredicateConfig() { return predicateConfig; } /** * Sets the predicate of {@code QueryCache}. * * @param predicateConfig config for predicate. * @return this {@code QueryCacheConfig} instance. 
     */
    public QueryCacheConfig setPredicateConfig(PredicateConfig predicateConfig) {
        this.predicateConfig = checkNotNull(predicateConfig, "predicateConfig can not be null");
        return this;
    }

    /**
     * After reaching this size, node sends buffered events to {@code QueryCache}.
     *
     * @return the batch size.
     */
    public int getBatchSize() {
        return batchSize;
    }

    /**
     * Sets the batch size which will be used to determine number of events to be sent in a batch
     * to {@code QueryCache}.
     *
     * @param batchSize the batch size.
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setBatchSize(int batchSize) {
        checkPositive(batchSize, "batchSize");

        this.batchSize = batchSize;
        return this;
    }

    /**
     * Returns the maximum number of events which can be stored in a buffer of partition.
     *
     * @return the maximum number of events which can be stored in a buffer of partition.
     */
    public int getBufferSize() {
        return bufferSize;
    }

    /**
     * Sets the maximum number of events which can be stored in a buffer of partition.
     *
     * @param bufferSize the buffer size.
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setBufferSize(int bufferSize) {
        checkPositive(bufferSize, "bufferSize");

        this.bufferSize = bufferSize;
        return this;
    }

    /**
     * Returns the minimum number of delay seconds which an event waits in the buffer of node
     * before sending to a {@code QueryCache}.
     *
     * @return delay seconds.
     */
    public int getDelaySeconds() {
        return delaySeconds;
    }

    /**
     * Sets the minimum number of delay seconds which an event waits in the buffer of node
     * before sending to a {@code QueryCache}.
     *
     * @param delaySeconds the delay seconds.
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setDelaySeconds(int delaySeconds) {
        checkNotNegative(delaySeconds, "delaySeconds");

        this.delaySeconds = delaySeconds;
        return this;
    }

    /**
     * Returns memory format of values of entries in {@code QueryCache}.
     * <p/>
     * Default value is binary.
     *
     * @return memory format of values of entries in {@code QueryCache}.
     */
    public InMemoryFormat getInMemoryFormat() {
        return inMemoryFormat;
    }

    /**
     * Sets memory format of values of entries in {@code QueryCache}.
     * <p/>
     * Default value is binary.
     *
     * @param inMemoryFormat the memory format
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setInMemoryFormat(InMemoryFormat inMemoryFormat) {
        checkNotNull(inMemoryFormat, "inMemoryFormat cannot be null");
        // NATIVE format is explicitly rejected for query caches.
        checkFalse(inMemoryFormat == InMemoryFormat.NATIVE, "InMemoryFormat." + inMemoryFormat + " is not supported.");

        this.inMemoryFormat = inMemoryFormat;
        return this;
    }

    /**
     * Returns {@code true} if value caching enabled, otherwise returns {@code false}.
     * <p/>
     * Default value is {@value #DEFAULT_INCLUDE_VALUE}.
     *
     * @return {@code true} if value caching enabled, otherwise returns {@code false}.
     */
    public boolean isIncludeValue() {
        return includeValue;
    }

    /**
     * Set {@code true} to enable value caching, otherwise set {@code false}.
     * <p/>
     * Default value is {@value #DEFAULT_INCLUDE_VALUE}.
     *
     * @param includeValue Set {@code true} if value caching is enabled, otherwise set {@code false}.
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setIncludeValue(boolean includeValue) {
        this.includeValue = includeValue;
        return this;
    }

    /**
     * Returns {@code true} if initial population of {@code QueryCache} is enabled, otherwise returns {@code false}.
     * <p/>
     * Default value is {@value #DEFAULT_POPULATE}.
     *
     * @return {@code true} if initial population of {@code QueryCache} is enabled, otherwise returns {@code false}.
     */
    public boolean isPopulate() {
        return populate;
    }

    /**
     * Set {@code true} to enable initial population, otherwise set {@code false}.
     * <p/>
     * Default value is {@value #DEFAULT_POPULATE}.
     *
     * @param populate set {@code true} to enable initial population, otherwise set {@code false}
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setPopulate(boolean populate) {
        this.populate = populate;
        return this;
    }

    /**
     * Returns {@code true} if coalescing is enabled, otherwise returns {@code false}.
     * <p/>
     * Default value is {@value #DEFAULT_COALESCE}.
     *
     * @return {@code true} if coalescing is enabled, otherwise returns {@code false}.
     * @see #setCoalesce
     */
    public boolean isCoalesce() {
        return coalesce;
    }

    /**
     * Set {@code true} to enable coalescing, otherwise set {@code false}.
     * This setting is only valid if {@code QueryCacheConfig#delaySeconds} is greater than 0.
     * <p/>
     * Default value is {@value #DEFAULT_COALESCE}.
     *
     * @param coalesce set {@code true} to enable, otherwise set {@code false}
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setCoalesce(boolean coalesce) {
        this.coalesce = coalesce;
        return this;
    }

    /**
     * Returns {@link EvictionConfig} instance for this {@code QueryCache}.
     *
     * @return the {@link EvictionConfig} instance for this {@code QueryCache}.
     */
    public EvictionConfig getEvictionConfig() {
        return evictionConfig;
    }

    /**
     * Sets the {@link EvictionConfig} instance for this {@code QueryCache}.
     *
     * @param evictionConfig the {@link EvictionConfig} instance for eviction configuration to set
     * @return this {@code QueryCacheConfig} instance.
     */
    public QueryCacheConfig setEvictionConfig(EvictionConfig evictionConfig) {
        checkNotNull(evictionConfig, "evictionConfig cannot be null");

        this.evictionConfig = evictionConfig;
        return this;
    }

    /**
     * Adds {@link EntryListenerConfig} to this {@code QueryCacheConfig}.
     *
     * @param listenerConfig the {@link EntryListenerConfig} to add
     * @return this {@code QueryCacheConfig} instance.
*/ public QueryCacheConfig addEntryListenerConfig(EntryListenerConfig listenerConfig) { checkNotNull(listenerConfig, "listenerConfig cannot be null"); getEntryListenerConfigs().add(listenerConfig); return this; } public List<EntryListenerConfig> getEntryListenerConfigs() { if (entryListenerConfigs == null) { entryListenerConfigs = new ArrayList<EntryListenerConfig>(); } return entryListenerConfigs; } public QueryCacheConfig setEntryListenerConfigs(List<EntryListenerConfig> listenerConfigs) { checkNotNull(listenerConfigs, "listenerConfig cannot be null"); this.entryListenerConfigs = listenerConfigs; return this; } public QueryCacheConfig addIndexConfig(MapIndexConfig mapIndexConfig) { getIndexConfigs().add(mapIndexConfig); return this; } public List<MapIndexConfig> getIndexConfigs() { if (indexConfigs == null) { indexConfigs = new ArrayList<MapIndexConfig>(); } return indexConfigs; } public QueryCacheConfig setIndexConfigs(List<MapIndexConfig> indexConfigs) { this.indexConfigs = indexConfigs; return this; } @Override public String toString() { return "QueryCacheConfig{" + "batchSize=" + batchSize + ", bufferSize=" + bufferSize + ", delaySeconds=" + delaySeconds + ", includeValue=" + includeValue + ", populate=" + populate + ", coalesce=" + coalesce + ", inMemoryFormat=" + inMemoryFormat + ", name='" + name + '\'' + ", predicateConfig=" + predicateConfig + ", evictionConfig=" + evictionConfig + ", entryListenerConfigs=" + entryListenerConfigs + ", indexConfigs=" + indexConfigs + '}'; } }
/* * Copyright 2016-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.roadm; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Range; import org.onosproject.core.ApplicationId; import org.onosproject.core.CoreService; import org.onosproject.net.ChannelSpacing; import org.onosproject.net.ConnectPoint; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Direction; import org.onosproject.net.OchSignal; import org.onosproject.net.OchSignalType; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.behaviour.LambdaQuery; import org.onosproject.net.behaviour.PowerConfig; import org.onosproject.net.behaviour.protection.ProtectedTransportEndpointState; import org.onosproject.net.behaviour.protection.ProtectionConfigBehaviour; import org.onosproject.net.behaviour.protection.TransportEndpointState; import org.onosproject.net.device.DeviceEvent; import org.onosproject.net.device.DeviceListener; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.DefaultFlowRule; import org.onosproject.net.flow.DefaultTrafficSelector; import org.onosproject.net.flow.DefaultTrafficTreatment; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowId; import org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.FlowRuleService; import 
org.onosproject.net.flow.TrafficSelector; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.flow.criteria.Criteria; import org.onosproject.net.flow.instructions.Instructions; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.Deactivate; import org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkNotNull; import static org.onosproject.net.optical.OpticalAnnotations.INPUT_PORT_STATUS; import static org.onosproject.roadm.RoadmUtil.OPS_OPT_AUTO; import static org.onosproject.roadm.RoadmUtil.OPS_OPT_FORCE; import static org.onosproject.roadm.RoadmUtil.OPS_OPT_MANUAL; /** * Application for monitoring and configuring ROADM devices. 
 */
@Component(immediate = true, service = RoadmService.class)
public class RoadmManager implements RoadmService {

    private static final String APP_NAME = "org.onosproject.roadm";

    private ApplicationId appId;

    private final Logger log = LoggerFactory.getLogger(getClass());

    private DeviceListener deviceListener = new InternalDeviceListener();

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected RoadmStore roadmStore;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected FlowRuleService flowRuleService;

    @Activate
    protected void activate() {
        appId = coreService.registerApplication(APP_NAME);
        deviceService.addListener(deviceListener);
        initDevices();

        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        deviceService.removeListener(deviceListener);

        log.info("Stopped");
    }

    @Deprecated
    @Override
    public void setProtectionSwitchWorkingPath(DeviceId deviceId, int index) {
        checkNotNull(deviceId);
        ProtectionConfigBehaviour behaviour = getProtectionConfig(deviceId);
        if (behaviour == null) {
            return;
        }
        Map<ConnectPoint, ProtectedTransportEndpointState> map = getProtectionSwitchStates(behaviour);
        if (map == null) {
            log.warn("Failed to get protected transport endpoint state in device {}", deviceId);
            return;
        }
        if (map.isEmpty()) {
            log.warn("No protected transport endpoint state found in device {}", deviceId);
            return;
        }
        // Applies the index to the first connect point in the map (iteration order
        // of the returned map — presumably a single protected endpoint; confirm).
        behaviour.switchWorkingPath(map.keySet().toArray(new ConnectPoint[0])[0], index);
    }

    @Deprecated
    @Override
    public String getProtectionSwitchPortState(DeviceId deviceId, PortNumber portNumber) {
        checkNotNull(deviceId);
        ProtectionConfigBehaviour behaviour = getProtectionConfig(deviceId);
        if (behaviour == null) {
            return null;
        }
        Map<ConnectPoint, ProtectedTransportEndpointState> map = getProtectionSwitchStates(behaviour);
        if (map == null) {
            log.warn("Failed to get protected transport endpoint state in device {}", deviceId);
            return null;
        }
        // Scan all path states for the one whose output connect point matches the port.
        for (ProtectedTransportEndpointState state : map.values()) {
            for (TransportEndpointState element : state.pathStates()) {
                if (element.description().output().connectPoint().port().equals(portNumber)) {
                    return element.attributes().get(INPUT_PORT_STATUS);
                }
            }
        }
        // Do not need warning here for port polling.
        log.debug("Unable to get port status, device: {}, port: {}", deviceId, portNumber);
        return null;
    }

    @Override
    public void configProtectionSwitch(DeviceId deviceId, String operation, ConnectPoint identifier, int index) {
        checkNotNull(deviceId);
        ProtectionConfigBehaviour behaviour = getProtectionConfig(deviceId);
        if (behaviour == null) {
            return;
        }
        // automatic operation
        if (OPS_OPT_AUTO.equals(operation)) {
            behaviour.switchToAutomatic(identifier);
            return;
        }
        // force or manual operation; any other operation string is silently ignored
        if (OPS_OPT_MANUAL.equals(operation)) {
            behaviour.switchToManual(identifier, index);
        } else if (OPS_OPT_FORCE.equals(operation)) {
            behaviour.switchToForce(identifier, index);
        }
    }

    @Override
    public Map<ConnectPoint, ProtectedTransportEndpointState> getProtectionSwitchStates(DeviceId deviceId) {
        checkNotNull(deviceId);
        ProtectionConfigBehaviour behaviour = getProtectionConfig(deviceId);
        if (behaviour == null) {
            return ImmutableMap.of();
        }
        return getProtectionSwitchStates(behaviour);
    }

    @Override
    public void setTargetPortPower(DeviceId deviceId, PortNumber portNumber, long power) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
        if (powerConfig != null) {
            // Persist the target power in the store before pushing it to the device.
            roadmStore.setTargetPower(deviceId, portNumber, power);
            powerConfig.setTargetPower(portNumber, Direction.ALL, power);
        } else {
            log.warn("Unable to set target port power for device {}", deviceId);
        }
    }

    @Override
    public Long getTargetPortPower(DeviceId deviceId, PortNumber portNumber) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        // getTargetPortPower is not yet implemented in PowerConfig so we access store instead
        return roadmStore.getTargetPower(deviceId, portNumber);
    }

    @Override
    public void setAttenuation(DeviceId deviceId, PortNumber portNumber, OchSignal ochSignal, long attenuation) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        checkNotNull(ochSignal);
        PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
        if (powerConfig != null) {
            // Per-channel target power acts as the attenuation setting.
            powerConfig.setTargetPower(portNumber, ochSignal, attenuation);
        } else {
            log.warn("Cannot set attenuation for channel index {} on device {}",
                     ochSignal.spacingMultiplier(), deviceId);
        }
    }

    @Override
    public Long getAttenuation(DeviceId deviceId, PortNumber portNumber, OchSignal ochSignal) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        checkNotNull(ochSignal);
        PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
        if (powerConfig != null) {
            Optional<Long> attenuation = powerConfig.getTargetPower(portNumber, ochSignal);
            if (attenuation.isPresent()) {
                return attenuation.get();
            }
        }
        return null;
    }

    @Override
    public Long getCurrentPortPower(DeviceId deviceId, PortNumber portNumber) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
        if (powerConfig != null) {
            Optional<Long> currentPower = powerConfig.currentPower(portNumber, Direction.ALL);
            if (currentPower.isPresent()) {
                return currentPower.get();
            }
        }
        return null;
    }

    @Override
    public Long getCurrentChannelPower(DeviceId deviceId, PortNumber portNumber, OchSignal ochSignal) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        checkNotNull(ochSignal);
        PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
        if (powerConfig != null) {
            Optional<Long> currentPower = powerConfig.currentPower(portNumber, ochSignal);
            if (currentPower.isPresent()) {
                return currentPower.get();
            }
        }
        return null;
    }

    @Override
    public Set<OchSignal> queryLambdas(DeviceId deviceId, PortNumber portNumber) {
        checkNotNull(deviceId);
        checkNotNull(portNumber);
        LambdaQuery lambdaQuery = getLambdaQuery(deviceId);
        if (lambdaQuery != null) {
            return lambdaQuery.queryLambdas(portNumber);
        }
        return Collections.emptySet();
    }

    @Override
    public FlowId createConnection(DeviceId deviceId, int priority, boolean isPermanent,
                                   int timeout, PortNumber inPort, PortNumber outPort, OchSignal ochSignal) {
        checkNotNull(deviceId);
        checkNotNull(inPort);
        checkNotNull(outPort);

        // Match a fixed-grid channel arriving on inPort and forward it to outPort.
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .add(Criteria.matchInPort(inPort))
                .add(Criteria.matchOchSignalType(OchSignalType.FIXED_GRID))
                .add(Criteria.matchLambda(ochSignal))
                .build();
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .add(Instructions.createOutput(outPort))
                .build();

        FlowRule.Builder flowBuilder = DefaultFlowRule.builder()
                .forDevice(deviceId)
                .fromApp(appId)
                .withPriority(priority)
                .withSelector(selector)
                .withTreatment(treatment);
        if (isPermanent) {
            flowBuilder.makePermanent();
        } else {
            flowBuilder.makeTemporary(timeout);
        }

        FlowRule flowRule = flowBuilder.build();
        flowRuleService.applyFlowRules(flowRule);

        log.info("Created connection from input port {} to output port {}",
                 inPort.toLong(), outPort.toLong());

        return flowRule.id();
    }

    @Override
    public FlowId createConnection(DeviceId deviceId, int priority, boolean isPermanent,
                                   int timeout, PortNumber inPort, PortNumber outPort,
                                   OchSignal ochSignal, long attenuation) {
        checkNotNull(deviceId);
        checkNotNull(inPort);
        checkNotNull(outPort);

        FlowId flowId = createConnection(deviceId, priority, isPermanent, timeout, inPort, outPort, ochSignal);
        // delayedSetAttenuation is defined later in this class; presumably it defers applying
        // the attenuation until the flow rule is installed — confirm in its definition.
        delayedSetAttenuation(deviceId, outPort, ochSignal, attenuation);
        return flowId;
    }

    @Override
    public void removeConnection(DeviceId deviceId, FlowId flowId) {
        checkNotNull(deviceId);
        checkNotNull(flowId);
        for (FlowEntry entry : flowRuleService.getFlowEntries(deviceId)) {
            if (entry.id().equals(flowId)) {
                flowRuleService.removeFlowRules(entry);
                log.info("Deleted connection {}", entry.id());
                break;
            }
        }
    }

    @Override
    public boolean hasPortTargetPower(DeviceId deviceId, PortNumber portNumber) {
checkNotNull(deviceId); checkNotNull(portNumber); PowerConfig<Object> powerConfig = getPowerConfig(deviceId); if (powerConfig != null) { Optional<Range<Long>> range = powerConfig.getTargetPowerRange(portNumber, Direction.ALL); return range.isPresent(); } return false; } @Override public boolean portTargetPowerInRange(DeviceId deviceId, PortNumber portNumber, long power) { checkNotNull(deviceId); checkNotNull(portNumber); PowerConfig<Object> powerConfig = getPowerConfig(deviceId); if (powerConfig != null) { Optional<Range<Long>> range = powerConfig.getTargetPowerRange(portNumber, Direction.ALL); return range.isPresent() && range.get().contains(power); } return false; } @Override public boolean attenuationInRange(DeviceId deviceId, PortNumber outPort, long att) { checkNotNull(deviceId); checkNotNull(outPort); PowerConfig<Object> powerConfig = getPowerConfig(deviceId); if (powerConfig != null) { OchSignal stubOch = OchSignal.newDwdmSlot(ChannelSpacing.CHL_50GHZ, 0); Optional<Range<Long>> range = powerConfig.getTargetPowerRange(outPort, stubOch); return range.isPresent() && range.get().contains(att); } return false; } @Override public boolean validInputPort(DeviceId deviceId, PortNumber portNumber) { checkNotNull(deviceId); checkNotNull(portNumber); PowerConfig<Object> powerConfig = getPowerConfig(deviceId); if (powerConfig != null) { Optional<Range<Long>> range = powerConfig.getInputPowerRange(portNumber, Direction.ALL); return range.isPresent(); } return false; } @Override public boolean validOutputPort(DeviceId deviceId, PortNumber portNumber) { return hasPortTargetPower(deviceId, portNumber); } @Override public boolean validChannel(DeviceId deviceId, PortNumber portNumber, OchSignal ochSignal) { checkNotNull(deviceId); checkNotNull(portNumber); checkNotNull(ochSignal); LambdaQuery lambdaQuery = getLambdaQuery(deviceId); if (lambdaQuery != null) { Set<OchSignal> channels = lambdaQuery.queryLambdas(portNumber); return channels.contains(ochSignal); } return false; } 
/** A channel is available when no installed flow entry already uses it. */
@Override
public boolean channelAvailable(DeviceId deviceId, OchSignal ochSignal) {
    checkNotNull(deviceId);
    checkNotNull(ochSignal);
    for (FlowEntry entry : flowRuleService.getFlowEntries(deviceId)) {
        if (ChannelData.fromFlow(entry).ochSignal().equals(ochSignal)) {
            return false; // channel already in use by an existing connection
        }
    }
    return true;
}

/** A connection is valid when its input and output ports are both valid. */
@Override
public boolean validConnection(DeviceId deviceId, PortNumber inPort, PortNumber outPort) {
    checkNotNull(deviceId);
    checkNotNull(inPort);
    checkNotNull(outPort);
    return validInputPort(deviceId, inPort) && validOutputPort(deviceId, outPort);
}

/** Returns the port's target-power range, or null if unavailable. */
@Override
public Range<Long> targetPortPowerRange(DeviceId deviceId, PortNumber portNumber) {
    checkNotNull(deviceId);
    checkNotNull(portNumber);
    PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
    if (powerConfig != null) {
        Optional<Range<Long>> range = powerConfig.getTargetPowerRange(portNumber, Direction.ALL);
        if (range.isPresent()) {
            return range.get();
        }
    }
    return null; // no PowerConfig, or no range for this port
}

/** Returns the per-channel attenuation range of the port, or null if unavailable. */
@Override
public Range<Long> attenuationRange(DeviceId deviceId, PortNumber portNumber, OchSignal ochSignal) {
    checkNotNull(deviceId);
    checkNotNull(portNumber);
    checkNotNull(ochSignal);
    PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
    if (powerConfig != null) {
        Optional<Range<Long>> range = powerConfig.getTargetPowerRange(portNumber, ochSignal);
        if (range.isPresent()) {
            return range.get();
        }
    }
    return null;
}

/** Returns the port's input-power range, or null if unavailable. */
@Override
public Range<Long> inputPortPowerRange(DeviceId deviceId, PortNumber portNumber) {
    checkNotNull(deviceId);
    checkNotNull(portNumber);
    PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
    if (powerConfig != null) {
        Optional<Range<Long>> range = powerConfig.getInputPowerRange(portNumber, Direction.ALL);
        if (range.isPresent()) {
            return range.get();
        }
    }
    return null;
}

// Resolves the device's PowerConfig behaviour, or null when the device is
// unknown or the driver does not support it. Continues in the next chunk.
private PowerConfig<Object> getPowerConfig(DeviceId deviceId) {
    Device device = deviceService.getDevice(deviceId);
    if (device != null && device.is(PowerConfig.class)) {
        return device.as(PowerConfig.class);
    }
    // Do not need warning here for port polling.
    // Continuation of getPowerConfig(...) — reached when the device is missing
    // or lacks the PowerConfig behaviour; debug-level because polling hits this often.
    log.debug("Unable to load PowerConfig for {}", deviceId);
    return null;
}

/** Resolves the device's LambdaQuery behaviour, or null when unsupported. */
private LambdaQuery getLambdaQuery(DeviceId deviceId) {
    Device device = deviceService.getDevice(deviceId);
    if (device != null && device.is(LambdaQuery.class)) {
        return device.as(LambdaQuery.class);
    }
    // Do not need warning here for port polling.
    log.debug("Unable to load LambdaQuery for {}", deviceId);
    return null;
}

/** Resolves the device's ProtectionConfigBehaviour, or null when unsupported. */
private ProtectionConfigBehaviour getProtectionConfig(DeviceId deviceId) {
    Device device = deviceService.getDevice(deviceId);
    if (device != null && device.is(ProtectionConfigBehaviour.class)) {
        return device.as(ProtectionConfigBehaviour.class);
    }
    // Do not need warning here for port polling.
    log.debug("Unable to load ProtectionConfigBehaviour for {}", deviceId);
    return null;
}

// Initialize all devices
private void initDevices() {
    for (Device device : deviceService.getDevices(Device.Type.ROADM)) {
        initDevice(device.id());
        //FIXME
        // As roadm application is a optional tool for now.
        // The target power initialization will be enhanced later,
        // hopefully using an formal optical subsystem.
        // setAllInitialTargetPortPowers(device.id());
    }
}

// Initialize RoadmStore for a device to support target power; idempotent —
// only registers the device when it is not already in the store.
private void initDevice(DeviceId deviceId) {
    if (!roadmStore.deviceAvailable(deviceId)) {
        roadmStore.addDevice(deviceId);
    }
    log.info("Initialized device {}", deviceId);
}

// Sets the target port powers for a port on a device.
// Attempts to read target powers from store. If no value is found then
// default value is used instead.
// Sets one port's target power: uses the stored value if present, otherwise
// defaults to the middle of the driver-reported range (and persists it).
private void setInitialTargetPortPower(DeviceId deviceId, PortNumber portNumber) {
    PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
    if (powerConfig == null) {
        log.warn("Unable to set default initial powers for port {} on device {}", portNumber, deviceId);
        return;
    }

    Optional<Range<Long>> range = powerConfig.getTargetPowerRange(portNumber, Direction.ALL);
    if (!range.isPresent()) {
        log.warn("No target power range found for port {} on device {}", portNumber, deviceId);
        return;
    }

    Long power = roadmStore.getTargetPower(deviceId, portNumber);
    if (power == null) {
        // Set default to middle of the range.
        // NOTE(review): (lower + upper) / 2 can overflow for extreme Long
        // endpoints; presumably power values are small dBm-scaled numbers — confirm.
        power = (range.get().lowerEndpoint() + range.get().upperEndpoint()) / 2;
        roadmStore.setTargetPower(deviceId, portNumber, power);
    }
    powerConfig.setTargetPower(portNumber, Direction.ALL, power);
}

// Sets the target port powers for each port on a device.
// Attempts to read target powers from store. If no value is found then
// default value is used instead.
private void setAllInitialTargetPortPowers(DeviceId deviceId) {
    PowerConfig<Object> powerConfig = getPowerConfig(deviceId);
    if (powerConfig == null) {
        log.warn("Unable to set default initial powers for device {}", deviceId);
        return;
    }

    List<Port> ports = deviceService.getPorts(deviceId);
    for (Port port : ports) {
        Optional<Range<Long>> range = powerConfig.getTargetPowerRange(port.number(), Direction.ALL);
        if (range.isPresent()) {
            Long power = roadmStore.getTargetPower(deviceId, port.number());
            if (power == null) {
                // Set default to middle of the range (see overflow note above
                // in setInitialTargetPortPower).
                power = (range.get().lowerEndpoint() + range.get().upperEndpoint()) / 2;
                roadmStore.setTargetPower(deviceId, port.number(), power);
            }
            powerConfig.setTargetPower(port.number(), Direction.ALL, power);
        } else {
            log.warn("No target power range found for port {} on device {}", port.number(), deviceId);
        }
    }
}

// Delay the call to setTargetPower because the flow may not be in the store yet.
// Signature continues in the next chunk.
private void delayedSetAttenuation(DeviceId deviceId, PortNumber outPort, OchSignal
        // Continuation of delayedSetAttenuation(...) — parameters begin in the previous chunk.
        ochSignal, long attenuation) {
    // Fire-and-forget thread: sleep one second so the flow can land in the
    // flow store, then apply the attenuation.
    Runnable setAtt = () -> {
        try {
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            log.warn("Thread interrupted. Setting attenuation early.");
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
        setAttenuation(deviceId, outPort, ochSignal, attenuation);
    };
    new Thread(setAtt).start();
}

// Get protection endpoint states; returns an empty map on interruption or
// execution failure (errors are logged, not propagated).
private Map<ConnectPoint, ProtectedTransportEndpointState> getProtectionSwitchStates(
        ProtectionConfigBehaviour behaviour) {
    Map<ConnectPoint, ProtectedTransportEndpointState> map;
    try {
        map = behaviour.getProtectionEndpointStates().get();
    } catch (InterruptedException e1) {
        log.error("Interrupted.", e1);
        Thread.currentThread().interrupt();
        return ImmutableMap.of();
    } catch (ExecutionException e1) {
        log.error("Exception caught.", e1);
        return ImmutableMap.of();
    }
    return map;
}

// Listens to device events; (re)registers devices in the store as they appear.
private class InternalDeviceListener implements DeviceListener {
    @Override
    public void event(DeviceEvent deviceEvent) {
        Device device = deviceEvent.subject();
        switch (deviceEvent.type()) {
            case DEVICE_ADDED:
            case DEVICE_UPDATED:
                initDevice(device.id());
                break;
            case PORT_ADDED:
            case PORT_UPDATED:
                //FIXME
                // As roadm application is a optional tool for now.
                // The target power initialization will be enhanced later,
                // hopefully using an formal optical subsystem.
                // setInitialTargetPortPower(device.id(), deviceEvent.port().number());
                break;
            default:
                break;
        }
    }
}
}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.query.groupby.epinephelinae; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.base.Supplier; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import com.google.common.util.concurrent.ListeningExecutorService; import io.druid.collections.ReferenceCountingResourceHolder; import io.druid.common.utils.IntArrayUtils; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.AllGranularity; import io.druid.java.util.common.guava.Accumulator; import io.druid.query.BaseQuery; import io.druid.query.ColumnSelectorPlus; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.dimension.ColumnSelectorStrategy; import io.druid.query.dimension.ColumnSelectorStrategyFactory; 
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryConfig;
import io.druid.query.groupby.RowBasedColumnSelectorFactory;
import io.druid.query.groupby.epinephelinae.Grouper.BufferComparator;
import io.druid.query.groupby.orderby.DefaultLimitSpec;
import io.druid.query.groupby.orderby.OrderByColumnSpec;
import io.druid.query.groupby.strategy.GroupByStrategyV2;
import io.druid.query.ordering.StringComparator;
import io.druid.query.ordering.StringComparators;
import io.druid.segment.BaseDoubleColumnValueSelector;
import io.druid.segment.BaseFloatColumnValueSelector;
import io.druid.segment.BaseLongColumnValueSelector;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.ColumnValueSelector;
import io.druid.segment.DimensionHandlerUtils;
import io.druid.segment.DimensionSelector;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ValueType;
import io.druid.segment.data.IndexedInts;
import it.unimi.dsi.fastutil.ints.IntArrays;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import org.joda.time.DateTime;

import javax.annotation.Nullable;
import java.io.Closeable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.IntStream;

// This class contains shared code between GroupByMergingQueryRunnerV2 and GroupByRowProcessor.
public class RowBasedGrouperHelper
{
  // Entry in dictionary, node pointer in reverseDictionary, hash + k/v/next pointer in reverseDictionary nodes
  private static final int ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY = Long.BYTES * 5 + Integer.BYTES;

  // Sentinel values used by the single-threaded createGrouperAccumulatorPair overload below.
  private static final int SINGLE_THREAD_CONCURRENCY_HINT = -1;
  private static final int UNKNOWN_THREAD_PRIORITY = -1;
  private static final long UNKNOWN_TIMEOUT = -1L;

  /**
   * Create a single-threaded grouper and accumulator.
   * Delegates to the full overload with sentinel concurrency/priority/timeout values.
   */
  public static Pair<Grouper<RowBasedKey>, Accumulator<AggregateResult, Row>> createGrouperAccumulatorPair(
      final GroupByQuery query,
      final boolean isInputRaw,
      final Map<String, ValueType> rawInputRowSignature,
      final GroupByQueryConfig config,
      final Supplier<ByteBuffer> bufferSupplier,
      final LimitedTemporaryStorage temporaryStorage,
      final ObjectMapper spillMapper,
      final AggregatorFactory[] aggregatorFactories,
      final int mergeBufferSize
  )
  {
    return createGrouperAccumulatorPair(
        query,
        isInputRaw,
        rawInputRowSignature,
        config,
        bufferSupplier,
        null,
        SINGLE_THREAD_CONCURRENCY_HINT,
        temporaryStorage,
        spillMapper,
        aggregatorFactories,
        null,
        UNKNOWN_THREAD_PRIORITY,
        false,
        UNKNOWN_TIMEOUT,
        mergeBufferSize
    );
  }

  /**
   * If isInputRaw is true, transformations such as timestamp truncation and extraction functions have not
   * been applied to the input rows yet, for example, in a nested query, if an extraction function is being
   * applied in the outer query to a field of the inner query. This method must apply those transformations.
   */
  public static Pair<Grouper<RowBasedKey>, Accumulator<AggregateResult, Row>> createGrouperAccumulatorPair(
      final GroupByQuery query,
      final boolean isInputRaw,
      final Map<String, ValueType> rawInputRowSignature,
      final GroupByQueryConfig config,
      final Supplier<ByteBuffer> bufferSupplier,
      @Nullable final ReferenceCountingResourceHolder<ByteBuffer> combineBufferHolder,
      final int concurrencyHint,
      final LimitedTemporaryStorage temporaryStorage,
      final ObjectMapper spillMapper,
      final AggregatorFactory[] aggregatorFactories,
      @Nullable final ListeningExecutorService grouperSorter,
      final int priority,
      final boolean hasQueryTimeout,
      final long queryTimeoutAt,
      final int mergeBufferSize
  )
  {
    // concurrencyHint >= 1 for concurrent groupers, -1 for single-threaded
    Preconditions.checkArgument(concurrencyHint >= 1 || concurrencyHint == -1, "invalid concurrencyHint");

    final List<ValueType> valueTypes = DimensionHandlerUtils.getValueTypesFromDimensionSpecs(query.getDimensions());
    final GroupByQueryConfig querySpecificConfig = config.withOverrides(query);
    // A null universal timestamp means per-row timestamps must be part of the grouping key.
    final boolean includeTimestamp = GroupByStrategyV2.getUniversalTimestamp(query) == null;
    // The row currently being aggregated is handed to the column selectors via this ThreadLocal.
    final ThreadLocal<Row> columnSelectorRow = new ThreadLocal<>();
    final ColumnSelectorFactory columnSelectorFactory = query.getVirtualColumns().wrap(
        RowBasedColumnSelectorFactory.create(
            columnSelectorRow,
            rawInputRowSignature
        )
    );
    final boolean willApplyLimitPushDown = query.isApplyLimitPushDown();
    final DefaultLimitSpec limitSpec = willApplyLimitPushDown ? (DefaultLimitSpec) query.getLimitSpec() : null;
    boolean sortHasNonGroupingFields = false;
    if (willApplyLimitPushDown) {
      sortHasNonGroupingFields = DefaultLimitSpec.sortingOrderHasNonGroupingFields(
          limitSpec,
          query.getDimensions()
      );
    }

    final Grouper.KeySerdeFactory<RowBasedKey> keySerdeFactory = new RowBasedKeySerdeFactory(
        includeTimestamp,
        query.getContextSortByDimsFirst(),
        query.getDimensions(),
        // Split the merging dictionary budget evenly across concurrent groupers.
        querySpecificConfig.getMaxMergingDictionarySize() / (concurrencyHint == -1 ? 1 : concurrencyHint),
        valueTypes,
        aggregatorFactories,
        limitSpec
    );
    final Grouper<RowBasedKey> grouper;
    if (concurrencyHint == -1) {
      grouper = new SpillingGrouper<>(
          bufferSupplier,
          keySerdeFactory,
          columnSelectorFactory,
          aggregatorFactories,
          querySpecificConfig.getBufferGrouperMaxSize(),
          querySpecificConfig.getBufferGrouperMaxLoadFactor(),
          querySpecificConfig.getBufferGrouperInitialBuckets(),
          temporaryStorage,
          spillMapper,
          true,
          limitSpec,
          sortHasNonGroupingFields,
          mergeBufferSize
      );
    } else {
      final Grouper.KeySerdeFactory<RowBasedKey> combineKeySerdeFactory = new RowBasedKeySerdeFactory(
          includeTimestamp,
          query.getContextSortByDimsFirst(),
          query.getDimensions(),
          querySpecificConfig.getMaxMergingDictionarySize(), // use entire dictionary space for combining key serde
          valueTypes,
          aggregatorFactories,
          limitSpec
      );
      grouper = new ConcurrentGrouper<>(
          querySpecificConfig,
          bufferSupplier,
          combineBufferHolder,
          keySerdeFactory,
          combineKeySerdeFactory,
          columnSelectorFactory,
          aggregatorFactories,
          temporaryStorage,
          spillMapper,
          concurrencyHint,
          limitSpec,
          sortHasNonGroupingFields,
          grouperSorter,
          priority,
          hasQueryTimeout,
          queryTimeoutAt
      );
    }

    // Key layout: [optional timestamp] + one slot per dimension.
    final int keySize = includeTimestamp ? query.getDimensions().size() + 1 : query.getDimensions().size();
    final ValueExtractFunction valueExtractFn = makeValueExtractFunction(
        query,
        isInputRaw,
        includeTimestamp,
        columnSelectorFactory,
        valueTypes
    );

    final Accumulator<AggregateResult, Row> accumulator = new Accumulator<AggregateResult, Row>()
    {
      @Override
      public AggregateResult accumulate(
          final AggregateResult priorResult,
          final Row row
      )
      {
        BaseQuery.checkInterrupted();

        if (priorResult != null && !priorResult.isOk()) {
          // Pass-through error returns without doing more work.
          return priorResult;
        }

        if (!grouper.isInitialized()) {
          grouper.init();
        }

        // Expose the row to the selectors, extract the key, aggregate, then clear.
        columnSelectorRow.set(row);
        final Comparable[] key = new Comparable[keySize];
        valueExtractFn.apply(row, key);

        final AggregateResult aggregateResult = grouper.aggregate(new RowBasedKey(key));
        columnSelectorRow.set(null);

        return aggregateResult;
      }
    };

    return new Pair<>(grouper, accumulator);
  }

  // Extracts the key timestamp (millis) for a row.
  private interface TimestampExtractFunction
  {
    long apply(Row row);
  }

  private static TimestampExtractFunction makeTimestampExtractFunction(
      final GroupByQuery query,
      final boolean isInputRaw
  )
  {
    if (isInputRaw) {
      if (query.getGranularity() instanceof AllGranularity) {
        // ALL granularity: every row maps to the start of the first query interval.
        return new TimestampExtractFunction()
        {
          @Override
          public long apply(Row row)
          {
            return query.getIntervals().get(0).getStartMillis();
          }
        };
      } else {
        // Truncate the row timestamp to its granularity bucket.
        return new TimestampExtractFunction()
        {
          @Override
          public long apply(Row row)
          {
            return query.getGranularity().bucketStart(row.getTimestamp()).getMillis();
          }
        };
      }
    } else {
      // Non-raw input: timestamps were already transformed upstream.
      return new TimestampExtractFunction()
      {
        @Override
        public long apply(Row row)
        {
          return row.getTimestampFromEpoch();
        }
      };
    }
  }

  // Fills the key array for a row; returns the same array for convenience.
  private interface ValueExtractFunction
  {
    Comparable[] apply(Row row, Comparable[] key);
  }

  private static ValueExtractFunction makeValueExtractFunction(
      final GroupByQuery query,
      final boolean isInputRaw,
      final boolean includeTimestamp,
      final ColumnSelectorFactory columnSelectorFactory,
      final List<ValueType> valueTypes
  )
  {
    // Body continues in the next chunk.
    final TimestampExtractFunction timestampExtractFn = includeTimestamp ?
                                                        // Continuation of makeValueExtractFunction(...) from the previous chunk.
                                                        makeTimestampExtractFunction(query, isInputRaw) : null;

    // Per-dimension converters that coerce values to the dimension's ValueType.
    final Function<Comparable, Comparable>[] valueConvertFns = makeValueConvertFunctions(valueTypes);

    if (isInputRaw) {
      // Raw input: read dimension values through column selectors.
      final Supplier<Comparable>[] inputRawSuppliers = getValueSuppliersForDimensions(
          columnSelectorFactory,
          query.getDimensions()
      );
      if (includeTimestamp) {
        // key[0] = timestamp, key[1..] = dimensions.
        return new ValueExtractFunction()
        {
          @Override
          public Comparable[] apply(Row row, Comparable[] key)
          {
            key[0] = timestampExtractFn.apply(row);
            for (int i = 1; i < key.length; i++) {
              final Comparable val = inputRawSuppliers[i - 1].get();
              key[i] = valueConvertFns[i - 1].apply(val);
            }
            return key;
          }
        };
      } else {
        return new ValueExtractFunction()
        {
          @Override
          public Comparable[] apply(Row row, Comparable[] key)
          {
            for (int i = 0; i < key.length; i++) {
              final Comparable val = inputRawSuppliers[i].get();
              key[i] = valueConvertFns[i].apply(val);
            }
            return key;
          }
        };
      }
    } else {
      // Already-transformed input: read dimension values directly from the row.
      if (includeTimestamp) {
        return new ValueExtractFunction()
        {
          @Override
          public Comparable[] apply(Row row, Comparable[] key)
          {
            key[0] = timestampExtractFn.apply(row);
            for (int i = 1; i < key.length; i++) {
              final Comparable val = (Comparable) row.getRaw(query.getDimensions().get(i - 1).getOutputName());
              key[i] = valueConvertFns[i - 1].apply(val);
            }
            return key;
          }
        };
      } else {
        return new ValueExtractFunction()
        {
          @Override
          public Comparable[] apply(Row row, Comparable[] key)
          {
            for (int i = 0; i < key.length; i++) {
              final Comparable val = (Comparable) row.getRaw(query.getDimensions().get(i).getOutputName());
              key[i] = valueConvertFns[i].apply(val);
            }
            return key;
          }
        };
      }
    }
  }

  /**
   * Wraps a grouper's entry iterator as Rows, closing {@code closeable} when done.
   * Anonymous Function body continues in the next chunk.
   */
  public static CloseableGrouperIterator<RowBasedKey, Row> makeGrouperIterator(
      final Grouper<RowBasedKey> grouper,
      final GroupByQuery query,
      final Closeable closeable
  )
  {
    final boolean includeTimestamp = GroupByStrategyV2.getUniversalTimestamp(query) == null;

    return new CloseableGrouperIterator<>(
        grouper,
        true,
        new Function<Grouper.Entry<RowBasedKey>, Row>()
        {
          @Override
          public Row apply(Grouper.Entry<RowBasedKey> entry)
          {
            // Continuation of makeGrouperIterator's entry-to-Row function from
            // the previous chunk.
            Map<String, Object> theMap = Maps.newLinkedHashMap();

            // Get timestamp, maybe.
            final DateTime timestamp;
            final int dimStart;
            if (includeTimestamp) {
              timestamp = query.getGranularity().toDateTime(((long) (entry.getKey().getKey()[0])));
              dimStart = 1;
            } else {
              timestamp = null;
              dimStart = 0;
            }

            // Add dimensions.
            for (int i = dimStart; i < entry.getKey().getKey().length; i++) {
              Object dimVal = entry.getKey().getKey()[i];
              theMap.put(
                  query.getDimensions().get(i - dimStart).getOutputName(),
                  // Empty strings were used as the null placeholder; convert back.
                  dimVal instanceof String ? Strings.emptyToNull((String) dimVal) : dimVal
              );
            }

            // Add aggregations.
            for (int i = 0; i < entry.getValues().length; i++) {
              theMap.put(query.getAggregatorSpecs().get(i).getName(), entry.getValues()[i]);
            }

            return new MapBasedRow(timestamp, theMap);
          }
        },
        closeable
    );
  }

  /**
   * Grouping key: optional timestamp followed by one slot per dimension.
   * JSON round-trips via the raw array (see fromJsonArray / getKey).
   */
  static class RowBasedKey
  {
    private final Object[] key;

    RowBasedKey(final Object[] key)
    {
      this.key = key;
    }

    @JsonCreator
    public static RowBasedKey fromJsonArray(final Object[] key)
    {
      // Type info is lost during serde:
      // Floats may be deserialized as doubles, Longs may be deserialized as integers, convert them back
      for (int i = 0; i < key.length; i++) {
        if (key[i] instanceof Integer) {
          key[i] = ((Integer) key[i]).longValue();
        } else if (key[i] instanceof Double) {
          key[i] = ((Double) key[i]).floatValue();
        }
      }
      return new RowBasedKey(key);
    }

    @JsonValue
    public Object[] getKey()
    {
      return key;
    }

    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      RowBasedKey that = (RowBasedKey) o;
      return Arrays.equals(key, that.key);
    }

    @Override
    public int hashCode()
    {
      return Arrays.hashCode(key);
    }

    @Override
    public String toString()
    {
      return Arrays.toString(key);
    }
  }

  private static final InputRawSupplierColumnSelectorStrategyFactory STRATEGY_FACTORY =
      new InputRawSupplierColumnSelectorStrategyFactory();

  // Per-type strategy that builds a value supplier over a column selector.
  // Return type of the single method is cut at this chunk boundary.
  private interface InputRawSupplierColumnSelectorStrategy<ValueSelectorType> extends ColumnSelectorStrategy
  {
    Supplier<Comparable>
    // Continuation of InputRawSupplierColumnSelectorStrategy — return type is in
    // the previous chunk.
    makeInputRawSupplier(ValueSelectorType selector);
  }

  // String dimensions: supplies the first row value, with null mapped to "".
  private static class StringInputRawSupplierColumnSelectorStrategy
      implements InputRawSupplierColumnSelectorStrategy<DimensionSelector>
  {
    @Override
    public Supplier<Comparable> makeInputRawSupplier(DimensionSelector selector)
    {
      return new Supplier<Comparable>()
      {
        @Override
        public Comparable get()
        {
          final String value;
          IndexedInts index = selector.getRow();
          // Multi-valued rows: only the first value is used here.
          value = index.size() == 0 ? "" : selector.lookupName(index.get(0));
          return Strings.nullToEmpty(value);
        }
      };
    }
  }

  // Dispatches on column ValueType to the appropriate supplier strategy.
  private static class InputRawSupplierColumnSelectorStrategyFactory
      implements ColumnSelectorStrategyFactory<InputRawSupplierColumnSelectorStrategy>
  {
    @Override
    public InputRawSupplierColumnSelectorStrategy makeColumnSelectorStrategy(
        ColumnCapabilities capabilities,
        ColumnValueSelector selector
    )
    {
      ValueType type = capabilities.getType();
      switch (type) {
        case STRING:
          return new StringInputRawSupplierColumnSelectorStrategy();
        case LONG:
          return (InputRawSupplierColumnSelectorStrategy<BaseLongColumnValueSelector>)
              columnSelector -> columnSelector::getLong;
        case FLOAT:
          return (InputRawSupplierColumnSelectorStrategy<BaseFloatColumnValueSelector>)
              columnSelector -> columnSelector::getFloat;
        case DOUBLE:
          return (InputRawSupplierColumnSelectorStrategy<BaseDoubleColumnValueSelector>)
              columnSelector -> columnSelector::getDouble;
        default:
          throw new IAE("Cannot create query type helper from invalid type [%s]", type);
      }
    }
  }

  // Builds one value supplier per dimension via the strategy factory above.
  @SuppressWarnings("unchecked")
  private static Supplier<Comparable>[] getValueSuppliersForDimensions(
      final ColumnSelectorFactory columnSelectorFactory,
      final List<DimensionSpec> dimensions
  )
  {
    final Supplier[] inputRawSuppliers = new Supplier[dimensions.size()];
    final ColumnSelectorPlus[] selectorPluses = DimensionHandlerUtils.createColumnSelectorPluses(
        STRATEGY_FACTORY,
        dimensions,
        columnSelectorFactory
    );
    for (int i = 0; i < selectorPluses.length; i++) {
      final ColumnSelectorPlus<InputRawSupplierColumnSelectorStrategy> selectorPlus = selectorPluses[i];
      final InputRawSupplierColumnSelectorStrategy strategy = selectorPlus.getColumnSelectorStrategy();
      inputRawSuppliers[i] = strategy.makeInputRawSupplier(selectorPlus.getSelector());
    }

    return inputRawSuppliers;
  }

  // One converter per dimension coercing arbitrary values to that dimension's
  // ValueType (nulls become the type's "empty" value: "", 0L, 0.f, 0.0).
  @SuppressWarnings("unchecked")
  private static Function<Comparable, Comparable>[] makeValueConvertFunctions(
      final List<ValueType> valueTypes
  )
  {
    final Function<Comparable, Comparable>[] functions = new Function[valueTypes.size()];
    for (int i = 0; i < functions.length; i++) {
      ValueType type = valueTypes.get(i);
      // Subquery post-aggs aren't added to the rowSignature (see rowSignatureFor() in GroupByQueryHelper) because
      // their types aren't known, so default to String handling.
      type = type == null ? ValueType.STRING : type;
      switch (type) {
        case STRING:
          functions[i] = input -> input == null ? "" : input.toString();
          break;
        case LONG:
          functions[i] = input -> {
            final Long val = DimensionHandlerUtils.convertObjectToLong(input);
            return val == null ? 0L : val;
          };
          break;
        case FLOAT:
          functions[i] = input -> {
            final Float val = DimensionHandlerUtils.convertObjectToFloat(input);
            return val == null ? 0.f : val;
          };
          break;
        case DOUBLE:
          functions[i] = input -> {
            Double val = DimensionHandlerUtils.convertObjectToDouble(input);
            // Expression continues in the next chunk.
            return val == null ?
                   // Continuation of makeValueConvertFunctions(...) from the previous chunk.
                   0.0 : val;
          };
          break;
        default:
          throw new IAE("invalid type: [%s]", type);
      }
    }
    return functions;
  }

  /**
   * Builds RowBasedKeySerde instances and the object comparators used when
   * merging sorted grouper output.
   */
  private static class RowBasedKeySerdeFactory implements Grouper.KeySerdeFactory<RowBasedKey>
  {
    private final boolean includeTimestamp;
    private final boolean sortByDimsFirst;
    private final int dimCount;
    private final long maxDictionarySize;
    private final DefaultLimitSpec limitSpec;
    private final List<DimensionSpec> dimensions;
    final AggregatorFactory[] aggregatorFactories;
    private final List<ValueType> valueTypes;

    RowBasedKeySerdeFactory(
        boolean includeTimestamp,
        boolean sortByDimsFirst,
        List<DimensionSpec> dimensions,
        long maxDictionarySize,
        List<ValueType> valueTypes,
        final AggregatorFactory[] aggregatorFactories,
        DefaultLimitSpec limitSpec
    )
    {
      this.includeTimestamp = includeTimestamp;
      this.sortByDimsFirst = sortByDimsFirst;
      this.dimensions = dimensions;
      this.dimCount = dimensions.size();
      this.maxDictionarySize = maxDictionarySize;
      this.limitSpec = limitSpec;
      this.aggregatorFactories = aggregatorFactories;
      this.valueTypes = valueTypes;
    }

    @Override
    public long getMaxDictionarySize()
    {
      return maxDictionarySize;
    }

    @Override
    public Grouper.KeySerde<RowBasedKey> factorize()
    {
      // null dictionary -> serde generates its dictionary at runtime.
      return new RowBasedKeySerde(
          includeTimestamp,
          sortByDimsFirst,
          dimensions,
          maxDictionarySize,
          limitSpec,
          valueTypes,
          null
      );
    }

    @Override
    public Grouper.KeySerde<RowBasedKey> factorizeWithDictionary(List<String> dictionary)
    {
      return new RowBasedKeySerde(
          includeTimestamp,
          sortByDimsFirst,
          dimensions,
          maxDictionarySize,
          limitSpec,
          valueTypes,
          dictionary
      );
    }

    @Override
    public Comparator<Grouper.Entry<RowBasedKey>> objectComparator(boolean forceDefaultOrder)
    {
      // Limit push-down uses the limit spec's own ordering, including aggregators.
      if (limitSpec != null && !forceDefaultOrder) {
        return objectComparatorWithAggs();
      }

      if (includeTimestamp) {
        if (sortByDimsFirst) {
          // Dimensions first, timestamp as tiebreaker.
          return new Comparator<Grouper.Entry<RowBasedKey>>()
          {
            @Override
            public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
            {
              final int cmp = compareDimsInRows(entry1.getKey(), entry2.getKey(), 1);
              if (cmp != 0) {
                return cmp;
              }

              return Longs.compare((long) entry1.getKey().getKey()[0], (long) entry2.getKey().getKey()[0]);
            }
          };
        } else {
          // Timestamp first, dimensions as tiebreaker.
          return new Comparator<Grouper.Entry<RowBasedKey>>()
          {
            @Override
            public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
            {
              final int timeCompare = Longs.compare(
                  (long) entry1.getKey().getKey()[0],
                  (long) entry2.getKey().getKey()[0]
              );

              if (timeCompare != 0) {
                return timeCompare;
              }

              return compareDimsInRows(entry1.getKey(), entry2.getKey(), 1);
            }
          };
        }
      } else {
        return new Comparator<Grouper.Entry<RowBasedKey>>()
        {
          @Override
          public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
          {
            return compareDimsInRows(entry1.getKey(), entry2.getKey(), 0);
          }
        };
      }
    }

    private Comparator<Grouper.Entry<RowBasedKey>> objectComparatorWithAggs()
    {
      // use the actual sort order from the limitspec if pushing down to merge partial results correctly
      final List<Boolean> needsReverses = Lists.newArrayList();
      final List<Boolean> aggFlags = Lists.newArrayList();
      final List<Boolean> isNumericField = Lists.newArrayList();
      final List<StringComparator> comparators = Lists.newArrayList();
      final List<Integer> fieldIndices = Lists.newArrayList();
      final Set<Integer> orderByIndices = new HashSet<>();

      for (OrderByColumnSpec orderSpec : limitSpec.getColumns()) {
        final boolean needsReverse = orderSpec.getDirection() != OrderByColumnSpec.Direction.ASCENDING;
        int dimIndex = OrderByColumnSpec.getDimIndexForOrderBy(orderSpec, dimensions);
        if (dimIndex >= 0) {
          fieldIndices.add(dimIndex);
          orderByIndices.add(dimIndex);
          needsReverses.add(needsReverse);
          aggFlags.add(false);
          final ValueType type = dimensions.get(dimIndex).getOutputType();
          isNumericField.add(ValueType.isNumeric(type));
          comparators.add(orderSpec.getDimensionComparator());
        } else {
          int aggIndex = OrderByColumnSpec.getAggIndexForOrderBy(orderSpec, Arrays.asList(aggregatorFactories));
          if (aggIndex >= 0) {
            // NOTE(review): an orderSpec matching neither a dimension nor an
            // aggregator is silently skipped here — presumably impossible by
            // the time push-down is decided; confirm upstream validation.
            fieldIndices.add(aggIndex);
            needsReverses.add(needsReverse);
            aggFlags.add(true);
            final String typeName = aggregatorFactories[aggIndex].getTypeName();
            isNumericField.add(ValueType.isNumeric(ValueType.fromString(typeName)));
            comparators.add(orderSpec.getDimensionComparator());
          }
        }
      }

      // Remaining dimensions not mentioned in the order-by, in declaration order.
      for (int i = 0; i < dimCount; i++) {
        if (!orderByIndices.contains(i)) {
          fieldIndices.add(i);
          aggFlags.add(false);
          needsReverses.add(false);
          final ValueType type = dimensions.get(i).getOutputType();
          isNumericField.add(ValueType.isNumeric(type));
          comparators.add(StringComparators.LEXICOGRAPHIC);
        }
      }

      if (includeTimestamp) {
        if (sortByDimsFirst) {
          return new Comparator<Grouper.Entry<RowBasedKey>>()
          {
            @Override
            public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
            {
              final int cmp = compareDimsInRowsWithAggs(
                  entry1,
                  entry2,
                  1,
                  needsReverses,
                  aggFlags,
                  fieldIndices,
                  isNumericField,
                  comparators
              );
              if (cmp != 0) {
                return cmp;
              }

              return Longs.compare((long) entry1.getKey().getKey()[0], (long) entry2.getKey().getKey()[0]);
            }
          };
        } else {
          return new Comparator<Grouper.Entry<RowBasedKey>>()
          {
            @Override
            public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
            {
              final int timeCompare = Longs.compare((long) entry1.getKey().getKey()[0],
                                                    (long) entry2.getKey().getKey()[0]);

              if (timeCompare != 0) {
                return timeCompare;
              }

              return compareDimsInRowsWithAggs(
                  entry1,
                  entry2,
                  1,
                  needsReverses,
                  aggFlags,
                  fieldIndices,
                  isNumericField,
                  comparators
              );
            }
          };
        }
      } else {
        return new Comparator<Grouper.Entry<RowBasedKey>>()
        {
          @Override
          public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey> entry2)
          {
            return compareDimsInRowsWithAggs(
                entry1,
                entry2,
                0,
                needsReverses,
                aggFlags,
                fieldIndices,
                isNumericField,
                comparators
            );
          }
        };
      }
    }

    // Lexicographic comparison of the dimension slots, starting at dimStart.
    private static int compareDimsInRows(RowBasedKey key1, RowBasedKey key2, int dimStart)
    {
      for (int i = dimStart; i < key1.getKey().length; i++) {
        final int cmp = ((Comparable) key1.getKey()[i]).compareTo(key2.getKey()[i]);
        if (cmp != 0) {
          return cmp;
        }
      }

      return 0;
    }

    // Compares entries field-by-field per the precomputed order-by plan;
    // descending order is implemented by swapping lhs/rhs (needsReverse).
    private static int compareDimsInRowsWithAggs(
        Grouper.Entry<RowBasedKey> entry1,
        Grouper.Entry<RowBasedKey> entry2,
        int dimStart,
        final List<Boolean> needsReverses,
        final List<Boolean> aggFlags,
        final List<Integer> fieldIndices,
        final List<Boolean> isNumericField,
        final List<StringComparator> comparators
    )
    {
      for (int i = 0; i < fieldIndices.size(); i++) {
        final int fieldIndex = fieldIndices.get(i);
        final boolean needsReverse = needsReverses.get(i);
        final int cmp;
        final Comparable lhs;
        final Comparable rhs;

        if (aggFlags.get(i)) {
          // Aggregator fields live in the entry values, not the key.
          if (needsReverse) {
            lhs = (Comparable) entry2.getValues()[fieldIndex];
            rhs = (Comparable) entry1.getValues()[fieldIndex];
          } else {
            lhs = (Comparable) entry1.getValues()[fieldIndex];
            rhs = (Comparable) entry2.getValues()[fieldIndex];
          }
        } else {
          if (needsReverse) {
            lhs = (Comparable) entry2.getKey().getKey()[fieldIndex + dimStart];
            rhs = (Comparable) entry1.getKey().getKey()[fieldIndex + dimStart];
          } else {
            lhs = (Comparable) entry1.getKey().getKey()[fieldIndex + dimStart];
            rhs = (Comparable) entry2.getKey().getKey()[fieldIndex + dimStart];
          }
        }

        final StringComparator comparator = comparators.get(i);

        if (isNumericField.get(i) && comparator.equals(StringComparators.NUMERIC)) {
          // use natural comparison
          cmp = lhs.compareTo(rhs);
        } else {
          cmp = comparator.compare(lhs.toString(), rhs.toString());
        }

        if (cmp != 0) {
          return cmp;
        }
      }

      return 0;
    }
  }

  // Rough per-entry memory estimate used to bound the string dictionary.
  static long estimateStringKeySize(String key)
  {
    return (long) key.length() * Character.BYTES + ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY;
  }

  /**
   * Serializes RowBasedKeys to a fixed-size binary form; strings go through a
   * (possibly runtime-built) dictionary. Field list is cut at this chunk boundary.
   */
  private static class RowBasedKeySerde implements Grouper.KeySerde<RowBasedGrouperHelper.RowBasedKey>
  {
    private static final int DICTIONARY_INITIAL_CAPACITY = 10000;
    private static final int UNKNOWN_DICTIONARY_ID = -1;

    private final boolean includeTimestamp;
    private final boolean sortByDimsFirst;
    private final List<DimensionSpec> dimensions;
    private final int dimCount;
    private final int keySize;
    private final ByteBuffer keyBuffer;
    private final
RowBasedKeySerdeHelper[] serdeHelpers;
// Buffer comparators extracted from serdeHelpers, index-aligned with dimensions.
private final BufferComparator[] serdeHelperComparators;
private final DefaultLimitSpec limitSpec;
private final List<ValueType> valueTypes;
// Runtime dictionary generation is enabled iff no pre-built dictionary was given.
private final boolean enableRuntimeDictionaryGeneration;

private final List<String> dictionary;
private final Object2IntMap<String> reverseDictionary;

// Size limiting for the dictionary, in (roughly estimated) bytes.
private final long maxDictionarySize;
private long currentEstimatedSize = 0;

// dictionary id -> rank of the sorted dictionary
// This is initialized in the constructor and bufferComparator() with static dictionary and dynamic dictionary,
// respectively.
private int[] rankOfDictionaryIds = null;

/**
 * @param dictionary pre-built string dictionary, or null to build one at
 *                   runtime (bounded by maxDictionarySize)
 */
RowBasedKeySerde(
    final boolean includeTimestamp,
    final boolean sortByDimsFirst,
    final List<DimensionSpec> dimensions,
    final long maxDictionarySize,
    final DefaultLimitSpec limitSpec,
    final List<ValueType> valueTypes,
    @Nullable final List<String> dictionary
)
{
  this.includeTimestamp = includeTimestamp;
  this.sortByDimsFirst = sortByDimsFirst;
  this.dimensions = dimensions;
  this.dimCount = dimensions.size();
  this.valueTypes = valueTypes;
  this.limitSpec = limitSpec;
  this.enableRuntimeDictionaryGeneration = dictionary == null;
  this.dictionary = enableRuntimeDictionaryGeneration
                    ? new ArrayList<>(DICTIONARY_INITIAL_CAPACITY)
                    : dictionary;
  this.reverseDictionary = enableRuntimeDictionaryGeneration
                           ? new Object2IntOpenHashMap<>(DICTIONARY_INITIAL_CAPACITY)
                           : new Object2IntOpenHashMap<>(dictionary.size());
  // getInt() on a missing key will return UNKNOWN_DICTIONARY_ID (-1).
  this.reverseDictionary.defaultReturnValue(UNKNOWN_DICTIONARY_ID);
  this.maxDictionarySize = maxDictionarySize;
  this.serdeHelpers = makeSerdeHelpers(limitSpec != null, enableRuntimeDictionaryGeneration);
  this.serdeHelperComparators = new BufferComparator[serdeHelpers.length];
  Arrays.setAll(serdeHelperComparators, i -> serdeHelpers[i].getBufferComparator());
  // Fixed key width: optional leading timestamp long plus per-dim value sizes.
  this.keySize = (includeTimestamp ? Long.BYTES : 0) + getTotalKeySize();
  this.keyBuffer = ByteBuffer.allocate(keySize);

  if (!enableRuntimeDictionaryGeneration) {
    // Static dictionary: verify it fits the size budget, build the reverse
    // mapping, and precompute sorted ranks once up front.
    final long initialDictionarySize = dictionary.stream()
                                                 .mapToLong(RowBasedGrouperHelper::estimateStringKeySize)
                                                 .sum();
    Preconditions.checkState(
        maxDictionarySize >= initialDictionarySize,
        "Dictionary size[%s] exceeds threshold[%s]",
        initialDictionarySize,
        maxDictionarySize
    );

    for (int i = 0; i < dictionary.size(); i++) {
      reverseDictionary.put(dictionary.get(i), i);
    }

    initializeRankOfDictionaryIds();
  }
}

/**
 * Builds rankOfDictionaryIds: for each dictionary id, its rank in the
 * lexicographically sorted dictionary (sort the ids by their strings, then
 * invert the resulting permutation in place).
 */
private void initializeRankOfDictionaryIds()
{
  final int dictionarySize = dictionary.size();
  rankOfDictionaryIds = IntStream.range(0, dictionarySize).toArray();
  IntArrays.quickSort(
      rankOfDictionaryIds,
      (i1, i2) -> dictionary.get(i1).compareTo(dictionary.get(i2))
  );
  IntArrayUtils.inverse(rankOfDictionaryIds);
}

@Override
public int keySize()
{
  return keySize;
}

@Override
public Class<RowBasedKey> keyClazz()
{
  return RowBasedKey.class;
}

@Override
public List<String> getDictionary()
{
  return dictionary;
}

/**
 * Packs a key into the shared keyBuffer: timestamp long first when present,
 * then each dimension via its serde helper. Returns null when a helper
 * refuses the value (the runtime dictionary would exceed its size limit).
 */
@Override
public ByteBuffer toByteBuffer(RowBasedKey key)
{
  keyBuffer.rewind();
  final int dimStart;
  if (includeTimestamp) {
    keyBuffer.putLong((long) key.getKey()[0]);
    dimStart = 1;
  } else {
    dimStart = 0;
  }
  for (int i = dimStart; i < key.getKey().length; i++) {
    if (!serdeHelpers[i - dimStart].putToKeyBuffer(key, i)) {
      return null;
    }
  }

  keyBuffer.flip();
  return keyBuffer;
}

/**
 * Inverse of toByteBuffer: reads the packed key at {@code position},
 * restoring the optional timestamp into slot 0 and each dimension value via
 * its serde helper.
 */
@Override
public RowBasedKey fromByteBuffer(ByteBuffer buffer, int position)
{
  final int dimStart;
  final Comparable[] key;
  final int dimsPosition;

  if (includeTimestamp) {
    key = new Comparable[dimCount + 1];
    key[0] = buffer.getLong(position);
    dimsPosition = position + Long.BYTES;
    dimStart = 1;
  } else {
    key = new Comparable[dimCount];
    dimsPosition = position;
    dimStart = 0;
  }

  for (int i = dimStart; i < key.length; i++) {
    // Writes value from buffer to key[i]
    serdeHelpers[i - dimStart].getFromByteBuffer(buffer, dimsPosition, i, key);
  }

  return new RowBasedKey(key);
}

@Override
public Grouper.BufferComparator bufferComparator() { if (rankOfDictionaryIds == null) { initializeRankOfDictionaryIds(); } if (includeTimestamp) { if (sortByDimsFirst) { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { final int cmp = compareDimsInBuffersForNullFudgeTimestamp( serdeHelperComparators, lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); if (cmp != 0) { return cmp; } return Longs.compare(lhsBuffer.getLong(lhsPosition), rhsBuffer.getLong(rhsPosition)); } }; } else { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { final int timeCompare = Longs.compare(lhsBuffer.getLong(lhsPosition), rhsBuffer.getLong(rhsPosition)); if (timeCompare != 0) { return timeCompare; } return compareDimsInBuffersForNullFudgeTimestamp( serdeHelperComparators, lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); } }; } } else { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { for (int i = 0; i < dimCount; i++) { final int cmp = serdeHelperComparators[i].compare( lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); if (cmp != 0) { return cmp; } } return 0; } }; } } @Override public Grouper.BufferComparator bufferComparatorWithAggregators( AggregatorFactory[] aggregatorFactories, int[] aggregatorOffsets ) { final List<RowBasedKeySerdeHelper> adjustedSerdeHelpers; final List<Boolean> needsReverses = Lists.newArrayList(); List<RowBasedKeySerdeHelper> orderByHelpers = new ArrayList<>(); List<RowBasedKeySerdeHelper> otherDimHelpers = new ArrayList<>(); Set<Integer> orderByIndices = new HashSet<>(); int aggCount = 0; boolean needsReverse; for (OrderByColumnSpec orderSpec : limitSpec.getColumns()) { needsReverse = orderSpec.getDirection() != OrderByColumnSpec.Direction.ASCENDING; int dimIndex = 
OrderByColumnSpec.getDimIndexForOrderBy(orderSpec, dimensions); if (dimIndex >= 0) { RowBasedKeySerdeHelper serdeHelper = serdeHelpers[dimIndex]; orderByHelpers.add(serdeHelper); orderByIndices.add(dimIndex); needsReverses.add(needsReverse); } else { int aggIndex = OrderByColumnSpec.getAggIndexForOrderBy(orderSpec, Arrays.asList(aggregatorFactories)); if (aggIndex >= 0) { final RowBasedKeySerdeHelper serdeHelper; final StringComparator stringComparator = orderSpec.getDimensionComparator(); final String typeName = aggregatorFactories[aggIndex].getTypeName(); final int aggOffset = aggregatorOffsets[aggIndex] - Integer.BYTES; aggCount++; final ValueType valueType = ValueType.fromString(typeName); if (!ValueType.isNumeric(valueType)) { throw new IAE("Cannot order by a non-numeric aggregator[%s]", orderSpec); } serdeHelper = makeNumericSerdeHelper(valueType, aggOffset, true, stringComparator); orderByHelpers.add(serdeHelper); needsReverses.add(needsReverse); } } } for (int i = 0; i < dimCount; i++) { if (!orderByIndices.contains(i)) { otherDimHelpers.add(serdeHelpers[i]); needsReverses.add(false); // default to Ascending order if dim is not in an orderby spec } } adjustedSerdeHelpers = orderByHelpers; adjustedSerdeHelpers.addAll(otherDimHelpers); final BufferComparator[] adjustedSerdeHelperComparators = new BufferComparator[adjustedSerdeHelpers.size()]; Arrays.setAll(adjustedSerdeHelperComparators, i -> adjustedSerdeHelpers.get(i).getBufferComparator()); final int fieldCount = dimCount + aggCount; if (includeTimestamp) { if (sortByDimsFirst) { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { final int cmp = compareDimsInBuffersForNullFudgeTimestampForPushDown( adjustedSerdeHelperComparators, needsReverses, fieldCount, lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); if (cmp != 0) { return cmp; } return Longs.compare(lhsBuffer.getLong(lhsPosition), 
rhsBuffer.getLong(rhsPosition)); } }; } else { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { final int timeCompare = Longs.compare(lhsBuffer.getLong(lhsPosition), rhsBuffer.getLong(rhsPosition)); if (timeCompare != 0) { return timeCompare; } int cmp = compareDimsInBuffersForNullFudgeTimestampForPushDown( adjustedSerdeHelperComparators, needsReverses, fieldCount, lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); return cmp; } }; } } else { return new Grouper.BufferComparator() { @Override public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition, int rhsPosition) { for (int i = 0; i < fieldCount; i++) { final int cmp; if (needsReverses.get(i)) { cmp = adjustedSerdeHelperComparators[i].compare( rhsBuffer, lhsBuffer, rhsPosition, lhsPosition ); } else { cmp = adjustedSerdeHelperComparators[i].compare( lhsBuffer, rhsBuffer, lhsPosition, rhsPosition ); } if (cmp != 0) { return cmp; } } return 0; } }; } } @Override public void reset() { if (enableRuntimeDictionaryGeneration) { dictionary.clear(); reverseDictionary.clear(); rankOfDictionaryIds = null; currentEstimatedSize = 0; } } private int getTotalKeySize() { int size = 0; for (RowBasedKeySerdeHelper helper : serdeHelpers) { size += helper.getKeyBufferValueSize(); } return size; } private RowBasedKeySerdeHelper[] makeSerdeHelpers( boolean pushLimitDown, boolean enableRuntimeDictionaryGeneration ) { final List<RowBasedKeySerdeHelper> helpers = new ArrayList<>(); int keyBufferPosition = 0; for (int i = 0; i < dimCount; i++) { final StringComparator stringComparator; if (limitSpec != null) { final String dimName = dimensions.get(i).getOutputName(); stringComparator = DefaultLimitSpec.getComparatorForDimName(limitSpec, dimName); } else { stringComparator = null; } RowBasedKeySerdeHelper helper = makeSerdeHelper( valueTypes.get(i), keyBufferPosition, pushLimitDown, stringComparator, 
enableRuntimeDictionaryGeneration ); keyBufferPosition += helper.getKeyBufferValueSize(); helpers.add(helper); } return helpers.toArray(new RowBasedKeySerdeHelper[helpers.size()]); } private RowBasedKeySerdeHelper makeSerdeHelper( ValueType valueType, int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator, boolean enableRuntimeDictionaryGeneration ) { switch (valueType) { case STRING: if (enableRuntimeDictionaryGeneration) { return new DynamicDictionaryStringRowBasedKeySerdeHelper( keyBufferPosition, pushLimitDown, stringComparator ); } else { return new StaticDictionaryStringRowBasedKeySerdeHelper( keyBufferPosition, pushLimitDown, stringComparator ); } case LONG: case FLOAT: case DOUBLE: return makeNumericSerdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator); default: throw new IAE("invalid type: %s", valueType); } } private RowBasedKeySerdeHelper makeNumericSerdeHelper( ValueType valueType, int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator ) { switch (valueType) { case LONG: return new LongRowBasedKeySerdeHelper(keyBufferPosition, pushLimitDown, stringComparator); case FLOAT: return new FloatRowBasedKeySerdeHelper(keyBufferPosition, pushLimitDown, stringComparator); case DOUBLE: return new DoubleRowBasedKeySerdeHelper(keyBufferPosition, pushLimitDown, stringComparator); default: throw new IAE("invalid type: %s", valueType); } } private static boolean isPrimitiveComparable(boolean pushLimitDown, @Nullable StringComparator stringComparator) { return !pushLimitDown || stringComparator == null || stringComparator.equals(StringComparators.NUMERIC); } private abstract class AbstractStringRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper { final int keyBufferPosition; final BufferComparator bufferComparator; AbstractStringRowBasedKeySerdeHelper( int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator ) { this.keyBufferPosition = 
keyBufferPosition; if (!pushLimitDown) { bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> Ints.compare( rankOfDictionaryIds[lhsBuffer.getInt(lhsPosition + keyBufferPosition)], rankOfDictionaryIds[rhsBuffer.getInt(rhsPosition + keyBufferPosition)] ); } else { final StringComparator realComparator = stringComparator == null ? StringComparators.LEXICOGRAPHIC : stringComparator; bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> { String lhsStr = dictionary.get(lhsBuffer.getInt(lhsPosition + keyBufferPosition)); String rhsStr = dictionary.get(rhsBuffer.getInt(rhsPosition + keyBufferPosition)); return realComparator.compare(lhsStr, rhsStr); }; } } @Override public int getKeyBufferValueSize() { return Integer.BYTES; } @Override public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues) { dimValues[dimValIdx] = dictionary.get(buffer.getInt(initialOffset + keyBufferPosition)); } @Override public BufferComparator getBufferComparator() { return bufferComparator; } } private class DynamicDictionaryStringRowBasedKeySerdeHelper extends AbstractStringRowBasedKeySerdeHelper { DynamicDictionaryStringRowBasedKeySerdeHelper( int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator ) { super(keyBufferPosition, pushLimitDown, stringComparator); } @Override public boolean putToKeyBuffer(RowBasedKey key, int idx) { final int id = addToDictionary((String) key.getKey()[idx]); if (id < 0) { return false; } keyBuffer.putInt(id); return true; } /** * Adds s to the dictionary. If the dictionary's size limit would be exceeded by adding this key, then * this returns -1. 
* * @param s a string * * @return id for this string, or -1 */ private int addToDictionary(final String s) { int idx = reverseDictionary.getInt(s); if (idx == UNKNOWN_DICTIONARY_ID) { final long additionalEstimatedSize = estimateStringKeySize(s); if (currentEstimatedSize + additionalEstimatedSize > maxDictionarySize) { return -1; } idx = dictionary.size(); reverseDictionary.put(s, idx); dictionary.add(s); currentEstimatedSize += additionalEstimatedSize; } return idx; } } private class StaticDictionaryStringRowBasedKeySerdeHelper extends AbstractStringRowBasedKeySerdeHelper { StaticDictionaryStringRowBasedKeySerdeHelper( int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator ) { super(keyBufferPosition, pushLimitDown, stringComparator); } @Override public boolean putToKeyBuffer(RowBasedKey key, int idx) { final String stringKey = (String) key.getKey()[idx]; final int dictIndex = reverseDictionary.getInt(stringKey); if (dictIndex == UNKNOWN_DICTIONARY_ID) { throw new ISE("Cannot find key[%s] from dictionary", stringKey); } keyBuffer.putInt(dictIndex); return true; } } private class LongRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper { final int keyBufferPosition; final BufferComparator bufferComparator; LongRowBasedKeySerdeHelper( int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator ) { this.keyBufferPosition = keyBufferPosition; if (isPrimitiveComparable(pushLimitDown, stringComparator)) { bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> Longs.compare( lhsBuffer.getLong(lhsPosition + keyBufferPosition), rhsBuffer.getLong(rhsPosition + keyBufferPosition) ); } else { bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> { long lhs = lhsBuffer.getLong(lhsPosition + keyBufferPosition); long rhs = rhsBuffer.getLong(rhsPosition + keyBufferPosition); return stringComparator.compare(String.valueOf(lhs), String.valueOf(rhs)); }; } } @Override public int 
getKeyBufferValueSize() { return Long.BYTES; } @Override public boolean putToKeyBuffer(RowBasedKey key, int idx) { keyBuffer.putLong((Long) key.getKey()[idx]); return true; } @Override public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues) { dimValues[dimValIdx] = buffer.getLong(initialOffset + keyBufferPosition); } @Override public BufferComparator getBufferComparator() { return bufferComparator; } } private class FloatRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper { final int keyBufferPosition; final BufferComparator bufferComparator; FloatRowBasedKeySerdeHelper( int keyBufferPosition, boolean pushLimitDown, @Nullable StringComparator stringComparator) { this.keyBufferPosition = keyBufferPosition; if (isPrimitiveComparable(pushLimitDown, stringComparator)) { bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> Float.compare( lhsBuffer.getFloat(lhsPosition + keyBufferPosition), rhsBuffer.getFloat(rhsPosition + keyBufferPosition) ); } else { bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> { float lhs = lhsBuffer.getFloat(lhsPosition + keyBufferPosition); float rhs = rhsBuffer.getFloat(rhsPosition + keyBufferPosition); return stringComparator.compare(String.valueOf(lhs), String.valueOf(rhs)); }; } } @Override public int getKeyBufferValueSize() { return Float.BYTES; } @Override public boolean putToKeyBuffer(RowBasedKey key, int idx) { keyBuffer.putFloat((Float) key.getKey()[idx]); return true; } @Override public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues) { dimValues[dimValIdx] = buffer.getFloat(initialOffset + keyBufferPosition); } @Override public BufferComparator getBufferComparator() { return bufferComparator; } } private class DoubleRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper { final int keyBufferPosition; final BufferComparator bufferComparator; DoubleRowBasedKeySerdeHelper( int 
    keyBufferPosition,
    boolean pushLimitDown,
    @Nullable StringComparator stringComparator
)
{
  this.keyBufferPosition = keyBufferPosition;
  // Primitive double comparison unless a pushed-down limit supplies a
  // non-numeric StringComparator, in which case values are compared as strings.
  if (isPrimitiveComparable(pushLimitDown, stringComparator)) {
    bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> Double.compare(
        lhsBuffer.getDouble(lhsPosition + keyBufferPosition),
        rhsBuffer.getDouble(rhsPosition + keyBufferPosition)
    );
  } else {
    bufferComparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> {
      double lhs = lhsBuffer.getDouble(lhsPosition + keyBufferPosition);
      double rhs = rhsBuffer.getDouble(rhsPosition + keyBufferPosition);
      return stringComparator.compare(String.valueOf(lhs), String.valueOf(rhs));
    };
  }
}

@Override
public int getKeyBufferValueSize()
{
  // A double occupies 8 bytes in the packed key.
  return Double.BYTES;
}

@Override
public boolean putToKeyBuffer(RowBasedKey key, int idx)
{
  keyBuffer.putDouble((Double) key.getKey()[idx]);
  return true;
}

@Override
public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues)
{
  dimValues[dimValIdx] = buffer.getDouble(initialOffset + keyBufferPosition);
}

@Override
public BufferComparator getBufferComparator()
{
  return bufferComparator;
}
}
}

/**
 * Compares the dimension portions of two packed keys whose first Long.BYTES
 * hold a timestamp: each per-dimension comparator is applied at an offset
 * shifted past the timestamp. Returns the first non-zero result, else 0.
 */
private static int compareDimsInBuffersForNullFudgeTimestamp(
    BufferComparator[] serdeHelperComparators,
    ByteBuffer lhsBuffer,
    ByteBuffer rhsBuffer,
    int lhsPosition,
    int rhsPosition
)
{
  for (BufferComparator comparator : serdeHelperComparators) {
    final int cmp = comparator.compare(
        lhsBuffer,
        rhsBuffer,
        lhsPosition + Long.BYTES,
        rhsPosition + Long.BYTES
    );
    if (cmp != 0) {
      return cmp;
    }
  }
  return 0;
}

/**
 * Variant used for limit push-down: needsReverses.get(i) flips the operand
 * order for the i-th field comparator to produce descending order. Offsets
 * skip the leading timestamp long, as in
 * compareDimsInBuffersForNullFudgeTimestamp.
 */
private static int compareDimsInBuffersForNullFudgeTimestampForPushDown(
    BufferComparator[] serdeHelperComparators,
    List<Boolean> needsReverses,
    int dimCount,
    ByteBuffer lhsBuffer,
    ByteBuffer rhsBuffer,
    int lhsPosition,
    int rhsPosition
)
{
  for (int i = 0; i < dimCount; i++) {
    final int cmp;
    if (needsReverses.get(i)) {
      cmp = serdeHelperComparators[i].compare(
          rhsBuffer,
          lhsBuffer,
          rhsPosition + Long.BYTES,
          lhsPosition + Long.BYTES
      );
    } else {
      cmp = serdeHelperComparators[i].compare(
          lhsBuffer,
          rhsBuffer,
          lhsPosition + Long.BYTES,
          rhsPosition + Long.BYTES
      );
    }
    if (cmp != 0) {
      return cmp;
    }
  }
  return 0;
}
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.09.30 at 06:15:10 PM PDT // package org.w3._1999.xhtml; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attGroup ref="{http://www.w3.org/1999/xhtml}cellvalign"/> * &lt;attGroup ref="{http://www.w3.org/1999/xhtml}attrs"/> * &lt;attGroup ref="{http://www.w3.org/1999/xhtml}cellhalign"/> * &lt;attribute name="span" type="{http://www.w3.org/1999/xhtml}Number" default="1" /> * &lt;attribute name="width" type="{http://www.w3.org/1999/xhtml}MultiLength" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") @XmlRootElement(name = "col") public class Col { @XmlAttribute(name = "span") protected BigInteger span; @XmlAttribute(name = "width") protected String width; @XmlAttribute(name = "valign") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String valign; @XmlAttribute(name = "onclick") protected String onclick; @XmlAttribute(name = "ondblclick") protected String ondblclick; @XmlAttribute(name = "onmousedown") protected String onmousedown; @XmlAttribute(name = "onmouseup") protected String onmouseup; @XmlAttribute(name = "onmouseover") protected String onmouseover; @XmlAttribute(name = "onmousemove") protected String onmousemove; @XmlAttribute(name = "onmouseout") protected String onmouseout; @XmlAttribute(name = "onkeypress") protected String onkeypress; @XmlAttribute(name = "onkeydown") protected String onkeydown; @XmlAttribute(name = "onkeyup") protected String onkeyup; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") protected String lang; @XmlAttribute(name = "dir") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute(name = "id") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "class") @XmlSchemaType(name = "NMTOKENS") protected List<String> clazz; @XmlAttribute(name = "style") protected String style; 
@XmlAttribute(name = "title") protected String title; @XmlAttribute(name = "align") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String align; @XmlAttribute(name = "char") protected String _char; @XmlAttribute(name = "charoff") protected String charoff; /** * Gets the value of the span property. * * @return * possible object is * {@link BigInteger } * */ public BigInteger getSpan() { if (span == null) { return new BigInteger("1"); } else { return span; } } /** * Sets the value of the span property. * * @param value * allowed object is * {@link BigInteger } * */ public void setSpan(BigInteger value) { this.span = value; } /** * Gets the value of the width property. * * @return * possible object is * {@link String } * */ public String getWidth() { return width; } /** * Sets the value of the width property. * * @param value * allowed object is * {@link String } * */ public void setWidth(String value) { this.width = value; } /** * Gets the value of the valign property. * * @return * possible object is * {@link String } * */ public String getValign() { return valign; } /** * Sets the value of the valign property. * * @param value * allowed object is * {@link String } * */ public void setValign(String value) { this.valign = value; } /** * Gets the value of the onclick property. * * @return * possible object is * {@link String } * */ public String getOnclick() { return onclick; } /** * Sets the value of the onclick property. * * @param value * allowed object is * {@link String } * */ public void setOnclick(String value) { this.onclick = value; } /** * Gets the value of the ondblclick property. * * @return * possible object is * {@link String } * */ public String getOndblclick() { return ondblclick; } /** * Sets the value of the ondblclick property. * * @param value * allowed object is * {@link String } * */ public void setOndblclick(String value) { this.ondblclick = value; } /** * Gets the value of the onmousedown property. 
* * @return * possible object is * {@link String } * */ public String getOnmousedown() { return onmousedown; } /** * Sets the value of the onmousedown property. * * @param value * allowed object is * {@link String } * */ public void setOnmousedown(String value) { this.onmousedown = value; } /** * Gets the value of the onmouseup property. * * @return * possible object is * {@link String } * */ public String getOnmouseup() { return onmouseup; } /** * Sets the value of the onmouseup property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseup(String value) { this.onmouseup = value; } /** * Gets the value of the onmouseover property. * * @return * possible object is * {@link String } * */ public String getOnmouseover() { return onmouseover; } /** * Sets the value of the onmouseover property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseover(String value) { this.onmouseover = value; } /** * Gets the value of the onmousemove property. * * @return * possible object is * {@link String } * */ public String getOnmousemove() { return onmousemove; } /** * Sets the value of the onmousemove property. * * @param value * allowed object is * {@link String } * */ public void setOnmousemove(String value) { this.onmousemove = value; } /** * Gets the value of the onmouseout property. * * @return * possible object is * {@link String } * */ public String getOnmouseout() { return onmouseout; } /** * Sets the value of the onmouseout property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseout(String value) { this.onmouseout = value; } /** * Gets the value of the onkeypress property. * * @return * possible object is * {@link String } * */ public String getOnkeypress() { return onkeypress; } /** * Sets the value of the onkeypress property. 
* * @param value * allowed object is * {@link String } * */ public void setOnkeypress(String value) { this.onkeypress = value; } /** * Gets the value of the onkeydown property. * * @return * possible object is * {@link String } * */ public String getOnkeydown() { return onkeydown; } /** * Sets the value of the onkeydown property. * * @param value * allowed object is * {@link String } * */ public void setOnkeydown(String value) { this.onkeydown = value; } /** * Gets the value of the onkeyup property. * * @return * possible object is * {@link String } * */ public String getOnkeyup() { return onkeyup; } /** * Sets the value of the onkeyup property. * * @param value * allowed object is * {@link String } * */ public void setOnkeyup(String value) { this.onkeyup = value; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. * * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the clazz property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the clazz property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getClazz().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getClazz() { if (clazz == null) { clazz = new ArrayList<String>(); } return this.clazz; } /** * Gets the value of the style property. * * @return * possible object is * {@link String } * */ public String getStyle() { return style; } /** * Sets the value of the style property. * * @param value * allowed object is * {@link String } * */ public void setStyle(String value) { this.style = value; } /** * Gets the value of the title property. * * @return * possible object is * {@link String } * */ public String getTitle() { return title; } /** * Sets the value of the title property. * * @param value * allowed object is * {@link String } * */ public void setTitle(String value) { this.title = value; } /** * Gets the value of the align property. * * @return * possible object is * {@link String } * */ public String getAlign() { return align; } /** * Sets the value of the align property. * * @param value * allowed object is * {@link String } * */ public void setAlign(String value) { this.align = value; } /** * Gets the value of the char property. * * @return * possible object is * {@link String } * */ public String getChar() { return _char; } /** * Sets the value of the char property. * * @param value * allowed object is * {@link String } * */ public void setChar(String value) { this._char = value; } /** * Gets the value of the charoff property. * * @return * possible object is * {@link String } * */ public String getCharoff() { return charoff; } /** * Sets the value of the charoff property. * * @param value * allowed object is * {@link String } * */ public void setCharoff(String value) { this.charoff = value; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.pherf.workload.mt.generators; import com.lmax.disruptor.BlockingWaitStrategy; import com.lmax.disruptor.EventFactory; import com.lmax.disruptor.ExceptionHandler; import com.lmax.disruptor.RingBuffer; import com.lmax.disruptor.WorkHandler; import com.lmax.disruptor.dsl.Disruptor; import com.lmax.disruptor.dsl.ProducerType; import org.apache.hadoop.hbase.util.Threads; import org.apache.phoenix.pherf.configuration.DataModel; import org.apache.phoenix.pherf.configuration.Scenario; import org.apache.phoenix.pherf.util.PhoenixUtil; import org.apache.phoenix.pherf.workload.mt.handlers.PherfWorkHandler; import org.apache.phoenix.pherf.workload.mt.operations.TenantOperationFactory; import org.apache.phoenix.pherf.workload.mt.handlers.TenantOperationWorkHandler; import org.apache.phoenix.thirdparty.com.google.common.collect.Lists; import org.apache.phoenix.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.List; import java.util.Properties; /** * A base class for all load event generators. 
 * <p>Subclasses supply the tenant/operation sampling strategy by implementing
 * {@link #next()}; this base class owns the LMAX Disruptor plumbing that fans
 * the sampled operations out to a pool of {@link PherfWorkHandler} workers.
 */
public abstract class BaseLoadEventGenerator
        implements LoadEventGenerator<TenantOperationInfo> {
    // Default number of worker handlers created per scenario when the caller
    // supplies none (overridable via "pherf.mt.handlers_per_scenario").
    protected static final int DEFAULT_NUM_HANDLER_PER_SCENARIO = 4;
    // Default Disruptor ring-buffer size (overridable via
    // "pherf.mt.buffer_size_per_scenario"); Disruptor requires a power of two.
    protected static final int DEFAULT_BUFFER_SIZE = 8192;
    protected static final Logger LOGGER = LoggerFactory.getLogger(
            BaseLoadEventGenerator.class);

    protected Disruptor<TenantOperationEvent> disruptor;
    protected List<PherfWorkHandler> handlers;
    protected final Properties properties;
    protected final TenantOperationFactory operationFactory;
    protected final ExceptionHandler exceptionHandler;

    /**
     * Default Disruptor exception handler: logs the failure and rethrows as a
     * {@link RuntimeException} so the event processor does not continue on a
     * corrupted event stream.
     */
    private static class WorkloadExceptionHandler implements ExceptionHandler {
        private static final Logger LOGGER = LoggerFactory.getLogger(
                WorkloadExceptionHandler.class);

        @Override
        public void handleEventException(Throwable ex, long sequence, Object event) {
            LOGGER.error("Sequence=" + sequence + ", event=" + event, ex);
            throw new RuntimeException(ex);
        }

        @Override
        public void handleOnStartException(Throwable ex) {
            LOGGER.error("On Start", ex);
            throw new RuntimeException(ex);
        }

        @Override
        public void handleOnShutdownException(Throwable ex) {
            LOGGER.error("On Shutdown", ex);
            throw new RuntimeException(ex);
        }
    }

    /**
     * Mutable event envelope placed on the Disruptor ring buffer. Instances are
     * pre-allocated by {@link #EVENT_FACTORY} and reused; publishers overwrite
     * the payload via {@link #setTenantOperationInfo(TenantOperationInfo)}.
     */
    public static class TenantOperationEvent {
        TenantOperationInfo tenantOperationInfo;

        public TenantOperationInfo getTenantOperationInfo() {
            return tenantOperationInfo;
        }

        public void setTenantOperationInfo(TenantOperationInfo tenantOperationInfo) {
            this.tenantOperationInfo = tenantOperationInfo;
        }

        public static final EventFactory<TenantOperationEvent> EVENT_FACTORY =
                new EventFactory<TenantOperationEvent>() {
            public TenantOperationEvent newInstance() {
                return new TenantOperationEvent();
            }
        };
    }

    /**
     * Creates a generator with caller-supplied work handlers and the default
     * exception handler.
     */
    public BaseLoadEventGenerator(PhoenixUtil phoenixUtil, DataModel model,
            Scenario scenario, List<PherfWorkHandler> workers,
            Properties properties) {
        this(phoenixUtil, model, scenario, workers,
                new WorkloadExceptionHandler(), properties);
    }

    /**
     * Creates a generator with default work handlers (built lazily from
     * {@code properties}) and the default exception handler.
     */
    public BaseLoadEventGenerator(PhoenixUtil phoenixUtil, DataModel model,
            Scenario scenario, Properties properties) {
        this(phoenixUtil, model, scenario, null,
                new WorkloadExceptionHandler(), properties);
    }

    /**
     * Designated constructor.
     *
     * @param workers work handlers to use; when {@code null} or empty a default
     *        pool is created via {@link #getWorkHandlers(Properties)}.
     * @param properties base properties; NOTE: mutated in place — any
     *        scenario-level Phoenix properties are merged into it.
     */
    public BaseLoadEventGenerator(PhoenixUtil phoenixUtil, DataModel model,
            Scenario scenario, List<PherfWorkHandler> workers,
            ExceptionHandler exceptionHandler, Properties properties) {
        operationFactory = new TenantOperationFactory(phoenixUtil, model, scenario);
        if (scenario.getPhoenixProperties() != null) {
            // Scenario-scoped properties override/extend the supplied ones.
            properties.putAll(scenario.getPhoenixProperties());
        }
        this.properties = properties;

        if (workers == null || workers.isEmpty()) {
            workers = getWorkHandlers(properties);
        }
        this.handlers = workers;
        this.exceptionHandler = exceptionHandler;
    }

    @Override
    public PhoenixUtil getPhoenixUtil() {
        return operationFactory.getPhoenixUtil();
    }

    @Override
    public Scenario getScenario() {
        return operationFactory.getScenario();
    }

    @Override
    public DataModel getModel() {
        return operationFactory.getModel();
    }

    @Override
    public Properties getProperties() {
        return this.properties;
    }

    @Override
    public TenantOperationFactory getOperationFactory() {
        return operationFactory;
    }

    /**
     * Builds and starts the Disruptor, then synchronously publishes
     * {@code numOperations} sampled operations onto the ring buffer.
     *
     * <p>Order matters here: handlers must be wired (and the exception handler
     * installed) before {@code disruptor.start()}, and each event must be
     * claimed with {@code ringBuffer.next()} before it is written and
     * published. This thread is the single producer ({@code ProducerType.SINGLE}).
     *
     * @throws Exception on any failure while setting up or publishing.
     */
    @Override
    public void start() throws Exception {
        Scenario scenario = operationFactory.getScenario();
        String currentThreadName = Thread.currentThread().getName();

        // Ring buffer size; configurable per scenario.
        int bufferSize = DEFAULT_BUFFER_SIZE;
        if (properties.containsKey("pherf.mt.buffer_size_per_scenario")) {
            bufferSize = Integer.parseInt((String)properties.get(
                    "pherf.mt.buffer_size_per_scenario"));
        }

        disruptor = new Disruptor<>(TenantOperationEvent.EVENT_FACTORY, bufferSize,
                new ThreadFactoryBuilder()
                        .setNameFormat(currentThreadName + "." + scenario.getName())
                        .setUncaughtExceptionHandler(Threads.LOGGING_EXCEPTION_HANDLER)
                        .build(),
                ProducerType.SINGLE, new BlockingWaitStrategy());

        this.disruptor.setDefaultExceptionHandler(this.exceptionHandler);
        this.disruptor.handleEventsWithWorkerPool(
                this.handlers.toArray(new WorkHandler[] {}));
        RingBuffer<TenantOperationEvent> ringBuffer = this.disruptor.start();

        long numOperations = scenario.getLoadProfile().getNumOperations();
        while (numOperations > 0) {
            TenantOperationInfo sample = next();
            operationFactory.initializeTenant(sample);
            --numOperations;
            // Publishers claim events in sequence
            long sequence = ringBuffer.next();
            TenantOperationEvent event = ringBuffer.get(sequence);
            event.setTenantOperationInfo(sample);
            // make the event available to EventProcessors
            ringBuffer.publish(sequence);
            // NOTE(review): String.format runs per published event even if INFO
            // is disabled — consider parameterized logging in a future change.
            LOGGER.info(String.format("published : %s:%s:%d, %d, %d",
                    scenario.getName(), scenario.getTableName(), numOperations,
                    ringBuffer.getCursor(), sequence));
        }
    }

    /**
     * Shuts the Disruptor down, which waits for in-flight events to drain.
     */
    @Override
    public void stop() throws Exception {
        // Wait for the handlers to finish the jobs
        if (disruptor != null) {
            disruptor.shutdown();
        }
        // TODO need to handle asynchronous result publishing
    }

    /**
     * Builds the default worker pool. Handler ids are
     * {@code <hostname>.<1..n>}; the pool size comes from
     * "pherf.mt.handlers_per_scenario" or {@link #DEFAULT_NUM_HANDLER_PER_SCENARIO}.
     *
     * @throws RuntimeException if the local hostname cannot be resolved.
     */
    @Override
    public List<PherfWorkHandler> getWorkHandlers(Properties properties) {
        String handlerName = "";
        try {
            handlerName = InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);
        }

        int handlerCount = DEFAULT_NUM_HANDLER_PER_SCENARIO;
        if (properties.containsKey("pherf.mt.handlers_per_scenario")) {
            handlerCount = Integer.parseInt((String)properties.get(
                    "pherf.mt.handlers_per_scenario"));
        }
        List<PherfWorkHandler> workers = Lists.newArrayListWithCapacity(handlerCount);
        for (int i = 0; i < handlerCount; i++) {
            String handlerId = String.format("%s.%d", handlerName, i + 1);
            workers.add(new TenantOperationWorkHandler(operationFactory, handlerId));
        }
        return workers;
    }

    /** Samples the next tenant operation to publish; implemented by subclasses. */
    abstract public TenantOperationInfo next();
}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.Collection; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableDescriptors; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import 
org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;

/**
 * Test open and close of regions using zk.
 *
 * <p>Runs against a shared two-server mini HBase cluster started once in
 * {@link #beforeAllTests()}. The tests drive region open/close through the
 * master's assignment manager and poll its handler trackers
 * ({@code wasClosedHandlerCalled}/{@code wasOpenedHandlerCalled}) to observe
 * the transitions.
 */
@Category(MediumTests.class)
public class TestZKBasedOpenCloseRegion {
  private static final Log LOG = LogFactory.getLog(TestZKBasedOpenCloseRegion.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final TableName TABLENAME =
      TableName.valueOf("TestZKBasedOpenCloseRegion");
  private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"),
    Bytes.toBytes("b"), Bytes.toBytes("c")};
  // Number of regions created for TABLENAME; set once in beforeAllTests().
  private static int countOfRegions;

  /**
   * Starts a two-region-server mini cluster, creates the test table split into
   * multiple regions, and enables the assignment manager's handler trackers.
   */
  @BeforeClass public static void beforeAllTests() throws Exception {
    Configuration c = TEST_UTIL.getConfiguration();
    c.setBoolean("dfs.support.append", true);
    c.setInt("hbase.regionserver.info.port", 0);
    TEST_UTIL.startMiniCluster(2);
    TEST_UTIL.createTable(TABLENAME, FAMILIES);
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    countOfRegions = TEST_UTIL.createMultiRegions(t, getTestFamily());
    waitUntilAllRegionsAssigned();
    addToEachStartKey(countOfRegions);
    t.close();
    // Track open/close handler invocations so tests can poll for completion.
    TEST_UTIL.getHBaseCluster().getMaster().assignmentManager
        .initializeHandlerTrackers();
  }

  @AfterClass public static void afterAllTests() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Ensures every test starts with at least two live region servers, all
   * regions assigned, and no regions in transition.
   */
  @Before public void setup() throws IOException {
    if (TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().size() < 2) {
      // Need at least two servers.
      LOG.info("Started new server=" +
        TEST_UTIL.getHBaseCluster().startRegionServer());
    }
    waitUntilAllRegionsAssigned();
    waitOnRIT();
  }

  /**
   * Test we reopen a region once closed.
   * @throws Exception
   */
  @Test (timeout=300000) public void testReOpenRegion() throws Exception {
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " +
      cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer =
      TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(
      ProtobufUtil.getOnlineRegions(regionServer));
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    LOG.info("Unassign " + hri.getRegionNameAsString());
    cluster.getMaster().assignmentManager.unassign(hri);

    // Wait for the close handler, then for the subsequent reopen.
    while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    LOG.info("Done with testReOpenRegion");
  }

  /**
   * Returns the first non-meta region in {@code regions}, or {@code null} if
   * every region is a meta region.
   */
  private HRegionInfo getNonMetaRegion(final Collection<HRegionInfo> regions) {
    HRegionInfo hri = null;
    for (HRegionInfo i: regions) {
      LOG.info(i.getRegionNameAsString());
      if (!i.isMetaRegion()) {
        hri = i;
        break;
      }
    }
    return hri;
  }

  /**
   * This test shows how a region won't be able to be assigned to a RS
   * if it's already "processing" it.
   * @throws Exception
   */
  @Test public void testRSAlreadyProcessingRegion() throws Exception {
    LOG.info("starting testRSAlreadyProcessingRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();

    HRegionServer hr0 =
      cluster.getLiveRegionServerThreads().get(0).getRegionServer();
    HRegionServer hr1 =
      cluster.getLiveRegionServerThreads().get(1).getRegionServer();
    HRegionInfo hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(hr0));

    // fake that hr1 is processing the region
    hr1.getRegionsInTransitionInRS().putIfAbsent(hri.getEncodedNameAsBytes(), true);

    // now ask the master to move the region to hr1, will fail
    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
      Bytes.toBytes(hr1.getServerName().toString()));

    // make sure the region came back
    assertEquals(hr1.getOnlineRegion(hri.getEncodedNameAsBytes()), null);

    // remove the block and reset the boolean
    hr1.getRegionsInTransitionInRS().remove(hri.getEncodedNameAsBytes());

    // now try moving a region when there is no region in transition.
    hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(hr1));

    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
      Bytes.toBytes(hr0.getServerName().toString()));

    while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
      Threads.sleep(100);
    }

    // make sure the region has moved from the original RS
    assertTrue(hr1.getOnlineRegion(hri.getEncodedNameAsBytes()) == null);
  }

  /**
   * Unassigns a region and polls until the master's close handler has run.
   */
  @Test (timeout=300000) public void testCloseRegion() throws Exception {
    LOG.info("Running testCloseRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " +
      cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer =
      TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(
      ProtobufUtil.getOnlineRegions(regionServer));
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    cluster.getMaster().assignmentManager.unassign(hri);

    while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
      Threads.sleep(100);
    }
    LOG.info("Done with testCloseRegion");
  }

  private void waitOnRIT() {
    // Close worked but we are going to open the region elsewhere.  Before going on,
    // make sure this completes.
    while (TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
        getRegionStates().isRegionsInTransition()) {
      LOG.info("Waiting on regions in transition: " +
        TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
          getRegionStates().getRegionsInTransition());
      Threads.sleep(10);
    }
  }

  /**
   * If region open fails with IOException in openRegion() while doing tableDescriptors.get()
   * the region should not add into regionsInTransitionInRS map
   * @throws Exception
   */
  @Test public void testRegionOpenFailsDueToIOException() throws Exception {
    HRegionInfo REGIONINFO = new HRegionInfo(TableName.valueOf("t"),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_START_ROW);
    HRegionServer regionServer = TEST_UTIL.getHBaseCluster().getRegionServer(0);
    TableDescriptors htd = Mockito.mock(TableDescriptors.class);
    // Swap in a mock TableDescriptors that always throws; restored below.
    Object orizinalState = Whitebox.getInternalState(regionServer,"tableDescriptors");
    Whitebox.setInternalState(regionServer, "tableDescriptors", htd);
    Mockito.doThrow(new IOException()).when(htd).get((TableName) Mockito.any());
    try {
      ProtobufUtil.openRegion(regionServer, REGIONINFO);
      fail("It should throw IOException ");
    } catch (IOException e) {
      // expected: open must fail while table descriptors are unreadable
    }
    Whitebox.setInternalState(regionServer, "tableDescriptors", orizinalState);
    assertFalse("Region should not be in RIT",
        regionServer.getRegionsInTransitionInRS()
            .containsKey(REGIONINFO.getEncodedNameAsBytes()));
  }

  /**
   * Polls hbase:meta until every one of the {@link #countOfRegions} regions has
   * a server assigned.
   */
  private static void waitUntilAllRegionsAssigned() throws IOException {
    HTable meta = new HTable(TEST_UTIL.getConfiguration(),
      TableName.META_TABLE_NAME);
    while (true) {
      int rows = 0;
      Scan scan = new Scan();
      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
      ResultScanner s = meta.getScanner(scan);
      for (Result r = null; (r = s.next()) != null;) {
        byte [] b =
          r.getValue(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
        if (b == null || b.length <= 0) {
          break;
        }
        rows++;
      }
      s.close();
      // If I get to here and all rows have a Server, then all have been assigned.
      if (rows >= countOfRegions) {
        break;
      }
      LOG.info("Found=" + rows);
      Threads.sleep(1000);
    }
    meta.close();
  }

  /*
   * Add to each of the regions in hbase:meta a value.  Key is the startrow of the
   * region (except it's 'aaa' for the first region).  Actual value is the row name.
   * @param expected
   * @return
   * @throws IOException
   */
  private static int addToEachStartKey(final int expected) throws IOException {
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    HTable meta = new HTable(TEST_UTIL.getConfiguration(),
        TableName.META_TABLE_NAME);
    int rows = 0;
    Scan scan = new Scan();
    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    ResultScanner s = meta.getScanner(scan);
    for (Result r = null; (r = s.next()) != null;) {
      HRegionInfo hri = HRegionInfo.getHRegionInfo(r);
      if (hri == null) break;
      if(!hri.getTable().equals(TABLENAME)) {
        continue;
      }
      // If start key, add 'aaa'.
      byte [] row = getStartKey(hri);
      Put p = new Put(row);
      p.setDurability(Durability.SKIP_WAL);
      p.add(getTestFamily(), getTestQualifier(), row);
      t.put(p);
      rows++;
    }
    s.close();
    Assert.assertEquals(expected, rows);
    t.close();
    meta.close();
    return rows;
  }

  /** Returns the region's start key, substituting 'aaa' for the empty first key. */
  private static byte [] getStartKey(final HRegionInfo hri) {
    return Bytes.equals(HConstants.EMPTY_START_ROW, hri.getStartKey())?
        Bytes.toBytes("aaa"): hri.getStartKey();
  }

  private static byte [] getTestFamily() {
    return FAMILIES[0];
  }

  private static byte [] getTestQualifier() {
    return getTestFamily();
  }

  /** Convenience entry point for running testCloseRegion standalone. */
  public static void main(String args[]) throws Exception {
    TestZKBasedOpenCloseRegion.beforeAllTests();
    TestZKBasedOpenCloseRegion test = new TestZKBasedOpenCloseRegion();
    test.setup();
    test.testCloseRegion();
    TestZKBasedOpenCloseRegion.afterAllTests();
  }
}
package org.mondo.collaboration.security.query;

import java.util.Arrays;
import java.util.List;
import org.eclipse.incquery.runtime.api.IPatternMatch;
import org.eclipse.incquery.runtime.api.impl.BasePatternMatch;
import org.eclipse.incquery.runtime.exception.IncQueryException;
import org.mondo.collaboration.security.query.util.LockAHelperQuerySpecification;
import wt.Control;
import wt.Signal;

/**
 * Pattern-specific match representation of the org.mondo.collaboration.security.query.lockAHelper pattern,
 * to be used in conjunction with {@link LockAHelperMatcher}.
 *
 * <p>Class fields correspond to parameters of the pattern. Fields with value null are considered unassigned.
 * Each instance is a (possibly partial) substitution of pattern parameters,
 * usable to represent a match of the pattern in the result of a query,
 * or to specify the bound (fixed) input parameters when issuing a query.
 *
 * <p>Generated code (IncQuery); mutability is decided by the concrete subclass
 * ({@link Mutable} vs {@link Immutable}) — all setters throw
 * {@link UnsupportedOperationException} on immutable instances.
 *
 * @see LockAHelperMatcher
 * @see LockAHelperProcessor
 *
 */
@SuppressWarnings("all")
public abstract class LockAHelperMatch extends BasePatternMatch {
  private Signal fSignal;

  private String fCycle;

  private Integer fValue;

  private Control fCtrl;

  // Parameter names, in pattern order; backs parameterNames().
  private static List<String> parameterNames = makeImmutableList("signal", "cycle", "value", "ctrl");

  private LockAHelperMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    this.fSignal = pSignal;
    this.fCycle = pCycle;
    this.fValue = pValue;
    this.fCtrl = pCtrl;
  }

  /** Generic by-name accessor; returns null for unknown parameter names. */
  @Override
  public Object get(final String parameterName) {
    if ("signal".equals(parameterName)) return this.fSignal;
    if ("cycle".equals(parameterName)) return this.fCycle;
    if ("value".equals(parameterName)) return this.fValue;
    if ("ctrl".equals(parameterName)) return this.fCtrl;
    return null;
  }

  public Signal getSignal() {
    return this.fSignal;
  }

  public String getCycle() {
    return this.fCycle;
  }

  public Integer getValue() {
    return this.fValue;
  }

  public Control getCtrl() {
    return this.fCtrl;
  }

  /**
   * Generic by-name mutator; only legal on mutable instances.
   *
   * @return true if the parameter name was recognized and set, false otherwise.
   * @throws UnsupportedOperationException if this match is immutable.
   */
  @Override
  public boolean set(final String parameterName, final Object newValue) {
    if (!isMutable()) throw new java.lang.UnsupportedOperationException();
    if ("signal".equals(parameterName) ) {
    	this.fSignal = (wt.Signal) newValue;
    	return true;
    }
    if ("cycle".equals(parameterName) ) {
    	this.fCycle = (java.lang.String) newValue;
    	return true;
    }
    if ("value".equals(parameterName) ) {
    	this.fValue = (java.lang.Integer) newValue;
    	return true;
    }
    if ("ctrl".equals(parameterName) ) {
    	this.fCtrl = (wt.Control) newValue;
    	return true;
    }
    return false;
  }

  public void setSignal(final Signal pSignal) {
    if (!isMutable()) throw new java.lang.UnsupportedOperationException();
    this.fSignal = pSignal;
  }

  public void setCycle(final String pCycle) {
    if (!isMutable()) throw new java.lang.UnsupportedOperationException();
    this.fCycle = pCycle;
  }

  public void setValue(final Integer pValue) {
    if (!isMutable()) throw new java.lang.UnsupportedOperationException();
    this.fValue = pValue;
  }

  public void setCtrl(final Control pCtrl) {
    if (!isMutable()) throw new java.lang.UnsupportedOperationException();
    this.fCtrl = pCtrl;
  }

  @Override
  public String patternName() {
    return "org.mondo.collaboration.security.query.lockAHelper";
  }

  @Override
  public List<String> parameterNames() {
    return LockAHelperMatch.parameterNames;
  }

  /** Parameter values in pattern order: signal, cycle, value, ctrl. */
  @Override
  public Object[] toArray() {
    return new Object[]{fSignal, fCycle, fValue, fCtrl};
  }

  /** Returns this if already immutable, otherwise an immutable copy. */
  @Override
  public LockAHelperMatch toImmutable() {
    return isMutable() ? newMatch(fSignal, fCycle, fValue, fCtrl) : this;
  }

  @Override
  public String prettyPrint() {
    StringBuilder result = new StringBuilder();
    result.append("\"signal\"=" + prettyPrintValue(fSignal) + ", ");
    result.append("\"cycle\"=" + prettyPrintValue(fCycle) + ", ");
    result.append("\"value\"=" + prettyPrintValue(fValue) + ", ");
    result.append("\"ctrl\"=" + prettyPrintValue(fCtrl)
    );
    return result.toString();
  }

  // Standard null-safe field-by-field hash; consistent with equals() below.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((fSignal == null) ? 0 : fSignal.hashCode());
    result = prime * result + ((fCycle == null) ? 0 : fCycle.hashCode());
    result = prime * result + ((fValue == null) ? 0 : fValue.hashCode());
    result = prime * result + ((fCtrl == null) ? 0 : fCtrl.hashCode());
    return result;
  }

  // Two-tier equality: fast field comparison against other LockAHelperMatch
  // instances, and specification + toArray() comparison against any other
  // IPatternMatch implementation (the generated cross-implementation contract).
  @Override
  public boolean equals(final Object obj) {
    if (this == obj)
    	return true;
    if (!(obj instanceof LockAHelperMatch)) { // this should be infrequent
    	if (obj == null) {
    		return false;
    	}
    	if (!(obj instanceof IPatternMatch)) {
    		return false;
    	}
    	IPatternMatch otherSig  = (IPatternMatch) obj;
    	if (!specification().equals(otherSig.specification()))
    		return false;
    	return Arrays.deepEquals(toArray(), otherSig.toArray());
    }
    LockAHelperMatch other = (LockAHelperMatch) obj;
    if (fSignal == null) {if (other.fSignal != null) return false;}
    else if (!fSignal.equals(other.fSignal)) return false;
    if (fCycle == null) {if (other.fCycle != null) return false;}
    else if (!fCycle.equals(other.fCycle)) return false;
    if (fValue == null) {if (other.fValue != null) return false;}
    else if (!fValue.equals(other.fValue)) return false;
    if (fCtrl == null) {if (other.fCtrl != null) return false;}
    else if (!fCtrl.equals(other.fCtrl)) return false;
    return true;
  }

  @Override
  public LockAHelperQuerySpecification specification() {
    try {
    	return LockAHelperQuerySpecification.instance();
    } catch (IncQueryException ex) {
     	// This cannot happen, as the match object can only be instantiated if the query specification exists
     	throw new IllegalStateException (ex);
    }
  }

  /**
   * Returns an empty, mutable match.
   * Fields of the mutable match can be filled to create a partial match, usable as matcher input.
   *
   * @return the empty match.
   *
   */
  public static LockAHelperMatch newEmptyMatch() {
    return new Mutable(null, null, null, null);
  }

  /**
   * Returns a mutable (partial) match.
   * Fields of the mutable match can be filled to create a partial match, usable as matcher input.
   *
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pCycle the fixed value of pattern parameter cycle, or null if not bound.
   * @param pValue the fixed value of pattern parameter value, or null if not bound.
   * @param pCtrl the fixed value of pattern parameter ctrl, or null if not bound.
   * @return the new, mutable (partial) match object.
   *
   */
  public static LockAHelperMatch newMutableMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return new Mutable(pSignal, pCycle, pValue, pCtrl);
  }

  /**
   * Returns a new (partial) match.
   * This can be used e.g. to call the matcher with a partial match.
   * <p>The returned match will be immutable. Use {@link #newEmptyMatch()} to obtain a mutable match object.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pCycle the fixed value of pattern parameter cycle, or null if not bound.
   * @param pValue the fixed value of pattern parameter value, or null if not bound.
   * @param pCtrl the fixed value of pattern parameter ctrl, or null if not bound.
   * @return the (partial) match object.
   *
   */
  public static LockAHelperMatch newMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return new Immutable(pSignal, pCycle, pValue, pCtrl);
  }

  /** Mutable concrete variant: setters are permitted. */
  private static final class Mutable extends LockAHelperMatch {
    Mutable(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
      super(pSignal, pCycle, pValue, pCtrl);
    }

    @Override
    public boolean isMutable() {
      return true;
    }
  }

  /** Immutable concrete variant: setters throw UnsupportedOperationException. */
  private static final class Immutable extends LockAHelperMatch {
    Immutable(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
      super(pSignal, pCycle, pValue, pCtrl);
    }

    @Override
    public boolean isMutable() {
      return false;
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package myservice.mynamespace.data; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.List; import java.util.Locale; import org.apache.olingo.commons.api.Constants; import org.apache.olingo.commons.api.data.Entity; import org.apache.olingo.commons.api.data.EntityCollection; import org.apache.olingo.commons.api.data.Link; import org.apache.olingo.commons.api.data.Property; import org.apache.olingo.commons.api.data.ValueType; import org.apache.olingo.commons.api.edm.Edm; import org.apache.olingo.commons.api.edm.EdmEntitySet; import org.apache.olingo.commons.api.edm.EdmEntityType; import org.apache.olingo.commons.api.edm.EdmKeyPropertyRef; import org.apache.olingo.commons.api.edm.EdmNavigationProperty; import org.apache.olingo.commons.api.ex.ODataRuntimeException; import org.apache.olingo.commons.api.http.HttpMethod; import org.apache.olingo.commons.api.http.HttpStatusCode; import org.apache.olingo.server.api.OData; import org.apache.olingo.server.api.ODataApplicationException; import org.apache.olingo.server.api.deserializer.DeserializerException; import org.apache.olingo.server.api.uri.UriParameter; import 
org.apache.olingo.server.api.uri.UriResourceEntitySet; import org.apache.olingo.server.api.uri.UriResourceNavigation; import myservice.mynamespace.service.DemoEdmProvider; import myservice.mynamespace.util.Util; public class Storage { private OData odata; private Edm edm; final private TransactionalEntityManager manager; public Storage(final OData odata, final Edm edm) { this.odata = odata; this.edm = edm; this.manager = new TransactionalEntityManager(edm); initProductSampleData(); initCategorySampleData(); linkProductsAndCategories(); } /* PUBLIC FACADE */ public void beginTransaction() throws ODataApplicationException { manager.beginTransaction(); } public void rollbackTransaction() throws ODataApplicationException { manager.rollbackTransaction(); } public void commitTransaction() throws ODataApplicationException { manager.commitTransaction(); } public EntityCollection readEntitySetData(EdmEntitySet edmEntitySet) throws ODataApplicationException { if (edmEntitySet.getName().equals(DemoEdmProvider.ES_PRODUCTS_NAME)) { return getEntityCollection(manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME)); } else if(edmEntitySet.getName().equals(DemoEdmProvider.ES_CATEGORIES_NAME)) { return getEntityCollection(manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME)); } return null; } public Entity readEntityData(EdmEntitySet edmEntitySet, List<UriParameter> keyParams) throws ODataApplicationException { EdmEntityType edmEntityType = edmEntitySet.getEntityType(); if (edmEntitySet.getName().equals(DemoEdmProvider.ES_PRODUCTS_NAME)) { return getEntity(edmEntityType, keyParams, manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME)); } else if(edmEntitySet.getName().equals(DemoEdmProvider.ES_CATEGORIES_NAME)) { return getEntity(edmEntityType, keyParams, manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME)); } return null; } public Entity createEntityData(EdmEntitySet edmEntitySet, Entity entityToCreate, String rawServiceUri) throws 
ODataApplicationException { EdmEntityType edmEntityType = edmEntitySet.getEntityType(); if (edmEntitySet.getName().equals(DemoEdmProvider.ES_PRODUCTS_NAME)) { return createEntity(edmEntitySet, edmEntityType, entityToCreate, manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME), rawServiceUri); } else if(edmEntitySet.getName().equals(DemoEdmProvider.ES_CATEGORIES_NAME)) { return createEntity(edmEntitySet, edmEntityType, entityToCreate, manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME), rawServiceUri); } return null; } /** * This method is invoked for PATCH or PUT requests * */ public void updateEntityData(EdmEntitySet edmEntitySet, List<UriParameter> keyParams, Entity updateEntity, HttpMethod httpMethod) throws ODataApplicationException { EdmEntityType edmEntityType = edmEntitySet.getEntityType(); if (edmEntitySet.getName().equals(DemoEdmProvider.ES_PRODUCTS_NAME)) { updateEntity(edmEntityType, keyParams, updateEntity, httpMethod, manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME)); } else if(edmEntitySet.getName().equals(DemoEdmProvider.ES_CATEGORIES_NAME)) { updateEntity(edmEntityType, keyParams, updateEntity, httpMethod, manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME)); } } public void deleteEntityData(EdmEntitySet edmEntitySet, List<UriParameter> keyParams) throws ODataApplicationException { EdmEntityType edmEntityType = edmEntitySet.getEntityType(); if (edmEntitySet.getName().equals(DemoEdmProvider.ES_PRODUCTS_NAME)) { deleteEntity(edmEntityType, keyParams, manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME)); } else if(edmEntitySet.getName().equals(DemoEdmProvider.ES_CATEGORIES_NAME)) { deleteEntity(edmEntityType, keyParams, manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME)); } } // Navigation public Entity getRelatedEntity(Entity entity, UriResourceNavigation navigationResource) throws ODataApplicationException { final EdmNavigationProperty edmNavigationProperty = 
// (fragment) Tail of a getRelatedEntity(...) method whose opening lines are outside this chunk.
// NOTE(review): presumably `edmNavigationProperty` was assigned from navigationResource.getProperty()
// on the truncated preceding line — confirm against the full file.
navigationResource.getProperty();
if(edmNavigationProperty.isCollection()) {
    // Collection-valued navigation: resolve the target entity by key within the related collection.
    return Util.findEntity(edmNavigationProperty.getType(), getRelatedEntityCollection(entity, navigationResource),
        navigationResource.getKeyPredicates());
} else {
    // Single-valued navigation: the related entity is stored inline on the navigation link (may be absent).
    final Link link = entity.getNavigationLink(edmNavigationProperty.getName());
    return link == null ? null : link.getInlineEntity();
}
}

/**
 * Returns the collection of entities related to {@code entity} via the given navigation property.
 *
 * @param entity             source entity whose navigation link is inspected
 * @param navigationResource navigation segment naming the property to follow
 * @return the inline entity set of the navigation link, or an empty {@link EntityCollection}
 *         when no such link exists (never {@code null})
 */
public EntityCollection getRelatedEntityCollection(Entity entity, UriResourceNavigation navigationResource) {
    final Link link = entity.getNavigationLink(navigationResource.getProperty().getName());
    return link == null ? new EntityCollection() : link.getInlineEntitySet();
}

/* INTERNAL */

/**
 * Wraps the given list of entities in a fresh {@link EntityCollection}.
 *
 * @param entityList entities to copy into the collection
 * @return a new collection containing all entities from {@code entityList}
 */
private EntityCollection getEntityCollection(final List<Entity> entityList) {
    EntityCollection retEntitySet = new EntityCollection();
    retEntitySet.getEntities().addAll(entityList);
    return retEntitySet;
}

/**
 * Looks up a single entity by its key predicates within the given in-memory list.
 *
 * @param edmEntityType metadata type used to interpret the key parameters
 * @param keyParams     key predicates identifying the entity
 * @param entityList    in-memory backing list to search
 * @return the matching entity (never {@code null} — a miss raises instead)
 * @throws ODataApplicationException with 404 when no entity matches the key
 */
private Entity getEntity(EdmEntityType edmEntityType, List<UriParameter> keyParams, List<Entity> entityList)
        throws ODataApplicationException {

    // the list of entities at runtime
    EntityCollection entitySet = getEntityCollection(entityList);

    /* generic approach to find the requested entity */
    Entity requestedEntity = Util.findEntity(edmEntityType, entitySet, keyParams);

    if (requestedEntity == null) {
        // this variable is null if our data doesn't contain an entity for the requested key
        // Throw suitable exception
        throw new ODataApplicationException("Entity for requested key doesn't exist",
            HttpStatusCode.NOT_FOUND.getStatusCode(), Locale.ENGLISH);
    }

    return requestedEntity;
}

/**
 * Creates a new entity in {@code entityList} from the client-supplied payload, assigning the
 * next free integer "ID" key, then resolves binding links (references to existing entities)
 * and deep-inserts any inline (nested) entities.
 *
 * @param edmEntitySet  entity set the new entity belongs to (used to resolve binding targets)
 * @param edmEntityType metadata type of the new entity
 * @param entity        client payload: properties plus navigation bindings/inline entities
 * @param entityList    in-memory backing list the new entity is appended to
 * @param rawServiceUri service root, needed to parse entity-ids in binding links
 * @return the newly created entity
 * @throws ODataApplicationException when a binding link is invalid or targets the wrong entity set
 */
private Entity createEntity(EdmEntitySet edmEntitySet, EdmEntityType edmEntityType, Entity entity,
        List<Entity> entityList, final String rawServiceUri) throws ODataApplicationException {

    // 1.) Create the entity
    final Entity newEntity = new Entity();
    newEntity.setType(entity.getType());

    // Create the new key of the entity
    // NOTE(review): linear probe for the smallest unused positive ID — O(n^2) on large lists, fine for demo data.
    int newId = 1;
    while (entityIdExists(newId, entityList)) {
        newId++;
    }

    // Add all provided properties
    newEntity.getProperties().addAll(entity.getProperties());

    // Add the key property
    newEntity.getProperties().add(new Property(null, "ID", ValueType.PRIMITIVE, newId));
    newEntity.setId(createId(newEntity, "ID"));

    // 2.1.) Apply binding links
    for(final Link link : entity.getNavigationBindings()) {
        final EdmNavigationProperty edmNavigationProperty = edmEntityType.getNavigationProperty(link.getTitle());
        final EdmEntitySet targetEntitySet = (EdmEntitySet) edmEntitySet.getRelatedBindingTarget(link.getTitle());

        if(edmNavigationProperty.isCollection() && link.getBindingLinks() != null) {
            // Collection-valued: each binding link references one existing target entity.
            for(final String bindingLink : link.getBindingLinks()) {
                final Entity relatedEntity = readEntityByBindingLink(bindingLink, targetEntitySet, rawServiceUri);
                createLink(edmNavigationProperty, newEntity, relatedEntity);
            }
        } else if(!edmNavigationProperty.isCollection() && link.getBindingLink() != null) {
            // Single-valued: one binding link only.
            final Entity relatedEntity = readEntityByBindingLink(link.getBindingLink(), targetEntitySet, rawServiceUri);
            createLink(edmNavigationProperty, newEntity, relatedEntity);
        }
    }

    // 2.2.) Create nested entities
    for(final Link link : entity.getNavigationLinks()) {
        final EdmNavigationProperty edmNavigationProperty = edmEntityType.getNavigationProperty(link.getTitle());
        final EdmEntitySet targetEntitySet = (EdmEntitySet) edmEntitySet.getRelatedBindingTarget(link.getTitle());

        if(edmNavigationProperty.isCollection() && link.getInlineEntitySet() != null) {
            // Deep insert of a collection of inline entities; createEntityData is defined elsewhere in this class.
            for(final Entity nestedEntity : link.getInlineEntitySet().getEntities()) {
                final Entity newNestedEntity = createEntityData(targetEntitySet, nestedEntity, rawServiceUri);
                createLink(edmNavigationProperty, newEntity, newNestedEntity);
            }
        } else if(!edmNavigationProperty.isCollection() && link.getInlineEntity() != null){
            // Deep insert of a single inline entity.
            final Entity newNestedEntity = createEntityData(targetEntitySet, link.getInlineEntity(), rawServiceUri);
            createLink(edmNavigationProperty, newEntity, newNestedEntity);
        }
    }

    entityList.add(newEntity);

    return newEntity;
}

/**
 * Resolves an entity-id (binding link) string to the entity it references.
 *
 * @param entityId      entity-id such as {@code Products(1)}, absolute or relative to the service root
 * @param edmEntitySet  entity set the id is expected to address
 * @param rawServiceUri service root used to resolve relative ids
 * @return the referenced entity
 * @throws ODataApplicationException with 400 when the id is unparsable or addresses a different
 *                                   entity set; note the message contains a pre-existing typo
 *                                   ("Execpted") left untouched here (runtime string)
 */
private Entity readEntityByBindingLink(final String entityId, final EdmEntitySet edmEntitySet,
    final String rawServiceUri) throws ODataApplicationException {

    UriResourceEntitySet entitySetResource = null;
    try {
        entitySetResource = odata.createUriHelper().parseEntityId(edm, entityId, rawServiceUri);

        if(!entitySetResource.getEntitySet().getName().equals(edmEntitySet.getName())) {
            throw new ODataApplicationException("Execpted an entity-id for entity set " + edmEntitySet.getName()
                + " but found id for entity set " + entitySetResource.getEntitySet().getName(),
                HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ENGLISH);
        }
    } catch (DeserializerException e) {
        throw new ODataApplicationException(entityId + " is not a valid entity-Id",
            HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ENGLISH);
    }

    return readEntityData(entitySetResource.getEntitySet(), entitySetResource.getKeyPredicates());
}

/**
 * Checks whether any entity in {@code entityList} already uses the given integer "ID" value.
 *
 * @param id         candidate key value
 * @param entityList entities to scan; each is assumed to carry an Integer "ID" property
 * @return {@code true} when the id is already taken
 */
private boolean entityIdExists(int id, List<Entity> entityList) {

    for (Entity entity : entityList) {
        Integer existingID = (Integer) entity.getProperty("ID").getValue();
        if (existingID.intValue() == id) {
            return true;
        }
    }

    return false;
}

/**
 * Applies an update payload to an existing entity, honoring PATCH vs. PUT semantics.
 * Key properties are never modified. Complex types are not handled (not present in this model).
 *
 * @param edmEntityType metadata type of the entity
 * @param keyParams     key predicates identifying the entity to update
 * @param updateEntity  payload carrying the (possibly partial) new property values
 * @param httpMethod    PATCH (absent properties untouched) or PUT (absent properties nulled)
 * @param entityList    in-memory backing list
 * @throws ODataApplicationException with 404 when the entity does not exist
 */
private void updateEntity(EdmEntityType edmEntityType, List<UriParameter> keyParams, Entity updateEntity,
    HttpMethod httpMethod, List<Entity> entityList) throws ODataApplicationException {

    Entity entity = getEntity(edmEntityType, keyParams, entityList);
    // NOTE(review): getEntity throws on a miss, so this null check looks unreachable — kept as-is.
    if (entity == null) {
        throw new ODataApplicationException("Entity not found", HttpStatusCode.NOT_FOUND.getStatusCode(),
            Locale.ENGLISH);
    }

    // loop over all properties and replace the values with the values of the given payload
    // Note: ignoring ComplexType, as we don't have it in our odata model
    List<Property> existingProperties = entity.getProperties();
    for (Property existingProp : existingProperties) {
        String propName = existingProp.getName();

        // ignore the key properties, they aren't updateable
        if (isKey(edmEntityType, propName)) {
            continue;
        }

        Property updateProperty = updateEntity.getProperty(propName);
        // the request payload might not consider ALL properties, so it can be null
        if (updateProperty == null) {
            // if a property has NOT been added to the request payload
            // depending on the HttpMethod, our behavior is different
            if (httpMethod.equals(HttpMethod.PATCH)) {
                // as of the OData spec, in case of PATCH, the existing property is not touched
                continue; // do nothing
            } else if (httpMethod.equals(HttpMethod.PUT)) {
                // as of the OData spec, in case of PUT, the existing property is set to null (or to default value)
                existingProp.setValue(existingProp.getValueType(), null);
                continue;
            }
        }

        // change the value of the properties
        existingProp.setValue(existingProp.getValueType(), updateProperty.getValue());
    }
}

/**
 * Removes the entity identified by {@code keyParams} from the backing list.
 *
 * @param edmEntityType metadata type of the entity
 * @param keyParams     key predicates identifying the entity to delete
 * @param entityList    in-memory backing list
 * @throws ODataApplicationException with 404 when the entity does not exist
 */
private void deleteEntity(EdmEntityType edmEntityType, List<UriParameter> keyParams, List<Entity> entityList)
    throws ODataApplicationException {

    Entity entity = getEntity(edmEntityType, keyParams, entityList);
    // NOTE(review): unreachable for the same reason as in updateEntity — getEntity already throws.
    if (entity == null) {
        throw new ODataApplicationException("Entity not found", HttpStatusCode.NOT_FOUND.getStatusCode(),
            Locale.ENGLISH);
    }

    entityList.remove(entity);
}
/* HELPER */

/**
 * Returns {@code true} when {@code propertyName} is one of the entity type's key properties.
 *
 * @param edmEntityType type whose key property refs are consulted
 * @param propertyName  property name to test
 */
private boolean isKey(EdmEntityType edmEntityType, String propertyName) {
    List<EdmKeyPropertyRef> keyPropertyRefs = edmEntityType.getKeyPropertyRefs();
    for (EdmKeyPropertyRef propRef : keyPropertyRefs) {
        String keyPropertyName = propRef.getName();
        if (keyPropertyName.equals(propertyName)) {
            return true;
        }
    }
    return false;
}

/**
 * Seeds the Products entity set with six hard-coded demo entities (IDs 0..5).
 * Note: "Assitant" below is a pre-existing typo in the demo data, left untouched (runtime string).
 */
private void initProductSampleData() {
    final List<Entity> productList = manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME);

    Entity entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 0));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Notebook Basic 15"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "Notebook Basic, 1.7GHz - 15 XGA - 1024MB DDR2 SDRAM - 40GB"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 1));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Notebook Professional 17"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "Notebook Professional, 2.8GHz - 15 XGA - 8GB DDR3 RAM - 500GB"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 2));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "1UMTS PDA"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "Ultrafast 3G UMTS/HSDPA Pocket PC, supports GSM network"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 3));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Comfort Easy"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "32 GB Digital Assitant with high-resolution color screen"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 4));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Ergo Screen"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "19 Optimum Resolution 1024 x 768 @ 85Hz, resolution 1280 x 960"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 5));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Flat Basic"));
    entity.addProperty(new Property(null, "Description", ValueType.PRIMITIVE,
        "Optimum Hi-Resolution max. 1600 x 1200 @ 85Hz, Dot Pitch: 0.24mm"));
    entity.setType(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    productList.add(entity);
}

/**
 * Seeds the Categories entity set with three hard-coded demo entities (IDs 0..2).
 */
private void initCategorySampleData() {
    final List<Entity> categoryList = manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME);

    Entity entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 0));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Notebooks"));
    entity.setType(DemoEdmProvider.ET_CATEGORY_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    categoryList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 1));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Organizers"));
    entity.setType(DemoEdmProvider.ET_CATEGORY_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    categoryList.add(entity);

    entity = new Entity();
    entity.addProperty(new Property(null, "ID", ValueType.PRIMITIVE, 2));
    entity.addProperty(new Property(null, "Name", ValueType.PRIMITIVE, "Monitors"));
    entity.setType(DemoEdmProvider.ET_CATEGORY_FQN.getFullQualifiedNameAsString());
    entity.setId(createId(entity, "ID"));
    categoryList.add(entity);
}

/**
 * Wires the bidirectional Product&lt;-&gt;Category demo associations:
 * products 0-1 -&gt; category 0, products 2-3 -&gt; category 1, products 4-5 -&gt; category 2.
 * Assumes the init*SampleData() methods have already populated both lists.
 */
private void linkProductsAndCategories() {
    final List<Entity> productList = manager.getEntityCollection(DemoEdmProvider.ES_PRODUCTS_NAME);
    final List<Entity> categoryList = manager.getEntityCollection(DemoEdmProvider.ES_CATEGORIES_NAME);

    setLink(productList.get(0), "Category", categoryList.get(0));
    setLink(productList.get(1), "Category", categoryList.get(0));
    setLink(productList.get(2), "Category", categoryList.get(1));
    setLink(productList.get(3), "Category", categoryList.get(1));
    setLink(productList.get(4), "Category", categoryList.get(2));
    setLink(productList.get(5), "Category", categoryList.get(2));

    setLinks(categoryList.get(0), "Products", productList.subList(0, 2).toArray(new Entity[0]));
    setLinks(categoryList.get(1), "Products", productList.subList(2, 4).toArray(new Entity[0]));
    setLinks(categoryList.get(2), "Products", productList.subList(4, 6).toArray(new Entity[0]));
}

/**
 * Builds the canonical (Atom) id for an entity without a navigation suffix.
 */
private URI createId(Entity entity, String idPropertyName) {
    return createId(entity, idPropertyName, null);
}

/**
 * Builds the canonical (Atom) id for an entity, e.g. {@code Products(1)} or
 * {@code Products(1)/Category} when {@code navigationName} is given.
 *
 * @param entity         entity whose id is built; must carry the named key property
 * @param idPropertyName name of the key property to embed in the id
 * @param navigationName optional navigation segment appended after the key, may be {@code null}
 * @throws ODataRuntimeException when the assembled string is not a valid URI
 */
private URI createId(Entity entity, String idPropertyName, String navigationName) {
    try {
        StringBuilder sb = new StringBuilder(getEntitySetName(entity)).append("(");
        final Property property = entity.getProperty(idPropertyName);
        sb.append(property.asPrimitive()).append(")");
        if(navigationName != null) {
            sb.append("/").append(navigationName);
        }
        return new URI(sb.toString());
    } catch (URISyntaxException e) {
        throw new ODataRuntimeException("Unable to create (Atom) id for entity: " + entity, e);
    }
}

/**
 * Maps an entity's fully qualified type name to its entity set name;
 * falls back to the raw type string for unknown types.
 */
private String getEntitySetName(Entity entity) {
    if(DemoEdmProvider.ET_CATEGORY_FQN.getFullQualifiedNameAsString().equals(entity.getType())) {
        return DemoEdmProvider.ES_CATEGORIES_NAME;
    } else if(DemoEdmProvider.ET_PRODUCT_FQN.getFullQualifiedNameAsString().equals(entity.getType())) {
        return DemoEdmProvider.ES_PRODUCTS_NAME;
    }
    return entity.getType();
}

/**
 * Links {@code srcEntity} to {@code destEntity} via the navigation property and, when a
 * partner navigation property exists, also sets the reverse link.
 */
private void createLink(final EdmNavigationProperty navigationProperty, final Entity srcEntity,
    final Entity destEntity) {
    setLink(navigationProperty, srcEntity, destEntity);

    final EdmNavigationProperty partnerNavigationProperty = navigationProperty.getPartner();
    if (partnerNavigationProperty != null) {
        setLink(partnerNavigationProperty, destEntity, srcEntity);
    }
}

/**
 * Dispatches to the collection or single-entity link setter based on the navigation
 * property's cardinality.
 */
private void setLink(final EdmNavigationProperty navigationProperty, final Entity srcEntity,
    final Entity targetEntity) {
    if (navigationProperty.isCollection()) {
        setLinks(srcEntity, navigationProperty.getName(), targetEntity);
    } else {
        setLink(srcEntity, navigationProperty.getName(), targetEntity);
    }
}

/**
 * Stores {@code target} as the inline entity of the named single-valued navigation link,
 * creating the link (rel/type/title/href) on first use.
 */
private void setLink(final Entity entity, final String navigationPropertyName, final Entity target) {
    Link link = entity.getNavigationLink(navigationPropertyName);
    if (link == null) {
        link = new Link();
        link.setRel(Constants.NS_NAVIGATION_LINK_REL + navigationPropertyName);
        link.setType(Constants.ENTITY_NAVIGATION_LINK_TYPE);
        link.setTitle(navigationPropertyName);
        link.setHref(target.getId().toASCIIString());

        entity.getNavigationLinks().add(link);
    }
    // Always (re)point the inline entity, even when the link already existed.
    link.setInlineEntity(target);
}

/**
 * Appends {@code targets} to the inline entity set of the named collection-valued navigation
 * link, creating the link (rel/type/title/href) with a fresh collection on first use.
 */
private void setLinks(final Entity entity, final String navigationPropertyName, final Entity... targets) {
    Link link = entity.getNavigationLink(navigationPropertyName);
    if (link == null) {
        link = new Link();
        link.setRel(Constants.NS_NAVIGATION_LINK_REL + navigationPropertyName);
        link.setType(Constants.ENTITY_SET_NAVIGATION_LINK_TYPE);
        link.setTitle(navigationPropertyName);
        link.setHref(entity.getId().toASCIIString() + "/" + navigationPropertyName);

        EntityCollection target = new EntityCollection();
        target.getEntities().addAll(Arrays.asList(targets));
        link.setInlineEntitySet(target);

        entity.getNavigationLinks().add(link);
    } else {
        link.getInlineEntitySet().getEntities().addAll(Arrays.asList(targets));
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.layoutmgr.inline; import org.apache.fop.fo.Constants; import org.apache.fop.traits.WritingMode; /** * The FOP specific incarnation of the XSL-FO scaled baseline table. * All baseline tables are scaled to the font size of the font they * apply to. This class uses a coordinate system with its origin * where the dominant baseline intersects the start edge of the box. * All measurements are in mpt. */ final class ScaledBaselineTable { private static final float HANGING_BASELINE_FACTOR = 0.8f; private static final float MATHEMATICAL_BASELINE_FACTOR = 0.5f; private final int altitude; private final int depth; private final int xHeight; private final int dominantBaselineIdentifier; private final WritingMode writingMode; private final int dominantBaselineOffset; private int beforeEdgeOffset; private int afterEdgeOffset; /** * * Creates a new instance of BasicScaledBaselineTable for the given * altitude, depth, xHeight, baseline and writing mode. 
* @param altitude the height of the box or the font ascender * @param depth the font descender or 0 * @param xHeight the font xHeight * @param dominantBaselineIdentifier the dominant baseline given as an integer constant * @param writingMode the writing mode given as an integer constant */ ScaledBaselineTable(int altitude, int depth, int xHeight, int dominantBaselineIdentifier, WritingMode writingMode) { this.altitude = altitude; this.depth = depth; this.xHeight = xHeight; this.dominantBaselineIdentifier = dominantBaselineIdentifier; this.writingMode = writingMode; this.dominantBaselineOffset = getBaselineDefaultOffset(this.dominantBaselineIdentifier); this.beforeEdgeOffset = altitude - dominantBaselineOffset; this.afterEdgeOffset = depth - dominantBaselineOffset; } /** * Return the dominant baseline for this baseline table. * @return the dominant baseline */ int getDominantBaselineIdentifier() { return this.dominantBaselineIdentifier; } /** * Return the writing mode for this baseline table. * @return the writing mode */ WritingMode getWritingMode() { return this.writingMode; } /** * Return the offset of the given baseline from the dominant baseline. 
* * @param baselineIdentifier a baseline identifier * @return the offset from the dominant baseline */ int getBaseline(int baselineIdentifier) { int offset = 0; if (!isHorizontalWritingMode()) { switch (baselineIdentifier) { case Constants.EN_TOP: case Constants.EN_TEXT_TOP: case Constants.EN_TEXT_BOTTOM: case Constants.EN_BOTTOM: throw new IllegalArgumentException("Baseline " + baselineIdentifier + " only supported for horizontal writing modes"); default: // TODO } } switch (baselineIdentifier) { case Constants.EN_TOP: // fall through case Constants.EN_BEFORE_EDGE: offset = beforeEdgeOffset; break; case Constants.EN_TEXT_TOP: case Constants.EN_TEXT_BEFORE_EDGE: case Constants.EN_HANGING: case Constants.EN_CENTRAL: case Constants.EN_MIDDLE: case Constants.EN_MATHEMATICAL: case Constants.EN_ALPHABETIC: case Constants.EN_IDEOGRAPHIC: case Constants.EN_TEXT_BOTTOM: case Constants.EN_TEXT_AFTER_EDGE: offset = getBaselineDefaultOffset(baselineIdentifier) - dominantBaselineOffset; break; case Constants.EN_BOTTOM: // fall through case Constants.EN_AFTER_EDGE: offset = afterEdgeOffset; break; default: throw new IllegalArgumentException(String.valueOf(baselineIdentifier)); } return offset; } private boolean isHorizontalWritingMode() { return writingMode.isHorizontal(); } /** * Return the baseline offset measured from the font's default * baseline for the given baseline. 
* @param baselineIdentifier the baseline identifier * @return the baseline offset */ private int getBaselineDefaultOffset(int baselineIdentifier) { int offset = 0; switch (baselineIdentifier) { case Constants.EN_TEXT_BEFORE_EDGE: offset = altitude; break; case Constants.EN_HANGING: offset = Math.round(altitude * HANGING_BASELINE_FACTOR); break; case Constants.EN_CENTRAL: offset = (altitude - depth) / 2 + depth; break; case Constants.EN_MIDDLE: offset = xHeight / 2; break; case Constants.EN_MATHEMATICAL: offset = Math.round(altitude * MATHEMATICAL_BASELINE_FACTOR); break; case Constants.EN_ALPHABETIC: offset = 0; break; case Constants.EN_IDEOGRAPHIC: // Fall through case Constants.EN_TEXT_AFTER_EDGE: offset = depth; break; default: throw new IllegalArgumentException(String.valueOf(baselineIdentifier)); } return offset; } /** * Sets the position of the before and after baselines. * This is usually only done for line areas. For other * areas the position of the before and after baselines * are fixed when the table is constructed. * @param beforeBaseline the offset of the before-edge baseline from the dominant baseline * @param afterBaseline the offset of the after-edge baseline from the dominant baseline */ void setBeforeAndAfterBaselines(int beforeBaseline, int afterBaseline) { beforeEdgeOffset = beforeBaseline; afterEdgeOffset = afterBaseline; } /** * Return a new baseline table for the given baseline based * on the current baseline table. * @param baselineIdentifier the baseline identifer * @return a new baseline with the new baseline */ ScaledBaselineTable deriveScaledBaselineTable(int baselineIdentifier) { ScaledBaselineTable bac = new ScaledBaselineTable(altitude, depth, xHeight, baselineIdentifier, this.writingMode); return bac; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.metadata.utils; import org.apache.asterix.common.config.DatasetConfig.DatasetType; import org.apache.asterix.common.context.ITransactionSubsystemProvider; import org.apache.asterix.common.context.TransactionSubsystemProvider; import org.apache.asterix.common.exceptions.CompilationException; import org.apache.asterix.common.exceptions.ErrorCode; import org.apache.asterix.common.transactions.IRecoveryManager; import org.apache.asterix.common.transactions.JobId; import org.apache.asterix.dataflow.data.nontagged.MissingWriterFactory; import org.apache.asterix.formats.nontagged.BinaryComparatorFactoryProvider; import org.apache.asterix.metadata.declared.MetadataProvider; import org.apache.asterix.metadata.entities.Dataset; import org.apache.asterix.metadata.entities.Index; import org.apache.asterix.om.functions.BuiltinFunctions; import org.apache.asterix.om.functions.FunctionManagerHolder; import org.apache.asterix.om.functions.IFunctionDescriptor; import org.apache.asterix.runtime.operators.LSMSecondaryIndexBulkLoadOperatorDescriptor; import org.apache.asterix.runtime.operators.LSMSecondaryIndexCreationTupleProcessorOperatorDescriptor; import 
org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory; import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig; import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory; import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory; import org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory; import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor; import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory; import org.apache.hyracks.api.dataflow.IOperatorDescriptor; import org.apache.hyracks.api.dataflow.value.IBinaryComparator; import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory; import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer; import org.apache.hyracks.api.dataflow.value.ITypeTraits; import org.apache.hyracks.api.dataflow.value.RecordDescriptor; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.job.JobSpecification; import org.apache.hyracks.data.std.primitive.BooleanPointable; import org.apache.hyracks.data.std.primitive.IntegerPointable; import org.apache.hyracks.dataflow.common.data.marshalling.BooleanSerializerDeserializer; import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer; import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor; import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory; import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory; import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory; import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDiskComponentScanOperatorDescriptor; /** * This 
class is used to build secondary LSM index for correlated datasets.
 *
 * @author luochen
 */
public abstract class SecondaryCorrelatedTreeIndexOperationsHelper extends SecondaryTreeIndexOperationsHelper {

    // Position of the component-position tag within a tagged tuple.
    protected final static int COMPONENT_POS_OFFSET = 0;

    // Position of the anti-matter flag within a tagged tuple.
    protected final static int ANTI_MATTER_OFFSET = 1;

    // Number of tag fields prepended to every tuple (component pos + anti-matter).
    protected final static int NUM_TAG_FIELDS = 2;

    /**
     * Make sure tuples are in the descending order w.r.t. component_pos.
     * In the disk component list of an index, components are ordered from newest to oldest.
     * This descending order ensures older components can be bulk loaded first and get a smaller (older)
     * component file timestamp.
     */
    protected final static IBinaryComparatorFactory COMPONENT_POS_COMPARATOR_FACTORY =
            new IBinaryComparatorFactory() {
                private static final long serialVersionUID = 1L;

                @Override
                public IBinaryComparator createBinaryComparator() {
                    return new IBinaryComparator() {
                        // Delegate to the standard integer comparator, negating its result below.
                        final IBinaryComparator comparator =
                                BinaryComparatorFactoryProvider.INTEGER_POINTABLE_INSTANCE.createBinaryComparator();

                        @Override
                        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2)
                                throws HyracksDataException {
                            // Negation flips ascending integer order into the required descending order.
                            return -comparator.compare(b1, s1, l1, b2, s2, l2);
                        }
                    };
                }
            };

    protected SecondaryCorrelatedTreeIndexOperationsHelper(Dataset dataset, Index index,
            PhysicalOptimizationConfig physOptConf, MetadataProvider metadataProvider) throws AlgebricksException {
        super(dataset, index, physOptConf, metadataProvider);
    }

    /**
     * Derives a record descriptor that prepends the two tag fields (component position,
     * anti-matter flag) to the fields of {@code recDescriptor}. Type traits are extended
     * in the same way when the input descriptor carries them.
     *
     * @param recDescriptor the untagged record descriptor
     * @return a new descriptor with {@link #NUM_TAG_FIELDS} extra leading fields
     */
    protected RecordDescriptor getTaggedRecordDescriptor(RecordDescriptor recDescriptor) {
        @SuppressWarnings("rawtypes")
        ISerializerDeserializer[] fields =
                new ISerializerDeserializer[recDescriptor.getFields().length + NUM_TAG_FIELDS];
        ITypeTraits[] traits = null;
        if (recDescriptor.getTypeTraits() != null) {
            traits = new ITypeTraits[recDescriptor.getTypeTraits().length + NUM_TAG_FIELDS];
        }

        //component position field
        fields[COMPONENT_POS_OFFSET] = IntegerSerializerDeserializer.INSTANCE;
        if (traits != null) {
            traits[COMPONENT_POS_OFFSET] = IntegerPointable.TYPE_TRAITS;
        }

        //anti-matter field
        fields[ANTI_MATTER_OFFSET] = BooleanSerializerDeserializer.INSTANCE;
        if (traits != null) {
            traits[ANTI_MATTER_OFFSET] = BooleanPointable.TYPE_TRAITS;
        }

        // Copy the original fields (and traits, when present) after the tag fields.
        for (int i = NUM_TAG_FIELDS; i < fields.length; i++) {
            fields[i] = recDescriptor.getFields()[i - NUM_TAG_FIELDS];
            if (traits != null && i < traits.length) {
                traits[i] = recDescriptor.getTypeTraits()[i - NUM_TAG_FIELDS];
            }
        }
        return new RecordDescriptor(fields, traits);
    }

    /**
     * Prepends the descending component-position comparator to the given secondary
     * comparator factories, producing the comparator array for tagged tuples.
     *
     * @param secondaryComparatorFactories comparators for the untagged secondary key fields
     * @return a new array with the component-position comparator at index 0
     */
    protected IBinaryComparatorFactory[] getTaggedSecondaryComparatorFactories(
            IBinaryComparatorFactory[] secondaryComparatorFactories) {
        IBinaryComparatorFactory[] resultFactories =
                new IBinaryComparatorFactory[secondaryComparatorFactories.length + 1];
        // order component ids from largest (oldest) to smallest (newest)
        // this is necessary since during bulk-loading, we need to create older components first
        resultFactories[COMPONENT_POS_OFFSET] = COMPONENT_POS_COMPARATOR_FACTORY;
        for (int i = 1; i < resultFactories.length; i++) {
            resultFactories[i] = secondaryComparatorFactories[i - 1];
        }
        return resultFactories;
    }

    /**
     * Builds the cast operator for tagged tuples: applies CAST_TYPE (or CAST_TYPE_LAX)
     * to the record field while projecting tags, primary keys, the record, and the
     * optional meta part through unchanged. Only internal datasets are supported.
     *
     * @param spec       job specification the operator is added to
     * @param dsType     dataset type; must be {@code DatasetType.INTERNAL}
     * @param strictCast whether to use the strict or lax cast function
     */
    @Override
    protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType,
            boolean strictCast) throws AlgebricksException {
        IFunctionDescriptor castFuncDesc = FunctionManagerHolder.getFunctionManager()
                .lookupFunction(strictCast ? BuiltinFunctions.CAST_TYPE : BuiltinFunctions.CAST_TYPE_LAX);
        castFuncDesc.setImmutableStates(enforcedItemType, itemType);

        int[] outColumns = new int[1];
        // tags(2) + primary keys + record + meta part(?)
        int[] projectionList = new int[NUM_TAG_FIELDS + (dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
        int recordIdx = NUM_TAG_FIELDS + numPrimaryKeys;

        //here we only consider internal dataset
        assert dsType == DatasetType.INTERNAL;

        // The cast output replaces the record column (right after tags + primary keys).
        outColumns[0] = NUM_TAG_FIELDS + numPrimaryKeys;

        int projCount = 0;
        for (int i = 0; i < NUM_TAG_FIELDS; i++) {
            projectionList[projCount++] = i;
        }
        //set primary keys and the record
        // NOTE: "<=" is deliberate — it covers numPrimaryKeys key columns plus the record column.
        for (int i = 0; i <= numPrimaryKeys; i++) {
            projectionList[projCount++] = NUM_TAG_FIELDS + i;
        }
        if (dataset.hasMetaPart()) {
            projectionList[NUM_TAG_FIELDS + numPrimaryKeys + 1] = NUM_TAG_FIELDS + numPrimaryKeys + 1;
        }

        IScalarEvaluatorFactory[] castEvalFact =
                new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
        sefs[0] = castFuncDesc.createEvaluatorFactory(castEvalFact);
        AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
        return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
                new RecordDescriptor[] { getTaggedRecordDescriptor(enforcedRecDesc) });
    }

    /**
     * It differs from its base class in that we need to add the extra tags to the fields.
     */
    @Override
    protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec, int numSecondaryKeyFields,
            RecordDescriptor secondaryRecDesc) throws AlgebricksException {
        int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
        int[] projectionList = new int[NUM_TAG_FIELDS + numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
        // Secondary keys (and filter fields) are computed into columns after tags + primary keys.
        for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
            outColumns[i] = NUM_TAG_FIELDS + numPrimaryKeys + i;
        }
        int projCount = 0;
        //set tag fields
        for (int i = 0; i < NUM_TAG_FIELDS; i++) {
            projectionList[projCount++] = i;
        }
        //set secondary keys
        for (int i = 0; i < numSecondaryKeyFields; i++) {
            projectionList[projCount++] = NUM_TAG_FIELDS + numPrimaryKeys + i;
        }
        //set primary keys
        for (int i = 0; i < numPrimaryKeys; i++) {
            projectionList[projCount++] = NUM_TAG_FIELDS + i;
        }
        //set filter fields
        if (numFilterFields > 0) {
            projectionList[projCount] = NUM_TAG_FIELDS + numPrimaryKeys + numSecondaryKeyFields;
        }
        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
        for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
            sefs[i] = secondaryFieldAccessEvalFactories[i];
        }
        AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
        AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
                new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp,
                primaryPartitionConstraint);
        return asterixAssignOp;
    }

    /**
     * Creates the tuple processor operator that interprets the tag fields
     * (component position, anti-matter) during secondary index creation,
     * constrained to the primary index partitions.
     */
    protected IOperatorDescriptor createTupleProcessorOp(JobSpecification spec,
            RecordDescriptor taggedSecondaryRecDesc, int numSecondaryKeyFields, int numPrimaryKeyFields,
            boolean hasBuddyBTree) {
        IOperatorDescriptor op = new LSMSecondaryIndexCreationTupleProcessorOperatorDescriptor(spec,
                taggedSecondaryRecDesc, MissingWriterFactory.INSTANCE, NUM_TAG_FIELDS, numSecondaryKeyFields,
                numPrimaryKeyFields, hasBuddyBTree);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, primaryPartitionConstraint);
        return op;
    }

    /**
     * Creates the external sort operator for tagged tuples. Sort fields are the
     * component position followed by the secondary key fields; the anti-matter
     * field (index 1) is skipped, hence the {@code i + 1} mapping below.
     */
    @Override
    protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
            IBinaryComparatorFactory[] taggedSecondaryComparatorFactories,
            RecordDescriptor taggedSecondaryRecDesc) {
        int[] taggedSortFields = new int[taggedSecondaryComparatorFactories.length];
        //component pos
        taggedSortFields[COMPONENT_POS_OFFSET] = COMPONENT_POS_OFFSET;
        //not sorting on anti-matter field
        for (int i = 1; i < taggedSortFields.length; i++) {
            taggedSortFields[i] = i + 1;
        }
        ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
                physOptConf.getMaxFramesExternalSort(), taggedSortFields, taggedSecondaryComparatorFactories,
                taggedSecondaryRecDesc);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp,
                primaryPartitionConstraint);
        return sortOp;
    }

    /**
     * Creates the bulk-load operator that writes tagged tuples into the secondary
     * index, wired to both the primary and secondary index dataflow helpers and
     * constrained to the secondary index partitions.
     */
    protected LSMSecondaryIndexBulkLoadOperatorDescriptor createTreeIndexBulkLoadOp(JobSpecification spec,
            MetadataProvider metadataProvider, RecordDescriptor taggedSecondaryRecDesc, int numSecondaryKeys,
            int numPrimaryKeys, boolean hasBuddyBtree) throws AlgebricksException {
        IndexDataflowHelperFactory primaryIndexHelperFactory = new IndexDataflowHelperFactory(
                metadataProvider.getStorageComponentProvider().getStorageManager(), primaryFileSplitProvider);
        IndexDataflowHelperFactory secondaryIndexHelperFactory = new IndexDataflowHelperFactory(
                metadataProvider.getStorageComponentProvider().getStorageManager(), secondaryFileSplitProvider);
        LSMSecondaryIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp =
                new LSMSecondaryIndexBulkLoadOperatorDescriptor(spec, taggedSecondaryRecDesc,
                        primaryIndexHelperFactory, secondaryIndexHelperFactory, NUM_TAG_FIELDS, numSecondaryKeys,
                        numPrimaryKeys, hasBuddyBtree);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
                secondaryPartitionConstraint);
        return treeIndexBulkLoadOp;
    }

    /**
     * Creates the operator that scans the disk components of the primary index.
     * Temporary datasets use a no-op search callback; otherwise an instant-search
     * callback tied to the given job/transaction is installed.
     */
    protected IOperatorDescriptor createPrimaryIndexScanDiskComponentsOp(JobSpecification spec,
            MetadataProvider metadataProvider, RecordDescriptor outRecDesc, JobId jobId)
            throws AlgebricksException {
        ITransactionSubsystemProvider txnSubsystemProvider = TransactionSubsystemProvider.INSTANCE;
        boolean temp = dataset.getDatasetDetails().isTemp();
        ISearchOperationCallbackFactory searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
                : new PrimaryIndexInstantSearchOperationCallbackFactory(jobId, dataset.getDatasetId(),
                        dataset.getPrimaryBloomFilterFields(), txnSubsystemProvider,
                        IRecoveryManager.ResourceType.LSM_BTREE);
        IndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
                metadataProvider.getStorageComponentProvider().getStorageManager(), primaryFileSplitProvider);
        LSMBTreeDiskComponentScanOperatorDescriptor primaryScanOp =
                new LSMBTreeDiskComponentScanOperatorDescriptor(
                        spec, outRecDesc, indexHelperFactory, searchCallbackFactory);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primaryScanOp,
                primaryPartitionConstraint);
        return primaryScanOp;
    }

    /**
     * Factory for the concrete correlated-index operations helper matching the
     * index type. Currently only BTREE is implemented; RTree and inverted index
     * types are not yet supported.
     *
     * @throws UnsupportedOperationException for not-yet-supported index types
     * @throws CompilationException          for unknown index types
     */
    public static SecondaryIndexOperationsHelper createIndexOperationsHelper(Dataset dataset, Index index,
            MetadataProvider metadataProvider, PhysicalOptimizationConfig physOptConf) throws AlgebricksException {
        SecondaryIndexOperationsHelper indexOperationsHelper = null;
        switch (index.getIndexType()) {
            case BTREE:
                indexOperationsHelper =
                        new SecondaryCorrelatedBTreeOperationsHelper(dataset, index, physOptConf, metadataProvider);
                break;
            case RTREE:
                //TODO RTree
            case SINGLE_PARTITION_WORD_INVIX:
            case SINGLE_PARTITION_NGRAM_INVIX:
            case LENGTH_PARTITIONED_WORD_INVIX:
            case LENGTH_PARTITIONED_NGRAM_INVIX:
                //TODO Inverted Index
                //TODO This will be fixed soon
                throw new UnsupportedOperationException();
            default:
                throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, index.getIndexType());
        }
        indexOperationsHelper.init();
        return indexOperationsHelper;
    }
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.http;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.InvalidMimeTypeException;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
import org.springframework.util.StringUtils;
// NOTE(review): CompoundComparator is deprecated as of Spring 4.3; it is no longer
// used below (replaced by Comparator.thenComparing) but the import is retained.
import org.springframework.util.comparator.CompoundComparator;

/**
 * A sub-class of {@link MimeType} that adds support for quality parameters as defined
 * in the HTTP specification.
 *
 * @author Arjen Poutsma
 * @author Juergen Hoeller
 * @author Rossen Stoyanchev
 * @author Sebastien Deleuze
 * @since 3.0
 * @see <a href="http://tools.ietf.org/html/rfc7231#section-3.1.1.1">HTTP 1.1: Semantics and Content, section 3.1.1.1</a>
 */
public class MediaType extends MimeType implements Serializable {

	private static final long serialVersionUID = 2069937152339670231L;

	/**
	 * Public constant media type that includes all media ranges (i.e. "&#42;/&#42;").
	 */
	public static final MediaType ALL;

	/**
	 * A String equivalent of {@link MediaType#ALL}.
	 */
	public static final String ALL_VALUE = "*/*";

	/**
	 * Public constant media type for {@code application/atom+xml}.
	 */
	public final static MediaType APPLICATION_ATOM_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_ATOM_XML}.
	 */
	public final static String APPLICATION_ATOM_XML_VALUE = "application/atom+xml";

	/**
	 * Public constant media type for {@code application/x-www-form-urlencoded}.
	 */
	public final static MediaType APPLICATION_FORM_URLENCODED;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_FORM_URLENCODED}.
	 */
	public final static String APPLICATION_FORM_URLENCODED_VALUE = "application/x-www-form-urlencoded";

	/**
	 * Public constant media type for {@code application/json}.
	 * @see #APPLICATION_JSON_UTF8
	 */
	public final static MediaType APPLICATION_JSON;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_JSON}.
	 * @see #APPLICATION_JSON_UTF8_VALUE
	 */
	public final static String APPLICATION_JSON_VALUE = "application/json";

	/**
	 * Public constant media type for {@code application/json;charset=UTF-8}.
	 */
	public final static MediaType APPLICATION_JSON_UTF8;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_JSON_UTF8}.
	 */
	public final static String APPLICATION_JSON_UTF8_VALUE = APPLICATION_JSON_VALUE + ";charset=UTF-8";

	/**
	 * Public constant media type for {@code application/octet-stream}.
	 */
	public final static MediaType APPLICATION_OCTET_STREAM;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_OCTET_STREAM}.
	 */
	public final static String APPLICATION_OCTET_STREAM_VALUE = "application/octet-stream";

	/**
	 * Public constant media type for {@code application/pdf}.
	 * @since 4.3
	 */
	public final static MediaType APPLICATION_PDF;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_PDF}.
	 * @since 4.3
	 */
	public final static String APPLICATION_PDF_VALUE = "application/pdf";

	/**
	 * Public constant media type for {@code application/xhtml+xml}.
	 */
	public final static MediaType APPLICATION_XHTML_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_XHTML_XML}.
	 */
	public final static String APPLICATION_XHTML_XML_VALUE = "application/xhtml+xml";

	/**
	 * Public constant media type for {@code application/xml}.
	 */
	public final static MediaType APPLICATION_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_XML}.
	 */
	public final static String APPLICATION_XML_VALUE = "application/xml";

	/**
	 * Public constant media type for {@code image/gif}.
	 */
	public final static MediaType IMAGE_GIF;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_GIF}.
	 */
	public final static String IMAGE_GIF_VALUE = "image/gif";

	/**
	 * Public constant media type for {@code image/jpeg}.
	 */
	public final static MediaType IMAGE_JPEG;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_JPEG}.
	 */
	public final static String IMAGE_JPEG_VALUE = "image/jpeg";

	/**
	 * Public constant media type for {@code image/png}.
	 */
	public final static MediaType IMAGE_PNG;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_PNG}.
	 */
	public final static String IMAGE_PNG_VALUE = "image/png";

	/**
	 * Public constant media type for {@code multipart/form-data}.
	 */
	public final static MediaType MULTIPART_FORM_DATA;

	/**
	 * A String equivalent of {@link MediaType#MULTIPART_FORM_DATA}.
	 */
	public final static String MULTIPART_FORM_DATA_VALUE = "multipart/form-data";

	/**
	 * Public constant media type for {@code text/event-stream}.
	 * @see <a href="https://www.w3.org/TR/eventsource/">Server-Sent Events W3C recommendation</a>
	 */
	public final static MediaType TEXT_EVENT_STREAM;

	/**
	 * A String equivalent of {@link MediaType#TEXT_EVENT_STREAM}.
	 */
	public final static String TEXT_EVENT_STREAM_VALUE = "text/event-stream";

	/**
	 * Public constant media type for {@code text/html}.
	 */
	public final static MediaType TEXT_HTML;

	/**
	 * A String equivalent of {@link MediaType#TEXT_HTML}.
	 */
	public final static String TEXT_HTML_VALUE = "text/html";

	/**
	 * Public constant media type for {@code text/markdown}.
	 * @since 4.3
	 */
	public final static MediaType TEXT_MARKDOWN;

	/**
	 * A String equivalent of {@link MediaType#TEXT_MARKDOWN}.
	 * @since 4.3
	 */
	public final static String TEXT_MARKDOWN_VALUE = "text/markdown";

	/**
	 * Public constant media type for {@code text/plain}.
	 */
	public final static MediaType TEXT_PLAIN;

	/**
	 * A String equivalent of {@link MediaType#TEXT_PLAIN}.
	 */
	public final static String TEXT_PLAIN_VALUE = "text/plain";

	/**
	 * Public constant media type for {@code text/xml}.
	 */
	public final static MediaType TEXT_XML;

	/**
	 * A String equivalent of {@link MediaType#TEXT_XML}.
	 */
	public final static String TEXT_XML_VALUE = "text/xml";

	/** The name of the RFC 7231 quality parameter. */
	private static final String PARAM_QUALITY_FACTOR = "q";


	static {
		// Constants are parsed from their String equivalents so the two always agree.
		ALL = valueOf(ALL_VALUE);
		APPLICATION_ATOM_XML = valueOf(APPLICATION_ATOM_XML_VALUE);
		APPLICATION_FORM_URLENCODED = valueOf(APPLICATION_FORM_URLENCODED_VALUE);
		APPLICATION_JSON = valueOf(APPLICATION_JSON_VALUE);
		APPLICATION_JSON_UTF8 = valueOf(APPLICATION_JSON_UTF8_VALUE);
		APPLICATION_OCTET_STREAM = valueOf(APPLICATION_OCTET_STREAM_VALUE);
		APPLICATION_PDF = valueOf(APPLICATION_PDF_VALUE);
		APPLICATION_XHTML_XML = valueOf(APPLICATION_XHTML_XML_VALUE);
		APPLICATION_XML = valueOf(APPLICATION_XML_VALUE);
		IMAGE_GIF = valueOf(IMAGE_GIF_VALUE);
		IMAGE_JPEG = valueOf(IMAGE_JPEG_VALUE);
		IMAGE_PNG = valueOf(IMAGE_PNG_VALUE);
		MULTIPART_FORM_DATA = valueOf(MULTIPART_FORM_DATA_VALUE);
		TEXT_EVENT_STREAM = valueOf(TEXT_EVENT_STREAM_VALUE);
		TEXT_HTML = valueOf(TEXT_HTML_VALUE);
		TEXT_MARKDOWN = valueOf(TEXT_MARKDOWN_VALUE);
		TEXT_PLAIN = valueOf(TEXT_PLAIN_VALUE);
		TEXT_XML = valueOf(TEXT_XML_VALUE);
	}


	/**
	 * Create a new {@code MediaType} for the given primary type.
	 * <p>The {@linkplain #getSubtype() subtype} is set to "&#42;", parameters empty.
	 * @param type the primary type
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type) {
		super(type);
	}

	/**
	 * Create a new {@code MediaType} for the given primary type and subtype.
	 * <p>The parameters are empty.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype) {
		super(type, subtype, Collections.emptyMap());
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and character set.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param charset the character set
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, Charset charset) {
		super(type, subtype, charset);
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and quality value.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param qualityValue the quality value
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, double qualityValue) {
		this(type, subtype, Collections.singletonMap(PARAM_QUALITY_FACTOR, Double.toString(qualityValue)));
	}

	/**
	 * Copy-constructor that copies the type, subtype and parameters of the given
	 * {@code MediaType}, and allows to set the specified character set.
	 * @param other the other media type
	 * @param charset the character set
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 * @since 4.3
	 */
	public MediaType(MediaType other, Charset charset) {
		super(other, charset);
	}

	/**
	 * Copy-constructor that copies the type and subtype of the given {@code MediaType},
	 * and allows for different parameter.
	 * @param other the other media type
	 * @param parameters the parameters, may be {@code null}
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(MediaType other, Map<String, String> parameters) {
		super(other.getType(), other.getSubtype(), parameters);
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and parameters.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param parameters the parameters, may be {@code null}
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, Map<String, String> parameters) {
		super(type, subtype, parameters);
	}


	@Override
	protected void checkParameters(String attribute, String value) {
		super.checkParameters(attribute, value);
		// Additionally validate the "q" parameter: must be a double in [0.0, 1.0].
		if (PARAM_QUALITY_FACTOR.equals(attribute)) {
			value = unquote(value);
			double d = Double.parseDouble(value);
			Assert.isTrue(d >= 0D && d <= 1D,
					"Invalid quality value \"" + value + "\": should be between 0.0 and 1.0");
		}
	}

	/**
	 * Return the quality value, as indicated by a {@code q} parameter, if any.
	 * Defaults to {@code 1.0}.
	 * @return the quality factory
	 */
	public double getQualityValue() {
		String qualityFactory = getParameter(PARAM_QUALITY_FACTOR);
		return (qualityFactory != null ? Double.parseDouble(unquote(qualityFactory)) : 1D);
	}

	/**
	 * Indicate whether this {@code MediaType} includes the given media type.
	 * <p>For instance, {@code text/*} includes {@code text/plain} and {@code text/html}, and {@code application/*+xml}
	 * includes {@code application/soap+xml}, etc. This method is <b>not</b> symmetric.
	 * @param other the reference media type with which to compare
	 * @return {@code true} if this media type includes the given media type; {@code false} otherwise
	 */
	public boolean includes(MediaType other) {
		return super.includes(other);
	}

	/**
	 * Indicate whether this {@code MediaType} is compatible with the given media type.
	 * <p>For instance, {@code text/*} is compatible with {@code text/plain}, {@code text/html}, and vice versa.
	 * In effect, this method is similar to {@link #includes(MediaType)}, except that it <b>is</b> symmetric.
	 * @param other the reference media type with which to compare
	 * @return {@code true} if this media type is compatible with the given media type; {@code false} otherwise
	 */
	public boolean isCompatibleWith(MediaType other) {
		return super.isCompatibleWith(other);
	}

	/**
	 * Return a replica of this instance with the quality value of the given MediaType.
	 * @return the same instance if the given MediaType doesn't have a quality value, or a new one otherwise
	 */
	public MediaType copyQualityValue(MediaType mediaType) {
		if (!mediaType.getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
			return this;
		}
		Map<String, String> params = new LinkedHashMap<>(getParameters());
		params.put(PARAM_QUALITY_FACTOR, mediaType.getParameters().get(PARAM_QUALITY_FACTOR));
		return new MediaType(this, params);
	}

	/**
	 * Return a replica of this instance with its quality value removed.
	 * @return the same instance if the media type doesn't contain a quality value, or a new one otherwise
	 */
	public MediaType removeQualityValue() {
		if (!getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
			return this;
		}
		Map<String, String> params = new LinkedHashMap<>(getParameters());
		params.remove(PARAM_QUALITY_FACTOR);
		return new MediaType(this, params);
	}


	/**
	 * Parse the given String value into a {@code MediaType} object,
	 * with this method name following the 'valueOf' naming convention
	 * (as supported by {@link org.springframework.core.convert.ConversionService}.
	 * @param value the string to parse
	 * @throws InvalidMediaTypeException if the media type value cannot be parsed
	 * @see #parseMediaType(String)
	 */
	public static MediaType valueOf(String value) {
		return parseMediaType(value);
	}

	/**
	 * Parse the given String into a single {@code MediaType}.
	 * @param mediaType the string to parse
	 * @return the media type
	 * @throws InvalidMediaTypeException if the media type value cannot be parsed
	 */
	public static MediaType parseMediaType(String mediaType) {
		MimeType type;
		try {
			type = MimeTypeUtils.parseMimeType(mediaType);
		}
		catch (InvalidMimeTypeException ex) {
			// Re-throw with the HTTP-specific exception type callers expect.
			throw new InvalidMediaTypeException(ex);
		}
		try {
			return new MediaType(type.getType(), type.getSubtype(), type.getParameters());
		}
		catch (IllegalArgumentException ex) {
			// e.g. an invalid "q" parameter rejected by checkParameters()
			throw new InvalidMediaTypeException(mediaType, ex.getMessage());
		}
	}

	/**
	 * Parse the given comma-separated string into a list of {@code MediaType} objects.
	 * <p>This method can be used to parse an Accept or Content-Type header.
	 * @param mediaTypes the string to parse
	 * @return the list of media types
	 * @throws InvalidMediaTypeException if the media type value cannot be parsed
	 */
	public static List<MediaType> parseMediaTypes(String mediaTypes) {
		if (!StringUtils.hasLength(mediaTypes)) {
			return Collections.emptyList();
		}
		String[] tokens = StringUtils.tokenizeToStringArray(mediaTypes, ",");
		List<MediaType> result = new ArrayList<>(tokens.length);
		for (String token : tokens) {
			result.add(parseMediaType(token));
		}
		return result;
	}

	/**
	 * Parse the given list of (potentially) comma-separated strings into a
	 * list of {@code MediaType} objects.
	 * <p>This method can be used to parse an Accept or Content-Type header.
	 * @param mediaTypes the string to parse
	 * @return the list of media types
	 * @throws InvalidMediaTypeException if the media type value cannot be parsed
	 * @since 4.3.2
	 */
	public static List<MediaType> parseMediaTypes(List<String> mediaTypes) {
		if (CollectionUtils.isEmpty(mediaTypes)) {
			return Collections.emptyList();
		}
		else if (mediaTypes.size() == 1) {
			return parseMediaTypes(mediaTypes.get(0));
		}
		else {
			List<MediaType> result = new ArrayList<>(8);
			for (String mediaType : mediaTypes) {
				result.addAll(parseMediaTypes(mediaType));
			}
			return result;
		}
	}

	/**
	 * Re-create the given mime types as media types.
	 * @since 5.0
	 */
	public static List<MediaType> asMediaTypes(List<MimeType> mimeTypes) {
		return mimeTypes.stream().map(MediaType::asMediaType).collect(Collectors.toList());
	}

	/**
	 * Re-create the given mime type as a media type.
	 * @since 5.0
	 */
	public static MediaType asMediaType(MimeType mimeType) {
		if (mimeType instanceof MediaType) {
			return (MediaType) mimeType;
		}
		return new MediaType(mimeType.getType(), mimeType.getSubtype(), mimeType.getParameters());
	}

	/**
	 * Return a string representation of the given list of {@code MediaType} objects.
	 * <p>This method can be used to for an {@code Accept} or {@code Content-Type} header.
	 * @param mediaTypes the media types to create a string representation for
	 * @return the string representation
	 */
	public static String toString(Collection<MediaType> mediaTypes) {
		return MimeTypeUtils.toString(mediaTypes);
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by specificity.
	 * <p>Given two media types:
	 * <ol>
	 * <li>if either media type has a {@linkplain #isWildcardType() wildcard type}, then the media type without the
	 * wildcard is ordered before the other.</li>
	 * <li>if the two media types have different {@linkplain #getType() types}, then they are considered equal and
	 * remain their current order.</li>
	 * <li>if either media type has a {@linkplain #isWildcardSubtype() wildcard subtype}, then the media type without
	 * the wildcard is sorted before the other.</li>
	 * <li>if the two media types have different {@linkplain #getSubtype() subtypes}, then they are considered equal
	 * and remain their current order.</li>
	 * <li>if the two media types have different {@linkplain #getQualityValue() quality value}, then the media type
	 * with the highest quality value is ordered before the other.</li>
	 * <li>if the two media types have a different amount of {@linkplain #getParameter(String) parameters}, then the
	 * media type with the most parameters is ordered before the other.</li>
	 * </ol>
	 * <p>For example:
	 * <blockquote>audio/basic &lt; audio/* &lt; *&#047;*</blockquote>
	 * <blockquote>audio/* &lt; audio/*;q=0.7; audio/*;q=0.3</blockquote>
	 * <blockquote>audio/basic;level=1 &lt; audio/basic</blockquote>
	 * <blockquote>audio/basic == text/html</blockquote>
	 * <blockquote>audio/basic == audio/wave</blockquote>
	 * @param mediaTypes the list of media types to be sorted
	 * @see <a href="http://tools.ietf.org/html/rfc7231#section-5.3.2">HTTP 1.1: Semantics
	 * and Content, section 5.3.2</a>
	 */
	public static void sortBySpecificity(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			Collections.sort(mediaTypes, SPECIFICITY_COMPARATOR);
		}
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by quality value.
	 * <p>Given two media types:
	 * <ol>
	 * <li>if the two media types have different {@linkplain #getQualityValue() quality value}, then the media type
	 * with the highest quality value is ordered before the other.</li>
	 * <li>if either media type has a {@linkplain #isWildcardType() wildcard type}, then the media type without the
	 * wildcard is ordered before the other.</li>
	 * <li>if the two media types have different {@linkplain #getType() types}, then they are considered equal and
	 * remain their current order.</li>
	 * <li>if either media type has a {@linkplain #isWildcardSubtype() wildcard subtype}, then the media type without
	 * the wildcard is sorted before the other.</li>
	 * <li>if the two media types have different {@linkplain #getSubtype() subtypes}, then they are considered equal
	 * and remain their current order.</li>
	 * <li>if the two media types have a different amount of {@linkplain #getParameter(String) parameters}, then the
	 * media type with the most parameters is ordered before the other.</li>
	 * </ol>
	 * @param mediaTypes the list of media types to be sorted
	 * @see #getQualityValue()
	 */
	public static void sortByQualityValue(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			Collections.sort(mediaTypes, QUALITY_VALUE_COMPARATOR);
		}
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by specificity as the
	 * primary criteria and quality value the secondary.
	 * @see MediaType#sortBySpecificity(List)
	 * @see MediaType#sortByQualityValue(List)
	 */
	public static void sortBySpecificityAndQuality(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			// Comparator.thenComparing replaces the deprecated CompoundComparator:
			// specificity is the primary criterion, quality value breaks ties.
			Collections.sort(mediaTypes,
					MediaType.SPECIFICITY_COMPARATOR.thenComparing(MediaType.QUALITY_VALUE_COMPARATOR));
		}
	}


	/**
	 * Comparator used by {@link #sortByQualityValue(List)}.
	 */
	public static final Comparator<MediaType> QUALITY_VALUE_COMPARATOR = new Comparator<MediaType>() {

		@Override
		public int compare(MediaType mediaType1, MediaType mediaType2) {
			double quality1 = mediaType1.getQualityValue();
			double quality2 = mediaType2.getQualityValue();
			int qualityComparison = Double.compare(quality2, quality1);
			if (qualityComparison != 0) {
				return qualityComparison;  // audio/*;q=0.7 < audio/*;q=0.3
			}
			else if (mediaType1.isWildcardType() && !mediaType2.isWildcardType()) {  // */* < audio/*
				return 1;
			}
			else if (mediaType2.isWildcardType() && !mediaType1.isWildcardType()) {  // audio/* > */*
				return -1;
			}
			else if (!mediaType1.getType().equals(mediaType2.getType())) {  // audio/basic == text/html
				return 0;
			}
			else {  // mediaType1.getType().equals(mediaType2.getType())
				if (mediaType1.isWildcardSubtype() && !mediaType2.isWildcardSubtype()) {  // audio/* < audio/basic
					return 1;
				}
				else if (mediaType2.isWildcardSubtype() && !mediaType1.isWildcardSubtype()) {  // audio/basic > audio/*
					return -1;
				}
				else if (!mediaType1.getSubtype().equals(mediaType2.getSubtype())) {  // audio/basic == audio/wave
					return 0;
				}
				else {
					int paramsSize1 = mediaType1.getParameters().size();
					int paramsSize2 = mediaType2.getParameters().size();
					// More parameters sorts first: audio/basic;level=1 < audio/basic
					return (paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1));
				}
			}
		}
	};


	/**
	 * Comparator used by {@link #sortBySpecificity(List)}.
	 */
	public static final Comparator<MediaType> SPECIFICITY_COMPARATOR = new SpecificityComparator<MediaType>() {

		@Override
		protected int compareParameters(MediaType mediaType1, MediaType mediaType2) {
			double quality1 = mediaType1.getQualityValue();
			double quality2 = mediaType2.getQualityValue();
			int qualityComparison = Double.compare(quality2, quality1);
			if (qualityComparison != 0) {
				return qualityComparison;  // audio/*;q=0.7 < audio/*;q=0.3
			}
			return super.compareParameters(mediaType1, mediaType2);
		}
	};

}
package azkaban.execapp.event;

import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_RESPONSE_WAIT_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_SOCKET_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_THREAD_POOL_SIZE;

import azkaban.utils.Props;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.FutureRequestExecutionMetrics;
import org.apache.http.impl.client.FutureRequestExecutionService;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpRequestFutureTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Responsible for making the job callback HTTP requests.
 *
 * One of the requirements is to log out the request information and response using the given
 * logger, which should be the job logger.
 *
 * @author hluu
 */
public class JobCallbackRequestMaker {

  private static final Logger logger =
      LoggerFactory.getLogger(JobCallbackRequestMaker.class);

  private static final int DEFAULT_TIME_OUT_MS = 3000;
  private static final int DEFAULT_RESPONSE_WAIT_TIME_OUT_MS = 5000;
  private static final int MAX_RESPONSE_LINE_TO_PRINT = 50;
  private static final int DEFAULT_THREAD_POOL_SIZE = 10;

  // Lazily created singleton; guarded by the class monitor in initialize().
  private static JobCallbackRequestMaker instance;
  // volatile so getInstance()/isInitialized() on other threads observe the
  // flag (and, via the happens-before edge, the fully constructed instance).
  private static volatile boolean isInitialized = false;

  private final FutureRequestExecutionService futureRequestExecutionService;
  private int responseWaitTimeoutMS = -1;

  /**
   * Builds the shared HTTP client and its executor pool from the given
   * configuration; timeouts and pool size fall back to the defaults above.
   */
  private JobCallbackRequestMaker(final Props props) {

    final int connectionRequestTimeout =
        props.getInt(JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT, DEFAULT_TIME_OUT_MS);

    final int connectionTimeout =
        props.getInt(JOBCALLBACK_CONNECTION_TIMEOUT, DEFAULT_TIME_OUT_MS);

    final int socketTimeout =
        props.getInt(JOBCALLBACK_SOCKET_TIMEOUT, DEFAULT_TIME_OUT_MS);

    this.responseWaitTimeoutMS =
        props.getInt(JOBCALLBACK_RESPONSE_WAIT_TIMEOUT, DEFAULT_RESPONSE_WAIT_TIME_OUT_MS);

    logger.info("responseWaitTimeoutMS: " + this.responseWaitTimeoutMS);

    final RequestConfig requestConfig = RequestConfig.custom()
        .setConnectionRequestTimeout(connectionRequestTimeout)
        .setConnectTimeout(connectionTimeout)
        .setSocketTimeout(socketTimeout).build();

    logger.info("Global request configuration " + requestConfig.toString());

    final HttpClient httpClient =
        HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build();

    final int jobCallbackThreadPoolSize =
        props.getInt(JOBCALLBACK_THREAD_POOL_SIZE, DEFAULT_THREAD_POOL_SIZE);
    logger.info("Jobcall thread pool size: " + jobCallbackThreadPoolSize);

    final ExecutorService executorService =
        Executors.newFixedThreadPool(jobCallbackThreadPoolSize);
    this.futureRequestExecutionService =
        new FutureRequestExecutionService(httpClient, executorService);
  }

  /**
   * Initializes the singleton. Subsequent calls are no-ops. Synchronized so
   * concurrent callers cannot both construct an instance (the original
   * check-then-act was racy).
   *
   * @param props configuration; must not be {@code null}
   * @throws NullPointerException if {@code props} is {@code null}
   */
  public static synchronized void initialize(final Props props) {
    if (props == null) {
      throw new NullPointerException("props argument can't be null");
    }

    if (isInitialized) {
      return;
    }

    instance = new JobCallbackRequestMaker(props);
    isInitialized = true;
    logger.info("Initialization for " + JobCallbackRequestMaker.class.getName()
        + " is completed");
  }

  /** @return whether {@link #initialize(Props)} has completed. */
  public static boolean isInitialized() {
    return isInitialized;
  }

  /**
   * @return the singleton instance
   * @throws IllegalStateException if {@link #initialize(Props)} was never called
   */
  public static JobCallbackRequestMaker getInstance() {
    if (!isInitialized) {
      throw new IllegalStateException(JobCallbackRequestMaker.class.getName()
          + " hasn't been initialized");
    }
    return instance;
  }

  /** @return execution metrics of the underlying request executor. */
  public FutureRequestExecutionMetrics getJobcallbackMetrics() {
    return this.futureRequestExecutionService.metrics();
  }

  /**
   * Executes each request in the list, waiting up to the configured response
   * timeout per request, and logs request/response details to the given job
   * logger. Failures are logged and never propagated to the caller.
   *
   * @param jobId job the callbacks belong to (used only in log messages)
   * @param logger the job logger to report request/response details to
   * @param httpRequestList requests to execute; {@code null}/empty is a no-op
   */
  public void makeHttpRequest(final String jobId, final Logger logger,
      final List<HttpRequestBase> httpRequestList) {

    if (httpRequestList == null || httpRequestList.isEmpty()) {
      logger.info("No HTTP requests to make");
      return;
    }

    for (final HttpRequestBase httpRequest : httpRequestList) {

      logger.debug("Job callback http request: " + httpRequest.toString());
      logger.debug("headers [");
      for (final Header header : httpRequest.getAllHeaders()) {
        logger.debug(String.format("  %s : %s", header.getName(), header.getValue()));
      }
      logger.debug("]");

      final HttpRequestFutureTask<Integer> task =
          this.futureRequestExecutionService.execute(httpRequest,
              HttpClientContext.create(), new LoggingResponseHandler(logger));

      try {
        // get with timeout
        final Integer statusCode =
            task.get(this.responseWaitTimeoutMS, TimeUnit.MILLISECONDS);
        logger.info("http callback status code: " + statusCode);
      } catch (final TimeoutException timeOutEx) {
        logger.warn("Job callback target took longer than "
            + (this.responseWaitTimeoutMS / 1000) + " seconds to respond", timeOutEx);
      } catch (final ExecutionException ee) {
        if (ee.getCause() instanceof SocketTimeoutException) {
          logger.warn("Job callback target took longer than "
              + (this.responseWaitTimeoutMS / 1000) + " seconds to respond", ee);
        } else {
          logger.warn("Encountered error while waiting for job callback to complete", ee);
        }
      } catch (final Throwable e) {
        // Pass the Throwable itself (not e.getMessage()) so the stack trace is
        // logged; passing the message put it in the throwable slot and SLF4J
        // silently dropped it.
        logger.warn("Encountered error while waiting for job callback to complete for: "
            + jobId, e);
      }
    }
  }

  /**
   * Response handler for logging job callback response using the given logger instance
   *
   * @author hluu
   */
  private static final class LoggingResponseHandler implements ResponseHandler<Integer> {

    private final Logger logger;

    public LoggingResponseHandler(final Logger logger) {
      if (logger == null) {
        throw new NullPointerException("Argument logger can't be null");
      }
      this.logger = logger;
    }

    /**
     * Logs up to {@link #MAX_RESPONSE_LINE_TO_PRINT} lines of the response
     * body and returns the HTTP status code. Logging errors are swallowed
     * (best effort) so the status code is always returned.
     */
    @Override
    public Integer handleResponse(final HttpResponse response)
        throws ClientProtocolException, IOException {

      final int statusCode = response.getStatusLine().getStatusCode();
      try {
        final HttpEntity responseEntity = response.getEntity();
        if (responseEntity != null) {
          // try-with-resources replaces the manual finally/close dance.
          try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
              responseEntity.getContent(), StandardCharsets.UTF_8))) {
            String line;
            int lineCount = 0;
            this.logger.info("HTTP response [");
            while ((line = bufferedReader.readLine()) != null) {
              this.logger.info(line);
              lineCount++;
              if (lineCount > MAX_RESPONSE_LINE_TO_PRINT) {
                // stop to avoid flooding the job log with a huge body
                break;
              }
            }
            this.logger.info("]");
          }
        } else {
          this.logger.info("No response");
        }
      } catch (final Throwable t) {
        this.logger.warn("Encountered error while logging out job callback response", t);
      }
      return statusCode;
    }
  }
}
package org.develnext.jphp.parser.classes;

import org.develnext.jphp.core.tokenizer.Tokenizer;
import org.develnext.jphp.core.tokenizer.token.stmt.*;
import org.develnext.jphp.parser.ParserExtension;
import php.runtime.Memory;
import php.runtime.annotation.Reflection.Namespace;
import php.runtime.annotation.Reflection.Signature;
import php.runtime.env.Environment;
import php.runtime.memory.ArrayMemory;
import php.runtime.reflection.ClassEntity;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * PHP-visible record describing one parsed module (source file): the
 * namespaces it declares and the classes/functions inside them. The
 * {@code synchronize} overloads incrementally merge freshly parsed statement
 * tokens into the existing record tree, reusing records when they already
 * exist so repeated parses update in place.
 */
@Namespace(ParserExtension.NS)
public class ModuleRecord extends AbstractSourceRecord {
    // Namespace records keyed by id name (see synchronize below); LinkedHashMap
    // preserves declaration order for getNamespaces().
    protected Map<String, NamespaceRecord> namespaces = new LinkedHashMap<>();

    public ModuleRecord(Environment env) {
        super(env);
    }

    public ModuleRecord(Environment env, ClassEntity clazz) {
        super(env, clazz);
    }

    /** All namespaces of this module, in declaration order. */
    @Signature
    public Collection<NamespaceRecord> getNamespaces() {
        return namespaces.values();
    }

    /** Flattens every namespace's classes into one PHP hash keyed by class name. */
    @Signature
    public Memory getClasses() {
        ArrayMemory result = ArrayMemory.createHashed();
        for (NamespaceRecord record : namespaces.values()) {
            Collection<ClassRecord> classes = record.getClasses();
            for (ClassRecord aClass : classes) {
                result.put(aClass.getName(), aClass);
            }
        }
        return result.toConstant();
    }

    /** Flattens every namespace's functions into one PHP hash keyed by function name. */
    @Signature
    public Memory getFunctions() {
        ArrayMemory result = ArrayMemory.createHashed();
        for (NamespaceRecord record : namespaces.values()) {
            Collection<MethodRecord> functions = record.getFunctions();
            for (MethodRecord function : functions) {
                result.put(function.getName(), function);
            }
        }
        return result.toConstant();
    }

    /**
     * Linear scan over all namespaces for a class named {@code name};
     * returns null when no namespace contains it.
     */
    @Signature
    public ClassRecord findClass(String name) {
        for (NamespaceRecord record : namespaces.values()) {
            ClassRecord classRecord = record.getClass(name);
            if (classRecord != null) {
                return classRecord;
            }
        }
        return null;
    }

    /** Alias of {@link #findMethod(String)} exposed under a PHP-friendly name. */
    @Signature
    public MethodRecord findFunction(String name) {
        return findMethod(name);
    }

    /**
     * Linear scan over all namespaces for a top-level function named
     * {@code name}; returns null when not found.
     */
    @Signature
    public MethodRecord findMethod(String name) {
        for (NamespaceRecord record : namespaces.values()) {
            MethodRecord function = record.getFunction(name);
            if (function != null) {
                return function;
            }
        }
        return null;
    }

    /**
     * Finds (or lazily creates) the NamespaceRecord for the given namespace
     * token; a null/anonymous token maps to the global namespace (null key).
     */
    public NamespaceRecord synchronize(Environment env, NamespaceStmtToken namespace) {
        NamespaceRecord namespaceRecord;
        if (namespace != null && namespace.getName() != null) {
            // Lookup is by lower-cased namespace name (PHP namespaces are
            // case-insensitive).
            namespaceRecord = namespaces.get(namespace.getName().toName().toLowerCase());
        } else {
            namespaceRecord = namespaces.get(null);
        }
        if (namespaceRecord == null) {
            namespaceRecord = new NamespaceRecord(env);
            namespaceRecord.setModuleRecord(this);
            namespaceRecord.setToken(namespace);
            // NOTE(review): insertion uses getIdName() while lookup uses the
            // lower-cased token name — verify the two always agree, otherwise
            // repeated synchronize calls would create duplicate records.
            namespaces.put(namespaceRecord.getIdName(), namespaceRecord);
        }
        return namespaceRecord;
    }

    /**
     * Merges a parsed class declaration into its namespace: refreshes the
     * doc comment, name, abstract flag and parent, then re-synchronizes all
     * methods and properties before (re)adding the class to the namespace.
     */
    public ClassRecord synchronize(Environment env, ClassStmtToken token, Tokenizer tokenizer) {
        NamespaceRecord namespaceRecord = synchronize(env, token.getNamespace());
        ClassRecord classRecord = namespaceRecord.getClass(token.getName().getName());
        if (classRecord == null) {
            classRecord = new ClassRecord(env);
            classRecord.setToken(token);
        }
        classRecord.setComment(token.getDocComment() == null ? null : token.getDocComment().getComment());
        classRecord.setName(token.getFulledName());
        classRecord.setIsAbstract(token.isAbstract());
        if (token.getExtend() != null) {
            // Only the parent's name is recorded here — a stub record, not the
            // fully resolved parent class.
            ClassRecord parent = new ClassRecord(env);
            parent.setName(token.getExtend().getName().getName());
            classRecord.setParent(parent);
        }
        for (MethodStmtToken methodStmtToken : token.getMethods()) {
            synchronize(env, methodStmtToken, classRecord, tokenizer);
        }
        for (ClassVarStmtToken varStmtToken : token.getProperties()) {
            synchronize(env, varStmtToken, classRecord, tokenizer);
        }
        namespaceRecord.addClass(classRecord);
        return classRecord;
    }

    /**
     * Merges a parsed top-level function into its namespace, keyed by the
     * fully-qualified name.
     */
    public void synchronize(Environment env, FunctionStmtToken token, Tokenizer tokenizer) {
        NamespaceRecord namespace = synchronize(env, token.getNamespace());
        MethodRecord methodRecord = namespace.getFunction(token.getFulledName());
        if (methodRecord == null) {
            methodRecord = new MethodRecord(env);
            methodRecord.setToken(token);
        }
        methodRecord.setName(token.getFulledName());
        methodRecord.setComment(token.getDocComment() == null ? null : token.getDocComment().getComment());
        namespace.addFunction(methodRecord);
    }

    /** Merges a parsed class property (name, doc comment, static flag) into the class record. */
    public void synchronize(Environment env, ClassVarStmtToken varStmtToken, ClassRecord classRecord, Tokenizer tokenizer) {
        PropertyRecord classVar = classRecord.getClassVar(varStmtToken.getVariable().getName());
        if (classVar == null) {
            classVar = new PropertyRecord(env);
            classVar.setToken(varStmtToken);
        }
        classVar.setName(varStmtToken.getVariable().getName());
        classVar.setComment(varStmtToken.getDocComment() == null ? null : varStmtToken.getDocComment().getComment());
        classVar.setStatic(varStmtToken.isStatic());
        classRecord.addClassVar(classVar);
    }

    /**
     * Merges a parsed method into the class record: flags, return hint, and a
     * freshly rebuilt argument list (old arguments are discarded each pass).
     */
    public void synchronize(Environment env, MethodStmtToken token, ClassRecord classRecord, Tokenizer tokenizer) {
        MethodRecord methodRecord = classRecord.getMethod(token.getName().getName());
        if (methodRecord == null) {
            methodRecord = new MethodRecord(env);
            methodRecord.setToken(token);
        }
        methodRecord.setName(token.getName().getName());
        methodRecord.setComment(token.getDocComment() == null ? null : token.getDocComment().getComment());
        methodRecord.setStatic(token.isStatic());
        methodRecord.setAbstract(token.isAbstract());
        methodRecord.setFinal(token.isFinal());
        methodRecord.setReturnTypeHint(token.getReturnHintType());
        methodRecord.setReturnTypeHintClass(token.getReturnHintTypeClass() == null ? null : token.getReturnHintTypeClass().getName());
        methodRecord.setArgumentRecords(new ArrayList<>(token.getArguments().size()));
        for (ArgumentStmtToken stmtToken : token.getArguments()) {
            methodRecord.getArgumentRecords().add(new ArgumentRecord(env, stmtToken));
        }
        classRecord.addMethod(methodRecord);
    }

    // Constants are not synchronized yet — intentionally a no-op.
    public void synchronize(Environment env, ConstStmtToken token, Tokenizer tokenizer) {
    }

    @Override
    @Signature
    public void clear() {
        super.clear();
        namespaces.clear();
    }
}
/*--------------------------------------------------------
1. Name / Date: Paul Hester 02/08/2015

2. Java version used, if not the official version for the class:
java version "1.8.0_25"
Java(TM) SE Runtime Environment (build 1.8.0_25-b18)
Java HotSpot(TM) 64-Bit Server VM (build 25.25-b02, mixed mode)

3. Precise command-line compilation examples / instructions:
> javac MyWebServer.java

4. Precise examples / instructions to run this program:
> java MyWebServer

5. List of files needed for running the program.
 a. checklist.html
 b. MyWebServer.java

5. Notes: Code was developed on a Windows 7 machine with Eclipse IDE.
----------------------------------------------------------*/
import java.io.*;
import java.net.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;

/** Entry point: accepts connections on port 2540 forever, one thread per client. */
public class MyWebServer {

    @SuppressWarnings("resource")
    public static void main(String[] args) {
        DumpStartupInformation();

        ServerSocket servsock = null;
        try {
            servsock = new ServerSocket(2540, 100);
        } catch (IOException e) {
            System.out.println("Cannot initialize ServerSocket.");
            System.exit(1);
        }

        Socket socket;
        while (true) {
            System.out.println("Ready for connection...");
            try {
                socket = servsock.accept();
            } catch (IOException e) {
                // A failed accept is not fatal; keep serving.
                continue;
            }
            System.out.println("Has connection. Beginning to process...");
            new ClientHandler(socket).start();
        }
    }

    /** Prints and logs the startup banner. */
    public static void DumpStartupInformation() {
        StringBuilder b = new StringBuilder();
        b.append("---------------------------------------------------------\n");
        b.append("Paul Hester's Web Server Starting\n");
        b.append("Port: 2540\n");
        b.append("Development Environment: Windows 7 and Eclipse IDE\n");
        b.append("---------------------------------------------------------\n");
        System.out.println(b.toString());
        Logger.getInstance().write(b.toString());
    }
}

/**
 * @author Paul Hester
 * @description Handles each connecting client on a new thread.
 */
class ClientHandler extends Thread {
    Socket sock;

    public ClientHandler(Socket socket) {
        this.sock = socket;
    }

    public void run() {
        PrintStream out = null;
        BufferedReader in = null;
        try {
            in = new BufferedReader(new InputStreamReader(sock.getInputStream()));
            out = new PrintStream(sock.getOutputStream());

            // Parse first line - See what type of request. For this assignment,
            // we're not concerned with other headers.
            HTTPRequest request = HTTPParser.GetRequest(in.readLine());
            HTTPResponse response = HTTPHandler.ProcessRequest(request);

            // Generate Response.
            out.print(HTTPHandler.GetHTTPResponseString(response));
            out.flush();
        } catch (IOException e) {
            System.out.println("Failure trying to handle client");
        } finally {
            try {
                this.sock.close();
            } catch (IOException e) {
                System.out.println("Cannot close client socket.");
            }
        }
    }
}

/**
 * @author Paul Hester
 * @description HTTPRequest used for processing.
 */
class HTTPRequest {
    String Method;
    String Resource;
    String Protocol;
    String Message;     // Human-readable reason when Invalid is set.
    HashMap<String, String> Headers;
    HashMap<String, String> Parameters;
    boolean Invalid;
}

/**
 * @author Paul Hester
 * @description HTTPResponse used for processing.
 */
class HTTPResponse {
    int StatusCode;
    int ContentLength;
    String MIME;
    String Body;
}

/**
 * @author Paul Hester
 * @description Used to create an HTTPRequest object for use.
 */
class HTTPParser {

    /**
     * Parses the request line ("METHOD RESOURCE PROTOCOL") into an HTTPRequest.
     * A null line (closed connection) or a malformed line yields Invalid=true.
     */
    public static HTTPRequest GetRequest(String request) {
        HTTPRequest r = new HTTPRequest();

        // BUG FIX: a client that disconnects before sending anything makes
        // readLine() return null; guard instead of NPE-ing on split().
        if (request == null) {
            r.Invalid = true;
            r.Message = "Empty request.";
            return r;
        }

        // I. Split first request line by " " to get three important items:
        //    Method, Resource, Protocol.
        String[] segments = request.split(" ");

        // II. If we do not find the three items needed, request is bad.
        if (segments.length != 3) {
            r.Invalid = true;
            r.Message = "Method, Protocol Version or Resource is missing.";
            return r;
        }

        // III. Setup HTTPRequest object with required values.
        r.Method = segments[0];
        r.Resource = segments[1];
        r.Protocol = segments[2];
        r.Parameters = HTTPParser.ParseQueryString(r.Resource);

        // IV. Check to see if method is supported.
        if (!HTTPValidator.getInstance().IsMethodSupported(r.Method)) {
            r.Invalid = true;
            r.Message = "Method is not supported.";
        }

        return r;
    }

    /**
     * Extracts "key=value" pairs following '?' in the resource.
     * Returns null when there is no query string at all.
     */
    public static HashMap<String, String> ParseQueryString(String resource) {
        // I. ? signifies a query string is being passed. If none, return null.
        int start = resource.indexOf('?');
        if (start == -1)
            return null;

        // II. Extract query string from resource request.
        String query = resource.substring(start + 1);

        // III. Split query string key value pairs by &, then IV. fill the map.
        String[] pairs = query.split("&");
        HashMap<String, String> kv = new HashMap<String, String>();
        for (String pair : pairs) {
            String[] segments = pair.split("=");
            if (segments.length == 2)
                kv.put(segments[0], segments[1]);
        }

        // V. Return HashMap.
        return kv;
    }
}

/**
 * @author Paul Hester
 * @description Fake CGI addnums method.
 */
class FakeCGI {
    public static int AddNumbers(int num1, int num2) {
        return num1 + num2;
    }
}

/**
 * @author Paul Hester
 * @description Validates supported HTTP methods and protocols.
 */
class HTTPValidator {
    private static HTTPValidator instance;
    private HashSet<String> methods;   // O(1) Add and Contains
    private HashSet<String> protocols; // O(1) Add and Contains

    public HTTPValidator() {
        // I. Setup supported methods.
        this.methods = new HashSet<String>();
        this.methods.add("GET");
        this.methods.add("POST");

        // II. Setup supported protocols.
        this.protocols = new HashSet<String>();
        this.protocols.add("HTTP/1.1");
    }

    public static synchronized HTTPValidator getInstance() {
        if (instance == null) {
            instance = new HTTPValidator();
        }
        return instance;
    }

    public boolean IsMethodSupported(String method) {
        return this.methods.contains(method);
    }

    public boolean IsProtocolSupported(String protocol) {
        // BUG FIX: previously consulted the methods set (copy/paste error),
        // so no protocol was ever reported as supported.
        return this.protocols.contains(protocol);
    }
}

/**
 * @author Paul Hester
 * @description Will process the HTTPRequest and create a response.
 */
class HTTPHandler {

    /**
     * Routes the request: file contents, directory listing, fake CGI, or 404.
     * Always leaves StatusCode, MIME, Body and ContentLength populated.
     */
    public static HTTPResponse ProcessRequest(HTTPRequest request) {
        // I. Dump request information to log.
        Logger.getInstance().write("Beginning to process request");
        Logger.getInstance().write("Request Method", request.Method);
        Logger.getInstance().write("Request Resource", request.Resource);
        System.out.println("Processing request for " + request.Resource);

        HTTPResponse response = new HTTPResponse();

        // II. Determine what the request is for. File, Directory listing, Fake CGI.
        if (FileProvider.IsFileAndExists(request.Resource)) {
            // II A. File is found. Read contents and process.
            Logger.getInstance().write("Resource is a FILE.");
            HTTPHandler.ProcessFile(request.Resource, response);
        } else if (FileProvider.IsDirectoryAndExists(request.Resource)) {
            // II B. Directory is found. Create directory listing HTML response.
            Logger.getInstance().write("Resource is a DIRECTORY.");
            HTTPHandler.ProcessDirectory(request.Resource, response);
        } else {
            String ext = FileProvider.GetExtension(request.Resource);
            if (ext != null && ext.equals("fake-cgi")) {
                // II C. Handle Fake CGI request.
                Logger.getInstance().write("Resource is Fake CGI.");
                HTTPHandler.ProcessCGI(request, response);
            } else {
                // II D. Still haven't found what you're looking for.
                Logger.getInstance().write("Resource NOT FOUND.");
                HTTPHandler.Process404(response);
            }
        }

        response.ContentLength = response.Body.length();

        // III. Dump response information.
        Logger.getInstance().write("Response Content Type", response.MIME);
        Logger.getInstance().write("Response Content Length",
                String.valueOf(response.ContentLength));
        Logger.getInstance().write("Response Body", String.valueOf(response.Body));

        // IV. Return response.
        return response;
    }

    public static void ProcessFile(String resource, HTTPResponse response) {
        // I. Get file extension to return proper content-type.
        String extension = FileProvider.GetExtension(resource);

        // II. Read file contents if not cached.
        response.Body = FileProvider.ReadFile(resource);
        if (response.Body == null) {
            // BUG FIX: ReadFile returns null on I/O failure; fall through to a
            // 404 instead of NPE-ing later on Body.length().
            Process404(response);
            return;
        }

        // III. Setup MIME based on extension. If MIME lookup is bad, use plain txt.
        response.MIME = Server.getInstance().GetMIME(extension);
        if (response.MIME == null)
            response.MIME = Server.getInstance().GetMIME("txt");

        // IV. Setup status code.
        response.StatusCode = 200;
    }

    public static void ProcessCGI(HTTPRequest request, HTTPResponse response) {
        // I. Parse key value pairs from query.
        if (request.Parameters != null) {
            // II. Setup variables for processing. TODO: If cannot find any needed values, 403.
            try {
                String person = request.Parameters.get("person");
                int num1 = Integer.parseInt(request.Parameters.get("num1"));
                int num2 = Integer.parseInt(request.Parameters.get("num2"));
                int total = FakeCGI.AddNumbers(num1, num2);

                // III. Format body with values for response.
                response.Body = String.format(
                        "Dear %s, the sum of %d and %d is %d.",
                        person, num1, num2, total);
            } catch (Exception e) {
                response.Body = "<h1>Could not execute addnums function.</h1>";
            }
        } else
            response.Body = "<h1>Query string parameters cannot be mapped.</h1>";

        // IV. Get MIME for content type return.
        response.MIME = Server.getInstance().GetMIME(
                FileProvider.GetExtension(request.Resource));

        // V. Setup response content length.
        response.ContentLength = response.Body.length();
    }

    public static void Process404(HTTPResponse response) {
        // I. Setup 404.
        response.StatusCode = 404;
        response.Body = "<h1>Resource Not Found.</h1>";
        response.MIME = Server.getInstance().GetMIME("html");
        response.ContentLength = response.Body.length();
    }

    public static void ProcessDirectory(String resource, HTTPResponse response) {
        // I. Directory lists are being built.
        response.Body = HTTPHandler.GetDirectoryListing(resource);

        // II. MIME will be HTML to list contents.
        response.MIME = Server.getInstance().GetMIME("html");

        // III. Response success.
        response.StatusCode = 200;
    }

    public static String GetHTTPResponseString(HTTPResponse r) {
        // TODO: Implement status message.
        return String.format(
                "HTTP/1.1 %d \r\nContent-Length: %d\r\nContent-Type: %s\r\n\r\n%s",
                r.StatusCode, r.ContentLength, r.MIME, r.Body);
    }

    public static String GetDirectoryListing(String directory) {
        StringBuilder b = new StringBuilder();

        // I. Get list of directory items. "../" would escape the served tree,
        //    so collapse any traversal attempt back to the root.
        if (directory.contains("../"))
            directory = "/";
        File[] fd = FileProvider.GetDirectoryContents(directory);
        if (fd == null)
            // BUG FIX: listFiles() can return null (unreadable dir); show an
            // empty listing rather than NPE-ing.
            fd = new File[0];

        // II. Handle / request. Create link format.
        String format = (directory.equals("/"))
                ? "<li><a href=\"%s%s\">%s</a></li>"
                : "<li><a href=\"%s/%s\">%s</a></li>";

        // III. Generate HTML to display list. UL is used as element.
        b.append(String.format("<h1>Index of %s</h1>", directory));
        b.append("<ul>");
        for (int i = 0; i < fd.length; i++) {
            String name = fd[i].getName();
            b.append(String.format(format, directory, name, name));
        }
        b.append("</ul>");

        // IV. Return HTML.
        return b.toString();
    }
}

/**
 * @author Paul Hester
 * @description Handle all file processing.
 */
class FileProvider {

    public static boolean IsFileAndExists(String resource) {
        // TODO: Guard against directory traversal
        File f = new File(Server.getInstance().GetRootPath() + resource);
        return f.exists() && !f.isDirectory();
    }

    public static boolean IsDirectoryAndExists(String resource) {
        // TODO: Guard against directory traversal
        File f = new File(Server.getInstance().GetRootPath() + resource);
        return f.exists() && f.isDirectory();
    }

    /** Reads a served file as a string, or null when it cannot be read. */
    public static String ReadFile(String name) {
        try {
            File item = new File(Server.getInstance().GetRootPath() + name);
            return readFile(item.getCanonicalPath());
        } catch (IOException e) {
            System.out.println("FILE DOESN'T EXIST");
            return null;
        }
    }

    public static String readFile(String path) {
        byte[] encoded;
        try {
            encoded = Files.readAllBytes(Paths.get(path));
            return new String(encoded, "UTF8");
        } catch (IOException e) {
            return null;
        }
    }

    /**
     * Returns the extension of the resource (text after the last '.'), with
     * any query string stripped, or null when there is no extension.
     */
    public static String GetExtension(String resource) {
        String ext = null;
        int extension = resource.lastIndexOf('.');
        int query = resource.indexOf('?');
        if (query >= 0 && extension >= 0) {
            ext = resource.substring(extension + 1, query);
        } else if (extension >= 0 && query <= 0) {
            ext = resource.substring(extension + 1, resource.length());
        }
        return ext;
    }

    public static File[] GetDirectoryContents(String resource) {
        File d = new File(Server.getInstance().GetRootPath() + resource);
        return d.listFiles();
    }

    public static File GetParentDirectory(String resource) {
        File d = new File(Server.getInstance().GetRootPath() + resource);
        return d.getParentFile();
    }
}

/**
 * @author Paul Hester
 * @description Server singleton that has information about server supported
 *              items.
 */
class Server {
    private HashMap<String, String> mimeTypes;
    private HashMap<String, String> recentlyRequestedItems;
    private static Server instance;
    public String root;

    public Server() {
        // I. Create map of supported MIME types.
        this.mimeTypes = new HashMap<String, String>();
        this.mimeTypes.put("txt", "text/plain");
        this.mimeTypes.put("html", "text/html");
        this.mimeTypes.put("ico", "image/x-icon");
        this.mimeTypes.put("fake-cgi", "text/html");
        this.mimeTypes.put("java", "text/plain");

        // II. Create cache for files requested that haven't changed.
        //     Not fully implemented.
        this.recentlyRequestedItems = new HashMap<String, String>();

        // III. Setup root path (current working directory).
        try {
            this.root = new File(".").getCanonicalPath();
        } catch (IOException e) {
            System.out
                    .println("Cannot map location to server files from... Process is exiting...");
            System.exit(0);
        }
    }

    public static synchronized Server getInstance() {
        if (instance == null) {
            instance = new Server();
        }
        return instance;
    }

    /** MIME type for an extension; defaults to text/plain when ext is null. */
    public String GetMIME(String ext) {
        if (ext != null) {
            return this.mimeTypes.get(ext.toLowerCase());
        }
        return this.mimeTypes.get("txt");
    }

    public String GetCachedItem(String resource) {
        return this.recentlyRequestedItems.get(resource);
    }

    public String PutItemInCache(String resource, String body) {
        return this.recentlyRequestedItems.put(resource, body);
    }

    public String GetRootPath() {
        return this.root;
    }
}

/**
 * @author Paul Hester
 * @description A singleton to be used for logging. Each entry is recorded with
 *              epoch time and the id of current thread.
 */
class Logger {
    private static Logger instance;
    private PrintWriter printWriter;
    private FileWriter fileWriter;
    private File file;

    public Logger() {
        // I. Init objects needed to log to serverlog.txt file. If an exception
        //    is thrown, just display a warning and do not bring down the server.
        try {
            file = new File("serverlog.txt");
            fileWriter = new FileWriter(file, true);
            printWriter = new PrintWriter(fileWriter, true);
        } catch (Exception e) {
            System.out
                    .println("WARNING... Logging will not working for this session.");
            file = null;
            fileWriter = null;
            printWriter = null;
        }
    }

    public static synchronized Logger getInstance() {
        // I. Create thread-safe instance of singleton Logger.
        if (instance == null) {
            instance = new Logger();
        }
        return instance;
    }

    public synchronized void write(String message) {
        // I. Write message using standard thread id, epoch time format.
        if (this.printWriter != null)
            printWriter.printf("%s %s - %s\r\n", System.currentTimeMillis(),
                    Thread.currentThread().getId(), message);
    }

    public synchronized void write(String prefix, String message) {
        // I. Same format, with a prefix before the message.
        if (this.printWriter != null)
            printWriter.printf("%s %s - %s %s\r\n", System.currentTimeMillis(),
                    Thread.currentThread().getId(), prefix, message);
    }

    public void close() {
        try {
            if (this.fileWriter != null)
                fileWriter.close();
            if (this.printWriter != null) {
                printWriter.flush();
                printWriter.close();
            }
        } catch (Exception e) {
            System.out
                    .println("WARNING... Was not able to close and flush PrintWriter.");
        }
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium.support.events;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.StubDriver;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriver.Navigation;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.internal.WrapsDriver;
import org.openqa.selenium.internal.WrapsElement;

import java.util.ArrayList;
import java.util.HashMap;

/**
 * Unit tests for {@link EventFiringWebDriver}: verifies that before/after
 * listener callbacks fire in the right order, that the underlying driver is
 * invoked exactly once per operation, and that wrapped elements are unpacked
 * before being handed to the real driver.
 *
 * @author Michael Tamm
 */
@RunWith(JUnit4.class)
public class EventFiringWebDriverTest {

  // Each navigation call must fire its before/after listener pair and then
  // delegate exactly once to the underlying driver/Navigation, in order.
  @Test
  public void navigationEvents() {
    final WebDriver mockedDriver = mock(WebDriver.class);
    final Navigation mockedNavigation = mock(Navigation.class);
    final StringBuilder log = new StringBuilder();

    when(mockedDriver.navigate()).thenReturn(mockedNavigation);

    EventFiringWebDriver testedDriver =
        new EventFiringWebDriver(mockedDriver).register(new AbstractWebDriverEventListener() {
          @Override
          public void beforeNavigateTo(String url, WebDriver driver) {
            log.append("beforeNavigateTo ").append(url).append("\n");
          }

          @Override
          public void afterNavigateTo(String url, WebDriver driver) {
            log.append("afterNavigateTo ").append(url).append("\n");
          }

          @Override
          public void beforeNavigateBack(WebDriver driver) {
            log.append("beforeNavigateBack\n");
          }

          @Override
          public void afterNavigateBack(WebDriver driver) {
            log.append("afterNavigateBack\n");
          }

          @Override
          public void beforeNavigateForward(WebDriver driver) {
            log.append("beforeNavigateForward\n");
          }

          @Override
          public void afterNavigateForward(WebDriver driver) {
            log.append("afterNavigateForward\n");
          }

          @Override
          public void beforeNavigateRefresh(WebDriver driver) {
            log.append("beforeNavigateRefresh\n");
          }

          @Override
          public void afterNavigateRefresh(WebDriver driver) {
            log.append("afterNavigateRefresh\n");
          }
        });

    testedDriver.get("http://www.get.com");
    testedDriver.navigate().to("http://www.navigate-to.com");
    testedDriver.navigate().back();
    testedDriver.navigate().forward();
    testedDriver.navigate().refresh();

    assertEquals(
        "beforeNavigateTo http://www.get.com\n" +
        "afterNavigateTo http://www.get.com\n" +
        "beforeNavigateTo http://www.navigate-to.com\n" +
        "afterNavigateTo http://www.navigate-to.com\n" +
        "beforeNavigateBack\n" +
        "afterNavigateBack\n" +
        "beforeNavigateForward\n" +
        "afterNavigateForward\n" +
        "beforeNavigateRefresh\n" +
        "afterNavigateRefresh\n",
        log.toString());

    InOrder order = Mockito.inOrder(mockedDriver, mockedNavigation);
    order.verify(mockedDriver).get("http://www.get.com");
    order.verify(mockedNavigation).to("http://www.navigate-to.com");
    order.verify(mockedNavigation).back();
    order.verify(mockedNavigation).forward();
    order.verify(mockedNavigation).refresh();
    order.verifyNoMoreInteractions();
  }

  // click() on a found element fires beforeClickOn/afterClickOn around the
  // real element's click.
  @Test
  public void clickEvent() {
    final WebDriver mockedDriver = mock(WebDriver.class);
    final WebElement mockedElement = mock(WebElement.class);
    final StringBuilder log = new StringBuilder();

    when(mockedDriver.findElement(By.name("foo"))).thenReturn(mockedElement);

    EventFiringWebDriver testedDriver =
        new EventFiringWebDriver(mockedDriver).register(new AbstractWebDriverEventListener() {
          @Override
          public void beforeClickOn(WebElement element, WebDriver driver) {
            log.append("beforeClickOn\n");
          }

          @Override
          public void afterClickOn(WebElement element, WebDriver driver) {
            log.append("afterClickOn\n");
          }
        });

    testedDriver.findElement(By.name("foo")).click();

    assertEquals(
        "beforeClickOn\n" +
        "afterClickOn\n",
        log.toString());

    InOrder order = Mockito.inOrder(mockedDriver, mockedElement);
    order.verify(mockedDriver).findElement(By.name("foo"));
    order.verify(mockedElement).click();
    order.verifyNoMoreInteractions();
  }

  // Only clear() and sendKeys() count as value changes — click() must NOT
  // fire the change-value callbacks (hence two pairs in the log, not three).
  @Test
  public void changeValueEvent() {
    final WebDriver mockedDriver = mock(WebDriver.class);
    final WebElement mockedElement = mock(WebElement.class);
    final StringBuilder log = new StringBuilder();

    when(mockedDriver.findElement(By.name("foo"))).thenReturn(mockedElement);

    EventFiringWebDriver testedDriver =
        new EventFiringWebDriver(mockedDriver).register(new AbstractWebDriverEventListener() {
          @Override
          public void beforeChangeValueOf(WebElement element, WebDriver driver) {
            log.append("beforeChangeValueOf\n");
          }

          @Override
          public void afterChangeValueOf(WebElement element, WebDriver driver) {
            log.append("afterChangeValueOf\n");
          }
        });

    testedDriver.findElement(By.name("foo")).clear();
    testedDriver.findElement(By.name("foo")).sendKeys("some text");
    testedDriver.findElement(By.name("foo")).click();

    assertEquals(
        "beforeChangeValueOf\n" +
        "afterChangeValueOf\n" +
        "beforeChangeValueOf\n" +
        "afterChangeValueOf\n",
        log.toString());

    InOrder order = Mockito.inOrder(mockedElement);
    order.verify(mockedElement).clear();
    order.verify(mockedElement).sendKeys("some text");
    order.verify(mockedElement).click();
    order.verifyNoMoreInteractions();

    verify(mockedDriver, times(3)).findElement(By.name("foo"));
    verifyNoMoreInteractions(mockedDriver);
  }

  // beforeFindBy/afterFindBy must report whether the search started from the
  // driver (element == null) or from an already-found element.
  @Test
  public void findByEvent() {
    final WebDriver mockedDriver = mock(WebDriver.class);
    final WebElement mockedElement = mock(WebElement.class);
    final WebElement mockedChildElement = mock(WebElement.class);
    final StringBuilder log = new StringBuilder();

    when(mockedDriver.findElement(By.id("foo"))).thenReturn(mockedElement);
    when(mockedElement.findElement(Mockito.<By>any())).thenReturn(mockedChildElement);

    EventFiringWebDriver testedDriver =
        new EventFiringWebDriver(mockedDriver).register(new AbstractWebDriverEventListener() {
          @Override
          public void beforeFindBy(By by, WebElement element, WebDriver driver) {
            log.append("beforeFindBy from ").append(element == null ? "WebDriver" : "WebElement")
                .append(" ").append(by).append("\n");
          }

          @Override
          public void afterFindBy(By by, WebElement element, WebDriver driver) {
            log.append("afterFindBy from ").append(element == null ? "WebDriver" : "WebElement")
                .append(" ").append(by).append("\n");
          }
        });

    WebElement element = testedDriver.findElement(By.id("foo"));
    element.findElement(By.linkText("bar"));
    element.findElements(By.name("xyz"));
    testedDriver.findElements(By.xpath("//link[@type = 'text/css']"));

    assertEquals(
        "beforeFindBy from WebDriver By.id: foo\n" +
        "afterFindBy from WebDriver By.id: foo\n" +
        "beforeFindBy from WebElement By.linkText: bar\n" +
        "afterFindBy from WebElement By.linkText: bar\n" +
        "beforeFindBy from WebElement By.name: xyz\n" +
        "afterFindBy from WebElement By.name: xyz\n" +
        "beforeFindBy from WebDriver By.xpath: //link[@type = 'text/css']\n" +
        "afterFindBy from WebDriver By.xpath: //link[@type = 'text/css']\n",
        log.toString());

    InOrder order = Mockito.inOrder(mockedElement, mockedDriver);
    order.verify(mockedElement).findElement(By.linkText("bar"));
    order.verify(mockedElement).findElements(By.name("xyz"));
    order.verify(mockedDriver).findElements(By.xpath("//link[@type = 'text/css']"));
    order.verifyNoMoreInteractions();
  }

  // onException must be notified, and the original exception must still
  // propagate to the caller unchanged.
  @Test
  public void shouldCallListenersWhenAnExceptionIsThrown() {
    final WebDriver mockedDriver = mock(WebDriver.class);
    final StringBuilder log = new StringBuilder();
    final NoSuchElementException exception = new NoSuchElementException("argh");

    when(mockedDriver.findElement(By.id("foo"))).thenThrow(exception);

    EventFiringWebDriver testedDriver =
        new EventFiringWebDriver(mockedDriver).register(new AbstractWebDriverEventListener() {
          @Override
          public void onException(Throwable throwable, WebDriver driver) {
            log.append(throwable.getMessage());
          }
        });

    try {
      testedDriver.findElement(By.id("foo"));
      fail("Expected exception to be propagated");
    } catch (NoSuchElementException e) {
      // Fine
    }

    assertEquals(exception.getMessage(), log.toString());
  }

  // executeScript must unwrap an event-firing element back to the raw
  // WebElement before passing it to the real JavascriptExecutor.
  @Test
  public void shouldUnpackElementArgsWhenCallingScripts() {
    final WebDriver mockedDriver = mock(WebDriver.class,
                                        withSettings().extraInterfaces(JavascriptExecutor.class));
    final WebElement stubbedElement = mock(WebElement.class);

    when(mockedDriver.findElement(By.id("foo"))).thenReturn(stubbedElement);

    EventFiringWebDriver testedDriver = new EventFiringWebDriver(mockedDriver);
    testedDriver.register(new AbstractWebDriverEventListener() {});

    WebElement element = testedDriver.findElement(By.id("foo"));
    testedDriver.executeScript("foo", element);

    verify((JavascriptExecutor) mockedDriver).executeScript("foo", element);
  }

  // Same unwrapping requirement for elements nested inside a List argument.
  @Test
  public void testShouldUnpackListOfElementArgsWhenCallingScripts() {
    final WebDriver mockedDriver = mock(WebDriver.class,
                                        withSettings().extraInterfaces(JavascriptExecutor.class));
    final WebElement mockElement = mock(WebElement.class);

    when(mockedDriver.findElement(By.id("foo"))).thenReturn(mockElement);

    EventFiringWebDriver testedDriver = new EventFiringWebDriver(mockedDriver);
    testedDriver.register(new AbstractWebDriverEventListener() {});

    final WebElement foundElement = testedDriver.findElement(By.id("foo"));
    assertTrue(foundElement instanceof WrapsElement);
    assertSame(mockElement, ((WrapsElement) foundElement).getWrappedElement());

    testedDriver.executeScript("foo", new ArrayList<Object>() {{
      add("before");
      add(foundElement);
      add("after");
    }});

    // The driver must have received the raw mockElement, not the wrapper.
    verify((JavascriptExecutor) mockedDriver).executeScript("foo", new ArrayList<Object>() {{
      add("before");
      add(mockElement);
      add("after");
    }});
  }

  // Same unwrapping requirement for elements nested inside Map values,
  // including a list nested inside the map.
  @Test
  public void testShouldUnpackMapOfElementArgsWhenCallingScripts() {
    final WebDriver mockedDriver = mock(WebDriver.class,
                                        withSettings().extraInterfaces(JavascriptExecutor.class));
    final WebElement mockElement = mock(WebElement.class);

    when(mockedDriver.findElement(By.id("foo"))).thenReturn(mockElement);

    EventFiringWebDriver testedDriver = new EventFiringWebDriver(mockedDriver);
    testedDriver.register(new AbstractWebDriverEventListener() {
    });

    final WebElement foundElement = testedDriver.findElement(By.id("foo"));
    assertTrue(foundElement instanceof WrapsElement);
    assertSame(mockElement, ((WrapsElement) foundElement).getWrappedElement());

    testedDriver.executeScript("foo", new HashMap<String, Object>() {{
      put("foo", "bar");
      put("element", foundElement);
      put("nested", new ArrayList<Object>() {{
        add("before");
        add(foundElement);
        add("after");
      }});
    }});

    verify((JavascriptExecutor) mockedDriver).executeScript("foo", new HashMap<String, Object>() {{
      put("foo", "bar");
      put("element", mockElement);
      put("nested", new ArrayList<Object>() {{
        add("before");
        add(mockElement);
        add("after");
      }});
    }});
  }

  // Wrapping a subclass of a WebDriver implementation must not blow up with a
  // ClassCastException.
  @Test
  public void shouldBeAbleToWrapSubclassesOfSomethingImplementingTheWebDriverInterface() {
    try {
      new EventFiringWebDriver(new ChildDriver());
      // We should get this far
    } catch (ClassCastException e) {
      e.printStackTrace();
      fail("Should have been able to wrap the child of a webdriver implementing interface");
    }
  }

  // The driver passed into listener callbacks must itself be unwrappable back
  // to the original underlying driver.
  @Test
  public void shouldBeAbleToAccessWrappedInstanceFromEventCalls() {
    final WebDriver stub = mock(WebDriver.class);
    EventFiringWebDriver driver = new EventFiringWebDriver(stub);
    WebDriver wrapped = driver.getWrappedDriver();
    assertEquals(stub, wrapped);

    class MyListener extends AbstractWebDriverEventListener {
      @Override
      public void beforeNavigateTo(String url, WebDriver driver) {
        WebDriver unwrapped = ((WrapsDriver) driver).getWrappedDriver();

        assertEquals(stub, unwrapped);
      }
    }

    driver.register(new MyListener());

    driver.get("http://example.org");
  }

  // Likewise, the element passed into listener callbacks must be unwrappable
  // back to the original underlying element.
  @Test
  public void shouldBeAbleToAccessWrappedElementInstanceFromEventCalls() {
    final WebElement stubElement = mock(WebElement.class);

    final WebDriver stubDriver = mock(WebDriver.class);
    when(stubDriver.findElement(By.name("stub"))).thenReturn(stubElement);

    EventFiringWebDriver driver = new EventFiringWebDriver(stubDriver);

    class MyListener extends AbstractWebDriverEventListener {
      @Override
      public void beforeClickOn(WebElement element, WebDriver driver) {
        assertEquals(stubElement, ((WrapsElement) element).getWrappedElement());
      }
    }

    driver.register(new MyListener());

    driver.findElement(By.name("stub")).click();
  }

  // The wrapper's toString() must delegate to the wrapped element's toString().
  @Test
  public void shouldReturnLocatorFromToStringMethod() {
    final WebElement stubElement = mock(WebElement.class);
    when(stubElement.toString()).thenReturn("cheese");

    final WebDriver driver = mock(WebDriver.class);
    when(driver.findElement(By.id("ignored"))).thenReturn(stubElement);

    EventFiringWebDriver firingDriver = new EventFiringWebDriver(driver);
    WebElement firingElement = firingDriver.findElement(By.id("ignored"));

    assertEquals(stubElement.toString(), firingElement.toString());
  }

  // Concrete subclass used to prove wrapping a StubDriver subclass works.
  private static class ChildDriver extends StubDriver {}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import 
org.elasticsearch.common.Randomness;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.gateway.AsyncShardFetch;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.gateway.ReplicaShardAllocator;
import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import org.elasticsearch.test.gateway.NoopGatewayAllocator;

import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;

import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;

/**
 * Base class for shard-allocation unit tests. Provides factory methods that build
 * {@link AllocationService} instances wired with the default allocation deciders in a
 * randomized order, helpers for constructing test {@link DiscoveryNode}s, canned
 * always-YES/NO/THROTTLE decider sets, and mock allocators for controlling time and
 * delayed-shard behavior.
 */
public abstract class ESAllocationTestCase extends ESTestCase {

    /** Allocation service with empty settings and the test's seeded random source. */
    public static MockAllocationService createAllocationService() {
        return createAllocationService(Settings.Builder.EMPTY_SETTINGS);
    }

    public static MockAllocationService createAllocationService(Settings settings) {
        return createAllocationService(settings, getRandom());
    }

    public static MockAllocationService createAllocationService(Settings settings, Random random) {
        return createAllocationService(settings,
                new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random);
    }

    /**
     * Fully-specified variant: randomized default deciders, a no-op gateway allocator,
     * and an empty cluster-info service.
     */
    public static MockAllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings, Random random) {
        return new MockAllocationService(settings,
                randomAllocationDeciders(settings, clusterSettings, random),
                new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), EmptyClusterInfoService.INSTANCE);
    }

    /** Variant that lets the caller supply the {@link ClusterInfoService} (e.g. for disk-based tests). */
    public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) {
        return new MockAllocationService(settings,
                randomAllocationDeciders(settings,
                        new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
                new ShardsAllocators(settings, NoopGatewayAllocator.INSTANCE), clusterInfoService);
    }

    /** Variant that lets the caller supply the {@link GatewayAllocator}. */
    public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator allocator) {
        return new MockAllocationService(settings,
                randomAllocationDeciders(settings,
                        new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
                new ShardsAllocators(settings, allocator), EmptyClusterInfoService.INSTANCE);
    }

    /**
     * Reflectively instantiates every decider in {@link ClusterModule#DEFAULT_ALLOCATION_DECIDERS}
     * — preferring a (Settings, ClusterSettings) constructor and falling back to (Settings) —
     * then shuffles them so tests don't depend on decider ordering.
     */
    public static AllocationDeciders randomAllocationDeciders(Settings settings, ClusterSettings clusterSettings, Random random) {
        final List<Class<? extends AllocationDecider>> defaultAllocationDeciders = ClusterModule.DEFAULT_ALLOCATION_DECIDERS;
        final List<AllocationDecider> list = new ArrayList<>();
        for (Class<? extends AllocationDecider> deciderClass : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) {
            try {
                try {
                    // Preferred: two-arg constructor taking dynamic cluster settings.
                    Constructor<? extends AllocationDecider> constructor =
                            deciderClass.getConstructor(Settings.class, ClusterSettings.class);
                    list.add(constructor.newInstance(settings, clusterSettings));
                } catch (NoSuchMethodException e) {
                    // Fallback: settings-only constructor.
                    Constructor<? extends AllocationDecider> constructor = null;
                    constructor = deciderClass.getConstructor(Settings.class);
                    list.add(constructor.newInstance(settings));
                }
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }
        // Sanity check: every default decider was instantiated exactly once.
        assertThat(list.size(), equalTo(defaultAllocationDeciders.size()));
        for (AllocationDecider d : list) {
            assertThat(defaultAllocationDeciders.contains(d.getClass()), is(true));
        }
        Randomness.shuffle(list);
        return new AllocationDeciders(settings, list.toArray(new AllocationDecider[0]));
    }

    public static DiscoveryNode newNode(String nodeId) {
        return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, Version.CURRENT);
    }

    public static DiscoveryNode newNode(String nodeId, TransportAddress address) {
        return new DiscoveryNode(nodeId, address, Version.CURRENT);
    }

    // Note: passes "" as the node name.
    public static DiscoveryNode newNode(String nodeId, Map<String, String> attributes) {
        return new DiscoveryNode("", nodeId, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT);
    }

    public static DiscoveryNode newNode(String nodeName, String nodeId, Map<String, String> attributes) {
        return new DiscoveryNode(nodeName, nodeId, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT);
    }

    public static DiscoveryNode newNode(String nodeId, Version version) {
        return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, version);
    }

    /**
     * Picks one randomly chosen INITIALIZING shard, marks it started via the given
     * strategy, and returns the resulting cluster state. Returns the input state
     * unchanged when no shard is initializing.
     */
    public static ClusterState startRandomInitializingShard(ClusterState clusterState, AllocationService strategy) {
        List<ShardRouting> initializingShards = clusterState.getRoutingNodes().shardsWithState(INITIALIZING);
        if (initializingShards.isEmpty()) {
            return clusterState;
        }
        RoutingTable routingTable = strategy.applyStartedShards(clusterState,
                arrayAsArrayList(initializingShards.get(randomInt(initializingShards.size() - 1)))).routingTable();
        return ClusterState.builder(clusterState).routingTable(routingTable).build();
    }

    /** Deciders that always answer YES. */
    public static AllocationDeciders yesAllocationDeciders() {
        return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.YES)});
    }

    /** Deciders that always answer NO. */
    public static AllocationDeciders noAllocationDeciders() {
        return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.NO)});
    }

    /** Deciders that always answer THROTTLE. */
    public static AllocationDeciders throttleAllocationDeciders() {
        return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.THROTTLE)});
    }

    /** Decider that returns one fixed {@link Decision} for every canAllocate variant. */
    public static class TestAllocateDecision extends AllocationDecider {

        private final Decision decision;

        public TestAllocateDecision(Decision decision) {
            super(Settings.EMPTY);
            this.decision = decision;
        }

        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return decision;
        }

        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) {
            return decision;
        }

        @Override
        public Decision canAllocate(RoutingNode node, RoutingAllocation allocation) {
            return decision;
        }
    }

    /** A mock {@link AllocationService} allowing tests to override time. */
    protected static class MockAllocationService extends AllocationService {

        // null means "use the real clock"; see currentNanoTime().
        private Long nanoTimeOverride = null;

        public MockAllocationService(Settings settings, AllocationDeciders allocationDeciders,
                                     ShardsAllocators shardsAllocators, ClusterInfoService clusterInfoService) {
            super(settings, allocationDeciders, shardsAllocators, clusterInfoService);
        }

        public void setNanoTimeOverride(long nanoTime) {
            this.nanoTimeOverride = nanoTime;
        }

        @Override
        protected long currentNanoTime() {
            return nanoTimeOverride == null ? super.currentNanoTime() : nanoTimeOverride;
        }
    }

    /**
     * Mocks behavior in ReplicaShardAllocator to remove delayed shards from list of
     * unassigned shards so they don't get reassigned yet.
     */
    protected static class DelayedShardsMockGatewayAllocator extends GatewayAllocator {

        // Anonymous subclass stubs fetchData so no real store metadata is fetched.
        private final ReplicaShardAllocator replicaShardAllocator = new ReplicaShardAllocator(Settings.EMPTY) {
            @Override
            protected AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData>
                    fetchData(ShardRouting shard, RoutingAllocation allocation) {
                return new AsyncShardFetch.FetchResult<>(shard.shardId(), null,
                        Collections.<String>emptySet(), Collections.<String>emptySet());
            }
        };

        public DelayedShardsMockGatewayAllocator() {
            super(Settings.EMPTY, null, null);
        }

        @Override
        public void applyStartedShards(StartedRerouteAllocation allocation) {}

        @Override
        public void applyFailedShards(FailedRerouteAllocation allocation) {}

        @Override
        public boolean allocateUnassigned(RoutingAllocation allocation) {
            final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator =
                    allocation.routingNodes().unassigned().iterator();
            boolean changed = false;
            while (unassignedIterator.hasNext()) {
                ShardRouting shard = unassignedIterator.next();
                IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex());
                // Only replica shards created after index creation are eligible for the delay check.
                if (shard.primary() || shard.allocatedPostIndexCreate(indexMetaData) == false) {
                    continue;
                }
                changed |= replicaShardAllocator.ignoreUnassignedIfDelayed(unassignedIterator, shard);
            }
            return changed;
        }
    }
}
package com.pacificmetrics.orca.export.saaif;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Mutable data holder for a single SAAIF export item, including its identifiers,
 * format/type metadata, attachment/asset maps, raw XML payloads, and any nested
 * tutorial, wordlist, and passage items.
 * <p>
 * The nested-item lists are lazily initialized by their getters, so
 * {@link #getTutorials()}, {@link #getWordlists()} and {@link #getPassages()}
 * never return {@code null}. Id lookups are case-insensitive; items whose id is
 * {@code null} never match (previously such items caused a NullPointerException).
 */
public class SAAIFItem {

    private String id;
    private String uniqueId;
    private String externalId;
    private int version;
    private String bankKey;
    private String format;
    private String type;
    private Map<String, String> attachments;
    private Map<String, String> assets;
    private List<SAAIFItem> tutorials;
    private List<SAAIFItem> wordlists;
    private List<SAAIFItem> passages;
    private String href;
    private String hrefBase;
    private String metadataHref;
    private String metadataHrefBase;
    private String xmlContent;
    private String metadataXmlContent;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getUniqueId() {
        return uniqueId;
    }

    public void setUniqueId(String uniqueId) {
        this.uniqueId = uniqueId;
    }

    public int getVersion() {
        return version;
    }

    public void setVersion(int version) {
        this.version = version;
    }

    public String getBankKey() {
        return bankKey;
    }

    public void setBankKey(String bankKey) {
        this.bankKey = bankKey;
    }

    public String getFormat() {
        return format;
    }

    public void setFormat(String format) {
        this.format = format;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getHref() {
        return href;
    }

    public void setHref(String href) {
        this.href = href;
    }

    public String getHrefBase() {
        return hrefBase;
    }

    public void setHrefBase(String hrefBase) {
        this.hrefBase = hrefBase;
    }

    public String getMetadataHref() {
        return metadataHref;
    }

    public void setMetadataHref(String metadataHref) {
        this.metadataHref = metadataHref;
    }

    public String getMetadataHrefBase() {
        return metadataHrefBase;
    }

    public void setMetadataHrefBase(String metadataHrefBase) {
        this.metadataHrefBase = metadataHrefBase;
    }

    public Map<String, String> getAttachments() {
        return attachments;
    }

    public void setAttachments(Map<String, String> attachments) {
        this.attachments = attachments;
    }

    public Map<String, String> getAssets() {
        return assets;
    }

    public void setAssets(Map<String, String> assets) {
        this.assets = assets;
    }

    /** Returns the tutorial list, creating an empty one on first access. */
    public List<SAAIFItem> getTutorials() {
        if (tutorials == null) {
            tutorials = new ArrayList<SAAIFItem>();
        }
        return tutorials;
    }

    public void setTutorials(List<SAAIFItem> tutorials) {
        this.tutorials = tutorials;
    }

    /** Case-insensitive lookup of a tutorial by id; {@code null} if absent. */
    public SAAIFItem getTutorialById(String tutorialId) {
        return findById(getTutorials(), tutorialId);
    }

    /** True if a tutorial with the given id (case-insensitive) is present. */
    public boolean isTutorialAdded(String newTutorialId) {
        return findById(getTutorials(), newTutorialId) != null;
    }

    /** Returns the wordlist list, creating an empty one on first access. */
    public List<SAAIFItem> getWordlists() {
        if (wordlists == null) {
            wordlists = new ArrayList<SAAIFItem>();
        }
        return wordlists;
    }

    public void setWordlists(List<SAAIFItem> wordlists) {
        this.wordlists = wordlists;
    }

    /** True if a wordlist with the given id (case-insensitive) is present. */
    public boolean isWordlistAdded(String newWordlistId) {
        return findById(getWordlists(), newWordlistId) != null;
    }

    /** Case-insensitive lookup of a wordlist by id; {@code null} if absent. */
    public SAAIFItem getWordlistById(String wordlistId) {
        return findById(getWordlists(), wordlistId);
    }

    /** Returns the passage list, creating an empty one on first access. */
    public List<SAAIFItem> getPassages() {
        if (passages == null) {
            passages = new ArrayList<SAAIFItem>();
        }
        return passages;
    }

    public void setPassages(List<SAAIFItem> passages) {
        this.passages = passages;
    }

    /** True if a passage with the given id (case-insensitive) is present. */
    public boolean isPassageAdded(String newPassageId) {
        return findById(getPassages(), newPassageId) != null;
    }

    /** Case-insensitive lookup of a passage by id; {@code null} if absent. */
    public SAAIFItem getPassageById(String passageId) {
        return findById(getPassages(), passageId);
    }

    public String getXmlContent() {
        return xmlContent;
    }

    public void setXmlContent(String xmlContent) {
        this.xmlContent = xmlContent;
    }

    public String getMetadataXmlContent() {
        return metadataXmlContent;
    }

    public void setMetadataXmlContent(String metadataXmlContent) {
        this.metadataXmlContent = metadataXmlContent;
    }

    public String getExternalId() {
        return externalId;
    }

    public void setExternalId(String externalId) {
        this.externalId = externalId;
    }

    /**
     * Shared linear search backing all getXById/isXAdded methods.
     * Items with a {@code null} id are skipped rather than throwing NPE.
     *
     * @param items  list to search (never {@code null}; callers pass lazy getters)
     * @param itemId id to match, compared case-insensitively
     * @return the first matching item, or {@code null} if none matches
     */
    private static SAAIFItem findById(List<SAAIFItem> items, String itemId) {
        for (SAAIFItem candidate : items) {
            if (candidate.getId() != null && candidate.getId().equalsIgnoreCase(itemId)) {
                return candidate;
            }
        }
        return null;
    }
}
/*
 * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.carbon.identity.application.authenticator.oidc;

import org.apache.oltu.oauth2.client.OAuthClient;
import org.apache.oltu.oauth2.client.URLConnectionClient;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest;
import org.apache.oltu.oauth2.client.response.OAuthAuthzResponse;
import org.apache.oltu.oauth2.client.response.OAuthClientResponse;
import org.apache.oltu.oauth2.common.exception.OAuthProblemException;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.apache.oltu.oauth2.common.message.types.GrantType;
import org.apache.oltu.oauth2.common.utils.JSONUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.application.authentication.framework.AbstractApplicationAuthenticator;
import org.wso2.carbon.identity.application.authentication.framework.FederatedApplicationAuthenticator;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.AuthenticationFailedException;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.core.util.IdentityUtil;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * Federated authenticator implementing the OpenID Connect authorization-code flow:
 * redirects the user to the IdP's authorization endpoint, then exchanges the returned
 * code for an access token and (optionally) an ID token, decodes the ID token payload
 * into claims, and sets the authenticated subject on the context. Protected hook
 * methods (endpoints, callback URL, state, scope, etc.) allow subclasses to customize
 * the flow for specific providers.
 */
public class OpenIDConnectAuthenticator extends AbstractApplicationAuthenticator implements
        FederatedApplicationAuthenticator {

    private static final long serialVersionUID = -4154255583070524018L;

    private static Log log = LogFactory.getLog(OpenIDConnectAuthenticator.class);

    /**
     * Decides whether this request is an OIDC response this authenticator should process:
     * either a code + state pair whose state carries our login type, or a state + error
     * pair (e.g. access_denied) from the IdP.
     */
    @Override
    public boolean canHandle(HttpServletRequest request) {

        if (log.isTraceEnabled()) {
            log.trace("Inside OpenIDConnectAuthenticator.canHandle()");
        }

        // Check commonauth got an OIDC response
        if (request.getParameter(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE) != null
                && request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE) != null
                && OIDCAuthenticatorConstants.LOGIN_TYPE.equals(getLoginType(request))) {
            return true;
        } else if (request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE) != null
                && request.getParameter(OIDCAuthenticatorConstants.OAUTH2_ERROR) != null) {
            // if sends error like access_denied
            return true;
        }
        // TODO : What if IdP failed?

        return false;
    }

    /**
     * Hook: authorization endpoint URL. Base implementation returns null, which makes
     * initiateAuthenticationRequest fall back to the configured OAUTH2_AUTHZ_URL property.
     *
     * @param authenticatorProperties configured authenticator properties
     * @return endpoint URL, or null to use the configured property
     */
    protected String getAuthorizationServerEndpoint(Map<String, String> authenticatorProperties) {
        return null;
    }

    /**
     * Hook: callback (redirect) URL. Base implementation returns null, which makes the
     * flow fall back to the server's commonauth URL.
     *
     * @param authenticatorProperties configured authenticator properties
     * @return callback URL, or null to use the default
     */
    protected String getCallbackUrl(Map<String, String> authenticatorProperties) {
        return null;
    }

    /**
     * Hook: token endpoint URL. Base implementation returns null, which makes
     * processAuthenticationResponse fall back to the configured OAUTH2_TOKEN_URL property.
     *
     * @param authenticatorProperties configured authenticator properties
     * @return token endpoint URL, or null to use the configured property
     */
    protected String getTokenEndpoint(Map<String, String> authenticatorProperties) {
        return null;
    }

    /**
     * Hook: transform the OAuth state parameter. Base implementation returns it unchanged.
     *
     * @param state the default state value ("&lt;contextId&gt;,&lt;loginType&gt;")
     * @return the state value to send to the IdP
     */
    protected String getState(String state, Map<String, String> authenticatorProperties) {
        return state;
    }

    /**
     * Hook: transform the requested scope. Base implementation returns it unchanged.
     *
     * @param scope the default scope value
     * @return the scope to send to the IdP
     */
    protected String getScope(String scope, Map<String, String> authenticatorProperties) {
        return scope;
    }

    /**
     * Hook: whether an ID token is mandatory in the token response. Base implementation
     * requires one; subclasses for plain-OAuth providers may return false.
     *
     * @return true if the ID token is required
     */
    protected boolean requiredIDToken(Map<String, String> authenticatorProperties) {
        return true;
    }

    /**
     * Hook: extract the authenticated user's identifier from the token response when no
     * ID token is present. Base implementation returns null.
     *
     * @param token token-endpoint response
     * @return subject identifier, or null
     */
    protected String getAuthenticateUser(OAuthClientResponse token) {
        return null;
    }

    /**
     * Hook: extract subject attributes from the token response when no ID token is
     * present. Base implementation returns an empty map.
     *
     * @param token token-endpoint response
     * @return claim mappings for the subject (never null here)
     */
    protected Map<ClaimMapping, String> getSubjectAttributes(OAuthClientResponse token) {
        return new HashMap<ClaimMapping, String>();
    }

    /**
     * Builds the authorization-code request and redirects the browser to the IdP's
     * authorization endpoint. The state parameter encodes the context identifier plus
     * the login type so the response can be routed back to this authenticator.
     * Extra query params configured on the IdP (QUERY_PARAMS) are parsed, stashed on
     * the context under "oidc:param.map", and appended to the login URL.
     *
     * @throws AuthenticationFailedException when properties are missing, the redirect
     *         fails (IOException), or the request cannot be built (OAuthSystemException)
     */
    @Override
    protected void initiateAuthenticationRequest(HttpServletRequest request,
                                                 HttpServletResponse response, AuthenticationContext context)
            throws AuthenticationFailedException {

        try {
            Map<String, String> authenticatorProperties = context.getAuthenticatorProperties();
            if (authenticatorProperties != null) {
                String clientId = authenticatorProperties.get(OIDCAuthenticatorConstants.CLIENT_ID);
                String authorizationEP = getAuthorizationServerEndpoint(authenticatorProperties);

                if (authorizationEP == null) {
                    authorizationEP = authenticatorProperties
                            .get(OIDCAuthenticatorConstants.OAUTH2_AUTHZ_URL);
                }

                String callbackurl = getCallbackUrl(authenticatorProperties);
                if (callbackurl == null) {
                    callbackurl = IdentityUtil.getServerURL(FrameworkConstants.COMMONAUTH);
                }

                String state = context.getContextIdentifier() + ","
                        + OIDCAuthenticatorConstants.LOGIN_TYPE;
                state = getState(state, authenticatorProperties);

                OAuthClientRequest authzRequest;
                String queryString = authenticatorProperties.get(FrameworkConstants.QUERY_PARAMS);
                Map<String, String> paramValueMap = new HashMap<String, String>();

                if (queryString != null) {
                    String[] params = queryString.split("&");
                    if (params != null && params.length > 0) {
                        for (String param : params) {
                            // NOTE(review): a param without '=' (e.g. "&flag") would throw
                            // ArrayIndexOutOfBoundsException here — confirm configured
                            // QUERY_PARAMS are always key=value pairs.
                            String[] intParam = param.split("=");
                            paramValueMap.put(intParam[0], intParam[1]);
                        }
                        context.setProperty("oidc:param.map", paramValueMap);
                    }
                }

                String scope = paramValueMap.get("scope");
                if (scope == null) {
                    scope = OIDCAuthenticatorConstants.OAUTH_OIDC_SCOPE;
                }
                scope = getScope(scope, authenticatorProperties);

                // Branch on which of scope / redirect_uri are already present in the
                // configured query string, so they are not set twice on the request.
                if (queryString != null && queryString.toLowerCase().contains("scope=")
                        && queryString.toLowerCase().contains("redirect_uri=")) {
                    authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP)
                            .setClientId(clientId)
                            .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)
                            .setState(state).buildQueryMessage();
                } else if (queryString != null && queryString.toLowerCase().contains("scope=")) {
                    authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP)
                            .setClientId(clientId).setRedirectURI(callbackurl)
                            .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)
                            .setState(state).buildQueryMessage();
                } else if (queryString != null && queryString.toLowerCase().contains("redirect_uri=")) {
                    // NOTE(review): this branch sends the default OIDC scope rather than
                    // the computed 'scope' variable — confirm this asymmetry is intended.
                    authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP)
                            .setClientId(clientId)
                            .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)
                            .setScope(OIDCAuthenticatorConstants.OAUTH_OIDC_SCOPE).setState(state)
                            .buildQueryMessage();
                } else {
                    authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP)
                            .setClientId(clientId).setRedirectURI(callbackurl)
                            .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)
                            .setScope(scope).setState(state).buildQueryMessage();
                }

                String loginPage = authzRequest.getLocationUri();
                String domain = request.getParameter("domain");

                if (domain != null) {
                    loginPage = loginPage + "&fidp=" + domain;
                }

                // Append any extra configured query params to the redirect URL.
                if (queryString != null) {
                    if (!queryString.startsWith("&")) {
                        loginPage = loginPage + "&" + queryString;
                    } else {
                        loginPage = loginPage + queryString;
                    }
                }
                response.sendRedirect(loginPage);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Error while retrieving properties. Authenticator Properties cannot be null");
                }
                throw new AuthenticationFailedException(
                        "Error while retrieving properties. Authenticator Properties cannot be null");
            }
        } catch (IOException e) {
            log.error("Exception while sending to the login page", e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        } catch (OAuthSystemException e) {
            log.error("Exception while building authorization code request", e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
        return;
    }

    /**
     * Handles the IdP's redirect back: exchanges the authorization code for tokens,
     * stores access/ID tokens on the context, decodes the ID token's payload (second
     * JWT segment, base64) into claim mappings, resolves the subject (claim-URI based
     * when configured, otherwise the "sub" attribute), and sets the authenticated user.
     *
     * @throws AuthenticationFailedException when the token exchange fails, no subject
     *         can be resolved, the ID token payload cannot be parsed, or the required
     *         tokens are missing
     */
    @Override
    protected void processAuthenticationResponse(HttpServletRequest request,
                                                 HttpServletResponse response, AuthenticationContext context)
            throws AuthenticationFailedException {

        try {
            Map<String, String> authenticatorProperties = context.getAuthenticatorProperties();
            String clientId = authenticatorProperties.get(OIDCAuthenticatorConstants.CLIENT_ID);
            String clientSecret = authenticatorProperties
                    .get(OIDCAuthenticatorConstants.CLIENT_SECRET);
            String tokenEndPoint = getTokenEndpoint(authenticatorProperties);

            if (tokenEndPoint == null) {
                tokenEndPoint = authenticatorProperties
                        .get(OIDCAuthenticatorConstants.OAUTH2_TOKEN_URL);
            }

            String callbackurl = getCallbackUrl(authenticatorProperties);
            if (callbackurl == null) {
                callbackurl = IdentityUtil.getServerURL(FrameworkConstants.COMMONAUTH);
            }

            // Prefer the redirect_uri that was captured during the initiation phase.
            @SuppressWarnings({"unchecked"})
            Map<String, String> paramValueMap = (Map<String, String>) context
                    .getProperty("oidc:param.map");

            if (paramValueMap != null && paramValueMap.containsKey("redirect_uri")) {
                callbackurl = paramValueMap.get("redirect_uri");
            }

            OAuthAuthzResponse authzResponse = OAuthAuthzResponse.oauthCodeAuthzResponse(request);
            String code = authzResponse.getCode();

            OAuthClientRequest accessRequest = null;
            accessRequest = getaccessRequest(tokenEndPoint, clientId, code, clientSecret, callbackurl);

            // create OAuth client that uses custom http client under the hood
            OAuthClient oAuthClient = new OAuthClient(new URLConnectionClient());
            OAuthClientResponse oAuthResponse = null;
            // NOTE(review): getOauthResponse can return null when the token endpoint
            // reports an OAuthProblemException; the getParam call below would then NPE.
            oAuthResponse = getOauthResponse(oAuthClient, accessRequest);

            // TODO : return access token and id token to framework
            String accessToken = oAuthResponse.getParam(OIDCAuthenticatorConstants.ACCESS_TOKEN);
            String idToken = oAuthResponse.getParam(OIDCAuthenticatorConstants.ID_TOKEN);

            if (accessToken != null && (idToken != null || !requiredIDToken(authenticatorProperties))) {
                context.setProperty(OIDCAuthenticatorConstants.ACCESS_TOKEN, accessToken);

                if (idToken != null) {
                    context.setProperty(OIDCAuthenticatorConstants.ID_TOKEN, idToken);

                    // Decode the JWT payload (second '.'-separated segment) — signature
                    // is NOT verified here.
                    String base64Body = idToken.split("\\.")[1];
                    byte[] decoded = Base64.decodeBase64(base64Body.getBytes());
                    String json = new String(decoded);
                    Map<String, Object> jsonObject = JSONUtils.parseJSON(json);

                    if (jsonObject != null) {
                        // Map every payload entry to an identically-named local claim.
                        Map<ClaimMapping, String> claims = new HashMap<ClaimMapping, String>();
                        for (Map.Entry<String, Object> entry : jsonObject.entrySet()) {
                            claims.put(
                                    ClaimMapping.build(entry.getKey(), entry.getKey(), null, false),
                                    entry.getValue().toString());
                            if (log.isDebugEnabled()) {
                                log.debug("Adding claim mapping : " + entry.getKey() + " <> "
                                        + entry.getKey() + " : " + entry.getValue());
                            }
                        }

                        String authenticatedUser = null;
                        String isSubjectInClaimsProp = context.getAuthenticatorProperties().get(
                                IdentityApplicationConstants.Authenticator.SAML2SSO.IS_USER_ID_IN_CLAIMS);
                        if ("true".equalsIgnoreCase(isSubjectInClaimsProp)) {
                            authenticatedUser = getSubjectFromUserIDClaimURI(context);
                            if (authenticatedUser == null) {
                                if (log.isDebugEnabled()) {
                                    log.debug("Subject claim could not be found amongst subject attributes. "
                                            + "Defaulting to sub attribute in IDToken.");
                                }
                            }
                        }
                        if (authenticatedUser == null) {
                            // Fall back to the standard OIDC subject attribute.
                            authenticatedUser = (String) jsonObject.get("sub");
                        }
                        if (authenticatedUser == null) {
                            throw new AuthenticationFailedException("Cannot find federated User Identifier");
                        }
                        AuthenticatedUser authenticatedUserObj = AuthenticatedUser
                                .createFederateAuthenticatedUserFromSubjectIdentifier(authenticatedUser);
                        authenticatedUserObj.setUserAttributes(claims);
                        context.setSubject(authenticatedUserObj);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("Decoded json object is null");
                        }
                        throw new AuthenticationFailedException("Decoded json object is null");
                    }
                } else {
                    // No ID token: delegate subject/attribute resolution to the hooks.
                    AuthenticatedUser authenticatedUserObj = AuthenticatedUser
                            .createFederateAuthenticatedUserFromSubjectIdentifier(getAuthenticateUser(oAuthResponse));
                    authenticatedUserObj.setUserAttributes(getSubjectAttributes(oAuthResponse));
                    context.setSubject(authenticatedUserObj);
                }
            } else {
                throw new AuthenticationFailedException("Authentication Failed");
            }
        } catch (OAuthProblemException e) {
            log.error(e.getMessage(), e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
    }

    /**
     * Builds the authorization_code token request for the given endpoint/credentials.
     *
     * @throws AuthenticationFailedException wrapping any OAuthSystemException from the builder
     */
    private OAuthClientRequest getaccessRequest(String tokenEndPoint, String clientId, String code,
                                                String clientSecret, String callbackurl)
            throws AuthenticationFailedException {
        OAuthClientRequest accessRequest = null;
        try {
            accessRequest = OAuthClientRequest.tokenLocation(tokenEndPoint)
                    .setGrantType(GrantType.AUTHORIZATION_CODE).setClientId(clientId)
                    .setClientSecret(clientSecret).setRedirectURI(callbackurl).setCode(code)
                    .buildBodyMessage();
        } catch (OAuthSystemException e) {
            if (log.isDebugEnabled()) {
                log.debug("Exception while building request for request access token", e);
            }
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
        return accessRequest;
    }

    /**
     * Executes the token request. System errors are rethrown as
     * AuthenticationFailedException; OAuthProblemException is only debug-logged,
     * so this method can return null in that case — callers must be prepared for it.
     */
    private OAuthClientResponse getOauthResponse(OAuthClient oAuthClient, OAuthClientRequest accessRequest)
            throws AuthenticationFailedException {
        OAuthClientResponse oAuthResponse = null;
        try {
            oAuthResponse = oAuthClient.accessToken(accessRequest);
        } catch (OAuthSystemException e) {
            if (log.isDebugEnabled()) {
                log.debug("Exception while requesting access token", e);
            }
            throw new AuthenticationFailedException(e.getMessage(), e);
        } catch (OAuthProblemException e) {
            // NOTE(review): swallowed — oAuthResponse stays null here; see caller note.
            if (log.isDebugEnabled()) {
                log.debug("Exception while requesting access token", e);
            }
        }
        return oAuthResponse;
    }

    /**
     * Extracts the session context identifier from the first comma-separated segment
     * of the OAuth state parameter; null when no state is present.
     */
    @Override
    public String getContextIdentifier(HttpServletRequest request) {

        if (log.isTraceEnabled()) {
            log.trace("Inside OpenIDConnectAuthenticator.getContextIdentifier()");
        }

        String state = request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE);
        if (state != null) {
            return state.split(",")[0];
        } else {
            return null;
        }
    }

    /**
     * Extracts the login type from the second comma-separated segment of the OAuth
     * state parameter; null when no state is present.
     */
    private String getLoginType(HttpServletRequest request) {
        String state = request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE);
        if (state != null) {
            return state.split(",")[1];
        } else {
            return null;
        }
    }

    @Override
    public String getFriendlyName() {
        return "openidconnect";
    }

    @Override
    public String getName() {
        return OIDCAuthenticatorConstants.AUTHENTICATOR_NAME;
    }

    /** Claim dialect used when resolving the subject from federated claims. */
    @Override
    public String getClaimDialectURI() {
        return "http://wso2.org/oidc/claim";
    }

    /**
     * Resolves the subject from the federated claims using this authenticator's claim
     * dialect. Any failure is debug-logged and null is returned so the caller can fall
     * back to the ID token's "sub" attribute.
     *
     * @param context authentication context holding the federated claims
     * @return subject value, or null if it could not be resolved
     */
    protected String getSubjectFromUserIDClaimURI(AuthenticationContext context) {
        String subject = null;
        try {
            subject = FrameworkUtils.getFederatedSubjectFromClaims(context, getClaimDialectURI());
        } catch (Exception e) {
            if (log.isDebugEnabled()) {
                log.debug("Couldn't find the subject claim from claim mappings ", e);
            }
        }
        return subject;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.ipc; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import 
org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader; import org.apache.hadoop.hbase.testclassification.RPCTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @Category({RPCTests.class, SmallTests.class}) public class TestSimpleRpcScheduler { @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
withLookingForStuckThread(true).build();

  private static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class);

  // Minimal scheduler context; only the listener address is ever consulted here.
  private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() {
    @Override
    public InetSocketAddress getListenerAddress() {
      return InetSocketAddress.createUnresolved("127.0.0.1", 1000);
    }
  };

  private Configuration conf;

  @Before
  public void setUp() {
    // Fresh configuration per test so per-test tweaks cannot leak between tests.
    conf = HBaseConfiguration.create();
  }

  /** A single dispatched task on a started scheduler must get run. */
  @Test
  public void testBasic() throws IOException, InterruptedException {
    PriorityFunction qosFunction = mock(PriorityFunction.class);
    RpcScheduler scheduler = new SimpleRpcScheduler(conf, 10, 0, 0, qosFunction, 0);
    scheduler.init(CONTEXT);
    scheduler.start();
    CallRunner task = createMockTask();
    task.setStatus(new MonitoredRPCHandlerImpl());
    scheduler.dispatch(task);
    // Mockito timeout-verify: run() must happen within 1s.
    verify(task, timeout(1000)).run();
    scheduler.stop();
  }

  /**
   * General, priority, and replication calls must each execute on their own
   * handler thread (one handler per executor pool).
   */
  @Test
  public void testHandlerIsolation() throws IOException, InterruptedException {
    CallRunner generalTask = createMockTask();
    CallRunner priorityTask = createMockTask();
    CallRunner replicationTask = createMockTask();
    List<CallRunner> tasks = ImmutableList.of(generalTask, priorityTask, replicationTask);
    // QoS values route each task to a different executor inside the scheduler.
    Map<CallRunner, Integer> qos = ImmutableMap.of(generalTask, 0, priorityTask,
        HConstants.HIGH_QOS + 1, replicationTask, HConstants.REPLICATION_QOS);
    PriorityFunction qosFunction = mock(PriorityFunction.class);

    // Record which thread ran which task; latch counts all three completions.
    final Map<CallRunner, Thread> handlerThreads = Maps.newHashMap();
    final CountDownLatch countDownLatch = new CountDownLatch(tasks.size());
    Answer<Void> answerToRun = new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
        synchronized (handlerThreads) {
          handlerThreads.put((CallRunner) invocationOnMock.getMock(), Thread.currentThread());
        }
        countDownLatch.countDown();
        return null;
      }
    };
    for (CallRunner task : tasks) {
      task.setStatus(new MonitoredRPCHandlerImpl());
      doAnswer(answerToRun).when(task).run();
    }

    RpcScheduler scheduler = new SimpleRpcScheduler(conf, 1, 1, 1, qosFunction,
        HConstants.HIGH_QOS);
    scheduler.init(CONTEXT);
    scheduler.start();
    for (CallRunner task : tasks) {
      // Stub the priority per task just before dispatching it.
      when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(), (Message) anyObject(),
          (User) anyObject())).thenReturn(qos.get(task));
      scheduler.dispatch(task);
    }
    for (CallRunner task : tasks) {
      verify(task, timeout(1000)).run();
    }
    scheduler.stop();

    // Tests that these requests are handled by three distinct threads.
    countDownLatch.await();
    assertEquals(3, ImmutableSet.copyOf(handlerThreads.values()).size());
  }

  /** Builds a mocked CallRunner wrapping a mocked ServerCall with no request user. */
  private CallRunner createMockTask() {
    ServerCall call = mock(ServerCall.class);
    CallRunner task = mock(CallRunner.class);
    when(task.getRpcCall()).thenReturn(call);
    when(call.getRequestUser()).thenReturn(Optional.empty());
    return task;
  }

  /** Runs the deadline-ordering scenario against both queue implementations. */
  @Test
  public void testRpcScheduler() throws Exception {
    testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
    testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_FIFO_CONF_VALUE);
  }

  /**
   * Dispatches a mix of small/large/huge calls and checks the total weighted
   * completion time, which differs between FIFO and deadline-reordered queues.
   */
  private void testRpcScheduler(final String queueType) throws Exception {
    Configuration schedConf = HBaseConfiguration.create();
    schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY, queueType);

    PriorityFunction priority = mock(PriorityFunction.class);
    when(priority.getPriority(any(RequestHeader.class), any(Message.class), any(User.class)))
        .thenReturn(HConstants.NORMAL_QOS);

    RpcScheduler scheduler =
        new SimpleRpcScheduler(schedConf, 1, 1, 1, priority, HConstants.QOS_THRESHOLD);
    try {
      scheduler.start();

      // Three mocked calls distinguished by call id; deadlines stubbed below.
      CallRunner smallCallTask = mock(CallRunner.class);
      ServerCall smallCall = mock(ServerCall.class);
      RequestHeader smallHead = RequestHeader.newBuilder().setCallId(1).build();
      when(smallCallTask.getRpcCall()).thenReturn(smallCall);
      when(smallCall.getHeader()).thenReturn(smallHead);
      when(smallCall.getRequestUser()).thenReturn(Optional.empty());

      CallRunner largeCallTask = mock(CallRunner.class);
      ServerCall largeCall = mock(ServerCall.class);
      RequestHeader largeHead = RequestHeader.newBuilder().setCallId(50).build();
      when(largeCallTask.getRpcCall()).thenReturn(largeCall);
      when(largeCall.getHeader()).thenReturn(largeHead);
      when(largeCall.getRequestUser()).thenReturn(Optional.empty());

      CallRunner hugeCallTask = mock(CallRunner.class);
      ServerCall hugeCall = mock(ServerCall.class);
      RequestHeader hugeHead = RequestHeader.newBuilder().setCallId(100).build();
      when(hugeCallTask.getRpcCall()).thenReturn(hugeCall);
      when(hugeCall.getHeader()).thenReturn(hugeHead);
      when(hugeCall.getRequestUser()).thenReturn(Optional.empty());

      when(priority.getDeadline(eq(smallHead), any(Message.class))).thenReturn(0L);
      when(priority.getDeadline(eq(largeHead), any(Message.class))).thenReturn(50L);
      when(priority.getDeadline(eq(hugeHead), any(Message.class))).thenReturn(100L);

      // Each executed task appends its "value" (10/50/100) to the shared work list.
      final ArrayList<Integer> work = new ArrayList<>();
      doAnswerTaskExecution(smallCallTask, work, 10, 250);
      doAnswerTaskExecution(largeCallTask, work, 50, 250);
      doAnswerTaskExecution(hugeCallTask, work, 100, 250);

      scheduler.dispatch(smallCallTask);
      scheduler.dispatch(smallCallTask);
      scheduler.dispatch(smallCallTask);
      scheduler.dispatch(hugeCallTask);
      scheduler.dispatch(smallCallTask);
      scheduler.dispatch(largeCallTask);
      scheduler.dispatch(smallCallTask);
      scheduler.dispatch(smallCallTask);

      while (work.size() < 8) {
        Thread.sleep(100);
      }

      // totalTime accumulates the running prefix-sum of the execution order,
      // so earlier heavy work inflates it.
      int seqSum = 0;
      int totalTime = 0;
      for (int i = 0; i < work.size(); ++i) {
        LOG.debug("Request i=" + i + " value=" + work.get(i));
        seqSum += work.get(i);
        totalTime += seqSum;
      }
      LOG.debug("Total Time: " + totalTime);

      // -> [small small small huge small large small small]
      // -> NO REORDER [10 10 10 100 10 50 10 10] -> 930 (FIFO Queue)
      // -> WITH REORDER [10 10 10 10 10 10 50 100] -> 530 (Deadline Queue)
      if (queueType.equals(RpcExecutor.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE)) {
        assertEquals(530, totalTime);
      } else if (queueType.equals(RpcExecutor.CALL_QUEUE_TYPE_FIFO_CONF_VALUE)) {
        assertEquals(930, totalTime);
      }
    } finally {
      scheduler.stop();
    }
  }

  /** Construction must succeed when the scan share of the read queues is zero. */
  @Test
  public void testScanQueueWithZeroScanRatio() throws Exception {
    Configuration schedConf = HBaseConfiguration.create();
    schedConf.setFloat(RpcExecutor.CALL_QUEUE_HANDLER_FACTOR_CONF_KEY, 1.0f);
    schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_READ_SHARE_CONF_KEY, 0.5f);
    schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0f);

    PriorityFunction priority = mock(PriorityFunction.class);
    when(priority.getPriority(any(RequestHeader.class), any(Message.class), any(User.class)))
        .thenReturn(HConstants.NORMAL_QOS);

    RpcScheduler scheduler =
        new SimpleRpcScheduler(schedConf, 2, 1, 1, priority, HConstants.QOS_THRESHOLD);
    assertNotEquals(scheduler, null);
  }

  /**
   * With separate put/get/scan queues, dispatched calls of the three kinds must
   * be executed interleaved rather than serialized on one queue.
   */
  @Test
  public void testScanQueues() throws Exception {
    Configuration schedConf = HBaseConfiguration.create();
    schedConf.setFloat(RpcExecutor.CALL_QUEUE_HANDLER_FACTOR_CONF_KEY, 1.0f);
    schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_READ_SHARE_CONF_KEY, 0.7f);
    schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0.5f);

    PriorityFunction priority = mock(PriorityFunction.class);
    when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
        any(User.class))).thenReturn(HConstants.NORMAL_QOS);

    RpcScheduler scheduler =
        new SimpleRpcScheduler(schedConf, 3, 1, 1, priority, HConstants.QOS_THRESHOLD);
    try {
      scheduler.start();

      // "mutate" routes to the put queue; param is a real MutateRequest.
      CallRunner putCallTask = mock(CallRunner.class);
      ServerCall putCall = mock(ServerCall.class);
      putCall.param =
          RequestConverter.buildMutateRequest(Bytes.toBytes("abc"), new Put(Bytes.toBytes("row")));
      RequestHeader putHead = RequestHeader.newBuilder().setMethodName("mutate").build();
      when(putCallTask.getRpcCall()).thenReturn(putCall);
      when(putCall.getHeader()).thenReturn(putHead);
      when(putCall.getParam()).thenReturn(putCall.param);
      when(putCall.getRequestUser()).thenReturn(Optional.empty());

      // "get" routes to the read (non-scan) queue.
      CallRunner getCallTask = mock(CallRunner.class);
      ServerCall getCall = mock(ServerCall.class);
      RequestHeader getHead = RequestHeader.newBuilder().setMethodName("get").build();
      when(getCallTask.getRpcCall()).thenReturn(getCall);
      when(getCall.getHeader()).thenReturn(getHead);
      when(getCall.getRequestUser()).thenReturn(Optional.empty());

      // "scan" with a ScanRequest param routes to the scan queue.
      CallRunner scanCallTask = mock(CallRunner.class);
      ServerCall scanCall = mock(ServerCall.class);
      scanCall.param = ScanRequest.newBuilder().setScannerId(1).build();
      RequestHeader scanHead = RequestHeader.newBuilder().setMethodName("scan").build();
      when(scanCallTask.getRpcCall()).thenReturn(scanCall);
      when(scanCall.getHeader()).thenReturn(scanHead);
      when(scanCall.getParam()).thenReturn(scanCall.param);
      when(scanCall.getRequestUser()).thenReturn(Optional.empty());

      ArrayList<Integer> work = new ArrayList<>();
      doAnswerTaskExecution(putCallTask, work, 1, 1000);
      doAnswerTaskExecution(getCallTask, work, 2, 1000);
      doAnswerTaskExecution(scanCallTask, work, 3, 1000);

      // There are 3 queues: [puts], [gets], [scans]
      // so the calls will be interleaved
      scheduler.dispatch(putCallTask);
      scheduler.dispatch(putCallTask);
      scheduler.dispatch(putCallTask);
      scheduler.dispatch(getCallTask);
      scheduler.dispatch(getCallTask);
      scheduler.dispatch(getCallTask);
      scheduler.dispatch(scanCallTask);
      scheduler.dispatch(scanCallTask);
      scheduler.dispatch(scanCallTask);

      while (work.size() < 6) {
        Thread.sleep(100);
      }

      // Each consecutive triple must contain three different task kinds.
      for (int i = 0; i < work.size() - 2; i += 3) {
        assertNotEquals(work.get(i + 0), work.get(i + 1));
        assertNotEquals(work.get(i + 0), work.get(i + 2));
        assertNotEquals(work.get(i + 1), work.get(i + 2));
      }
    } finally {
      scheduler.stop();
    }
  }

  /**
   * Stubs {@code callTask.run()} to append {@code value} to {@code results}
   * (under the list's own lock) and then sleep for {@code sleepInterval} ms.
   */
  private void doAnswerTaskExecution(final CallRunner callTask, final ArrayList<Integer> results,
      final int value, final int sleepInterval) {
    callTask.setStatus(new MonitoredRPCHandlerImpl());
    doAnswer(new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) {
        synchronized (results) {
          results.add(value);
        }
        Threads.sleepWithoutInterrupt(sleepInterval);
        return null;
      }
    }).when(callTask).run();
  }

  /** Polls every 100 ms until the scheduler's general queue drains. */
  private static void waitUntilQueueEmpty(SimpleRpcScheduler scheduler)
      throws InterruptedException {
    while (scheduler.getGeneralQueueLength() > 0) {
      Thread.sleep(100);
    }
  }

  /**
   * After the queue-length limit is lowered via onConfigurationChange, dispatch
   * must start rejecting; raising it again must accept dispatches once more.
   * Zero handlers are configured so queued calls are never drained by workers.
   */
  @Test
  public void testSoftAndHardQueueLimits() throws Exception {
    Configuration schedConf = HBaseConfiguration.create();

    schedConf.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 0);
    schedConf.setInt("hbase.ipc.server.max.callqueue.length", 5);
    PriorityFunction priority = mock(PriorityFunction.class);
    when(priority.getPriority(any(RequestHeader.class), any(Message.class), any(User.class)))
        .thenReturn(HConstants.NORMAL_QOS);
    SimpleRpcScheduler scheduler =
        new SimpleRpcScheduler(schedConf, 0, 0, 0, priority, HConstants.QOS_THRESHOLD);
    try {
      scheduler.start();

      CallRunner putCallTask = mock(CallRunner.class);
      ServerCall putCall = mock(ServerCall.class);
      putCall.param =
          RequestConverter.buildMutateRequest(Bytes.toBytes("abc"), new Put(Bytes.toBytes("row")));
      RequestHeader putHead = RequestHeader.newBuilder().setMethodName("mutate").build();
      when(putCallTask.getRpcCall()).thenReturn(putCall);
      when(putCall.getHeader()).thenReturn(putHead);
      when(putCall.getRequestUser()).thenReturn(Optional.empty());

      assertTrue(scheduler.dispatch(putCallTask));

      schedConf.setInt("hbase.ipc.server.max.callqueue.length", 0);
      scheduler.onConfigurationChange(schedConf);
      assertFalse(scheduler.dispatch(putCallTask));
      waitUntilQueueEmpty(scheduler);
      schedConf.setInt("hbase.ipc.server.max.callqueue.length", 1);
      scheduler.onConfigurationChange(schedConf);
      assertTrue(scheduler.dispatch(putCallTask));
    } finally {
      scheduler.stop();
    }
  }

  /**
   * Injectable clock: handler threads whose names match a registered prefix see
   * a queued timestamp plus {@code offset}; every other thread sees real time.
   */
  private static final class CoDelEnvironmentEdge implements EnvironmentEdge {

    // One queued timestamp is consumed per matching currentTime() call.
    private final BlockingQueue<Long> timeQ = new LinkedBlockingQueue<>();

    private long offset;
    private final Set<String> threadNamePrefixs = new HashSet<>();

    @Override
    public long currentTime() {
      for (String threadNamePrefix : threadNamePrefixs) {
        String threadName = Thread.currentThread().getName();
        if (threadName.startsWith(threadNamePrefix)) {
          // NOTE(review): poll() returns null if the queue is empty, which would
          // NPE here -- the tests rely on queueing exactly one timestamp per call.
          return timeQ.poll().longValue() + offset;
        }
      }
      return System.currentTimeMillis();
    }
  }

  // FIX. I don't get this test (St.Ack). When I time this test, the minDelay is > 2 * codel delay
  // from the get go. So we are always overloaded. The test below would seem to complete the
  // queuing of all the CallRunners inside the codel check interval. I don't think we are skipping
  // codel checking. Second, I think this test has been broken since HBASE-16089 Add on FastPath
  // for CoDel went in. The thread name we were looking for was the name BEFORE we updated: i.e.
  // "RpcServer.CodelBQ.default.handler". But same patch changed the name of the codel fastpath
  // thread to: new FastPathBalancedQueueRpcExecutor("CodelFPBQ.default", handlerCount,
  // numCallQueues... Codel is hard to test. This test is going to be flakey given it all
  // timer-based. Disabling for now till chat with authors.
  @Test
  public void testCoDelScheduling() throws Exception {
    CoDelEnvironmentEdge envEdge = new CoDelEnvironmentEdge();
    envEdge.threadNamePrefixs.add("RpcServer.default.FPBQ.Codel.handler");
    Configuration schedConf = HBaseConfiguration.create();
    schedConf.setInt(RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH, 250);
    schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY,
        RpcExecutor.CALL_QUEUE_TYPE_CODEL_CONF_VALUE);
    PriorityFunction priority = mock(PriorityFunction.class);
    when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
        any(User.class))).thenReturn(HConstants.NORMAL_QOS);
    SimpleRpcScheduler scheduler =
        new SimpleRpcScheduler(schedConf, 1, 1, 1, priority, HConstants.QOS_THRESHOLD);
    try {
      // Loading mocked call runner can take a good amount of time the first time through
      // (haven't looked why). Load it for first time here outside of the timed loop.
      getMockedCallRunner(System.currentTimeMillis(), 2);
      scheduler.start();
      EnvironmentEdgeManager.injectEdge(envEdge);
      envEdge.offset = 5;
      // Calls faster than min delay
      // LOG.info("Start");
      for (int i = 0; i < 100; i++) {
        long time = System.currentTimeMillis();
        envEdge.timeQ.put(time);
        CallRunner cr = getMockedCallRunner(time, 2);
        // LOG.info("" + i + " " + (System.currentTimeMillis() - now) + " cr=" + cr);
        scheduler.dispatch(cr);
      }
      // LOG.info("Loop done");
      // make sure fast calls are handled
      waitUntilQueueEmpty(scheduler);
      Thread.sleep(100);
      assertEquals("None of these calls should have been discarded", 0,
          scheduler.getNumGeneralCallsDropped());

      envEdge.offset = 151;
      // calls slower than min delay, but not individually slow enough to be dropped
      for (int i = 0; i < 20; i++) {
        long time = System.currentTimeMillis();
        envEdge.timeQ.put(time);
        CallRunner cr = getMockedCallRunner(time, 2);
        scheduler.dispatch(cr);
      }

      // make sure somewhat slow calls are handled
      waitUntilQueueEmpty(scheduler);
      Thread.sleep(100);
      assertEquals("None of these calls should have been discarded", 0,
          scheduler.getNumGeneralCallsDropped());

      envEdge.offset = 2000;
      // now slow calls and the ones to be dropped
      for (int i = 0; i < 60; i++) {
        long time = System.currentTimeMillis();
        envEdge.timeQ.put(time);
        CallRunner cr = getMockedCallRunner(time, 100);
        scheduler.dispatch(cr);
      }

      // make sure somewhat slow calls are handled
      waitUntilQueueEmpty(scheduler);
      Thread.sleep(100);
      assertTrue("There should have been at least 12 calls dropped however there were "
          + scheduler.getNumGeneralCallsDropped(), scheduler.getNumGeneralCallsDropped() > 12);
    } finally {
      scheduler.stop();
    }
  }

  // Get mocked call that has the CallRunner sleep for a while so that the fast
  // path isn't hit.
  private CallRunner getMockedCallRunner(long timestamp, final long sleepTime) throws IOException {
    // Real ServerCall with a no-op response so nothing is actually written out.
    ServerCall putCall = new ServerCall(1, null, null,
        RPCProtos.RequestHeader.newBuilder().setMethodName("mutate").build(),
        RequestConverter.buildMutateRequest(Bytes.toBytes("abc"), new Put(Bytes.toBytes("row"))),
        null, null, 9, null, null, timestamp, 0, null, null, null) {

      @Override
      public void sendResponseIfReady() throws IOException {
      }
    };

    CallRunner cr = new CallRunner(null, putCall) {
      public void run() {
        if (sleepTime <= 0) return;
        try {
          LOG.warn("Sleeping for " + sleepTime);
          Thread.sleep(sleepTime);
          LOG.warn("Done Sleeping for " + sleepTime);
        } catch (InterruptedException e) {
          // NOTE(review): interrupt is swallowed and the flag is not restored;
          // acceptable in this throwaway test stub, but not a pattern to copy.
        }
      }

      public RpcCall getRpcCall() {
        return putCall;
      }

      public void drop() {
      }
    };
    return cr;
  }
}
/*
 * Copyright 2020 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.pluginit.jdbc;

import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Objects;
import java.util.Properties;

/**
 * Default {@link JDBCApi} implementation that resolves the reflective
 * {@link Method} handles of a JDBC driver's entry points ({@link Driver},
 * {@link Connection}, {@link Statement}, {@link PreparedStatement},
 * {@link CallableStatement}). The concrete driver classes are supplied by the
 * injected {@link JDBCDriverClass}.
 *
 * @author Woonduk Kang(emeroad)
 */
public class DefaultJDBCApi implements JDBCApi {

    // NOTE(review): kept public for backward compatibility with any external
    // readers; prefer getJDBCDriverClass(). Consider making this private.
    public final JDBCDriverClass jdbcDriverClass;

    /**
     * @param jdbcDriverClass source of the concrete driver classes; must not be null
     */
    public DefaultJDBCApi(JDBCDriverClass jdbcDriverClass) {
        this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
    }

    @Override
    public JDBCDriverClass getJDBCDriverClass() {
        return jdbcDriverClass;
    }

    /** Returns the accessor for {@link Driver} methods. */
    public DriverClass getDriver() {
        return new DefaultDriverClass(getJDBCDriverClass());
    }

    /** Resolves methods declared on the driver's {@link Driver} implementation. */
    public static class DefaultDriverClass implements DriverClass {
        private final JDBCDriverClass jdbcDriverClass;

        public DefaultDriverClass(JDBCDriverClass jdbcDriverClass) {
            this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
        }

        // NOTE(review): name is misleading -- this returns the Driver class, not a
        // Connection class. Kept as-is because it is protected and may be overridden.
        protected Class<Driver> getConnection() {
            return jdbcDriverClass.getDriver();
        }

        /** {@code Driver.connect(String, Properties)} */
        @Override
        public Method getConnect() {
            final Class<Driver> connection = getConnection();
            return getDeclaredMethod(connection, "connect", String.class, Properties.class);
        }
    }

    /** Returns the accessor for {@link Connection} methods. */
    public ConnectionClass getConnection() {
        return new DefaultConnectionClass(getJDBCDriverClass());
    }

    /** Resolves methods declared on the driver's {@link Connection} implementation. */
    public static class DefaultConnectionClass implements ConnectionClass {
        private final JDBCDriverClass jdbcDriverClass;

        public DefaultConnectionClass(JDBCDriverClass jdbcDriverClass) {
            this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
        }

        protected Class<Connection> getConnection() {
            return jdbcDriverClass.getConnection();
        }

        /** {@code Connection.setAutoCommit(boolean)} */
        @Override
        public Method getSetAutoCommit() {
            final Class<Connection> connection = getConnection();
            return getDeclaredMethod(connection, "setAutoCommit", boolean.class);
        }

        /** {@code Connection.prepareStatement(String)} */
        @Override
        public Method getPrepareStatement() {
            final Class<Connection> connection = getConnection();
            return getDeclaredMethod(connection, "prepareStatement", String.class);
        }

        /** {@code Connection.prepareCall(String)} */
        @Override
        public Method getPrepareCall() {
            final Class<Connection> connection = getConnection();
            return getDeclaredMethod(connection, "prepareCall", String.class);
        }

        /** {@code Connection.commit()} */
        @Override
        public Method getCommit() {
            final Class<Connection> connection = getConnection();
            return getDeclaredMethod(connection, "commit");
        }
    }

    /** Returns the accessor for {@link Statement} methods. */
    public StatementClass getStatement() {
        return new DefaultStatementClass(getJDBCDriverClass());
    }

    /** Resolves methods declared on the driver's {@link Statement} implementation. */
    public static class DefaultStatementClass implements StatementClass {
        // Made private for consistency with the sibling nested classes.
        private final JDBCDriverClass jdbcDriverClass;

        public DefaultStatementClass(JDBCDriverClass jdbcDriverClass) {
            this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
        }

        protected Class<Statement> getStatement() {
            return jdbcDriverClass.getStatement();
        }

        /** {@code Statement.executeQuery(String)} */
        @Override
        public Method getExecuteQuery() {
            final Class<Statement> statement = getStatement();
            return getDeclaredMethod(statement, "executeQuery", String.class);
        }

        /** {@code Statement.executeUpdate(String)} */
        @Override
        public Method getExecuteUpdate() {
            final Class<Statement> statement = getStatement();
            return getDeclaredMethod(statement, "executeUpdate", String.class);
        }
    }

    /** Returns the accessor for {@link PreparedStatement} methods. */
    public PreparedStatementClass getPreparedStatement() {
        return new DefaultPreparedStatementClass(getJDBCDriverClass());
    }

    /** Resolves methods declared on the driver's {@link PreparedStatement} implementation. */
    public static class DefaultPreparedStatementClass implements PreparedStatementClass {
        private final JDBCDriverClass jdbcDriverClass;

        public DefaultPreparedStatementClass(JDBCDriverClass jdbcDriverClass) {
            this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
        }

        protected Class<PreparedStatement> getPreparedStatement() {
            return jdbcDriverClass.getPreparedStatement();
        }

        /** {@code PreparedStatement.execute()} */
        @Override
        public Method getExecute() {
            final Class<PreparedStatement> statement = getPreparedStatement();
            return getDeclaredMethod(statement, "execute");
        }

        /** {@code PreparedStatement.executeQuery()} */
        @Override
        public Method getExecuteQuery() {
            final Class<PreparedStatement> statement = getPreparedStatement();
            return getDeclaredMethod(statement, "executeQuery");
        }
    }

    /** Returns the accessor for {@link CallableStatement} methods. */
    @Override
    public CallableStatementClass getCallableStatement() {
        return new DefaultCallableStatementClass(getJDBCDriverClass());
    }

    /** Resolves methods declared on the driver's {@link CallableStatement} implementation. */
    public static class DefaultCallableStatementClass implements CallableStatementClass {
        private final JDBCDriverClass jdbcDriverClass;

        public DefaultCallableStatementClass(JDBCDriverClass jdbcDriverClass) {
            this.jdbcDriverClass = Objects.requireNonNull(jdbcDriverClass, "jdbcDriverClass");
        }

        protected Class<CallableStatement> getCallableStatement() {
            return jdbcDriverClass.getCallableStatement();
        }

        /** {@code CallableStatement.registerOutParameter(int, int)} */
        @Override
        public Method getRegisterOutParameter() {
            final Class<CallableStatement> callableStatement = getCallableStatement();
            return getDeclaredMethod(callableStatement, "registerOutParameter", int.class, int.class);
        }

        /** {@code CallableStatement.execute()} */
        @Override
        public Method getExecute() {
            final Class<CallableStatement> callableStatement = getCallableStatement();
            return getDeclaredMethod(callableStatement, "execute");
        }

        /** {@code CallableStatement.executeQuery()} */
        @Override
        public Method getExecuteQuery() {
            final Class<CallableStatement> callableStatement = getCallableStatement();
            return getDeclaredMethod(callableStatement, "executeQuery");
        }
    }

    /**
     * Looks up a declared method reflectively.
     *
     * @throws IllegalArgumentException wrapping {@link NoSuchMethodException} when
     *         the method does not exist on {@code clazz}, with the full signature
     *         in the message for diagnosis.
     */
    public static Method getDeclaredMethod(Class<?> clazz, String name, Class<?>... parameterTypes) {
        try {
            return clazz.getDeclaredMethod(name, parameterTypes);
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException(clazz.getName() + "." + name + Arrays.toString(parameterTypes), e);
        }
    }
}
// Licensed under the Apache License, Version 2.0 package com.flingtap.done; import java.lang.ref.WeakReference; import java.util.HashMap; import java.util.Map; import com.flingtap.common.HandledException; import com.flingtap.done.AttachmentListAdapterDelegate.UriMappings; import com.flingtap.done.provider.Task; import com.flingtap.done.provider.Task.TaskAttachments; import com.flurry.android.FlurryAgent; import com.flingtap.done.base.R; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.ComponentName; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.database.Cursor; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.provider.Contacts; import android.provider.Contacts.ContactMethods; import android.provider.Contacts.ContactMethodsColumns; import android.util.Log; import android.util.SparseArray; import android.view.ContextMenu; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.CheckBox; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; /** * TODO: Refactor this code so that it sub-classes DefaultAttachHandler since there is alot of overlap. 
*/
public class NearminderAttachHandler extends AttachmentListAdapterDelegate implements View.OnCreateContextMenuListener {
    public static final String TAG = "NearminderAttachHandler";

    // Base for all menu-item / dialog / request codes owned by this delegate,
    // keeping them distinct from other AttachmentListAdapterDelegate subclasses.
    private static final int FIRST_CODE_ID = 600;

    public int getFirstCodeId() {
        return FIRST_CODE_ID;
    }

    // Item IDs
    protected static final int PROXIMITY_MENU_VIEW_ITEM_ID   = FIRST_CODE_ID + 5;
    protected static final int PROXIMITY_MENU_EDIT_ITEM_ID   = FIRST_CODE_ID + 10;
    protected static final int PROXIMITY_MENU_DELETE_ITEM_ID = FIRST_CODE_ID + 15;
    protected static final int PROXIMITY_MENU_RENAME_ITEM_ID = FIRST_CODE_ID + 20;

    // Result Codes
    protected static final int DELETE_PROXIMITY_ALERT_REQUEST = FIRST_CODE_ID + 99;

    protected static final int RENAME_DIALOG_ID             = FIRST_CODE_ID + 50;
    protected static final int DIALOG_REMOVE_ATTACHMENT_ID  = FIRST_CODE_ID + 51;

    /**
     * Registers the single content-URI pattern ("proximity/#") this delegate
     * handles within the attachment list.
     */
    public NearminderAttachHandler(){
        uriMappings = new UriMappings[1];
        uriMappings[0] = new UriMappings();
        uriMappings[0].authority = Task.AUTHORITY;
        uriMappings[0].pathPattern = "proximity/#";
        uriMappings[0].code = 0; // Uniquely identifies this mapping. Some Attachment handlers may handle multiple different mime-types so this allows us to distinguish between them. The value is passed into bindView(..)
    }

    /**
     * Renders one Nearminder attachment row: checkbox and second line hidden,
     * first line shows the attachment name, left icon is the Nearminder icon.
     */
    @Override
    protected void bindView(View view, Context context, Cursor cursor, int code, Uri data) {
        try{
            // Hide checkbox
            CheckBox checkbox = (CheckBox) view.findViewById(R.id.checkbox);
            checkbox.setVisibility(View.GONE);

            // Hide second line
            TextView secondLineText = (TextView) view.findViewById(R.id.second_line);
            secondLineText.setVisibility(View.GONE);

            // **************************************
            // Nearminder Name
            // **************************************
            TextView singleLineText = (TextView) view.findViewById(R.id.first_line);
            singleLineText.setMinimumHeight(64);
            String name = cursor.getString(TaskAttachmentListTab.PROJ_ATTACH_NAME_INDEX);
            singleLineText.setText(name);

            // **************************************
            // Nearminder primary icon
            // **************************************
            ImageView primaryImage = (ImageView) view.findViewById(R.id.left_icon);
            primaryImage.setImageResource(R.drawable.ic_launcher_nearminder);
        }catch(HandledException h){ // Ignore: already reported upstream.
        }catch(Exception exp){
            Log.e(TAG, "ERR00043", exp);
            ErrorUtil.handleException("ERR00043", exp, context);
        }
    }

    /**
     * Builds the long-press context menu for a Nearminder row: View, Edit,
     * Remove and Rename entries, titled with the row's first-line text.
     */
    public void onCreateContextMenu(ContextMenu menu, View view, ContextMenu.ContextMenuInfo menuInfo){
        //Log.v(TAG, "onCreateContextMenu(..) called.");
        AdapterView.AdapterContextMenuInfo adapterMenuInfo = (AdapterView.AdapterContextMenuInfo)menuInfo;

        TextView singleLineText = (TextView) adapterMenuInfo.targetView.findViewById(R.id.first_line);
        menu.setHeaderTitle(singleLineText.getText());

        // *******************
        // Add "View" menu item.
        // *******************
        addViewAttachmentMenuItems(menu, PROXIMITY_MENU_VIEW_ITEM_ID, R.string.context_viewNearminder);

        // *******************
        // "Edit" menu item.
        // *******************
        MenuItem editProximityAlertMenuItem = null;
        editProximityAlertMenuItem = menu.add(8, PROXIMITY_MENU_EDIT_ITEM_ID, PROXIMITY_MENU_EDIT_ITEM_ID, R.string.context_editNearminder);

        // *******************
        // "Remove" menu item.
        // *******************
        MenuItem removeProximityAlertMenuItem = null;
        removeProximityAlertMenuItem = menu.add(8, PROXIMITY_MENU_DELETE_ITEM_ID, PROXIMITY_MENU_DELETE_ITEM_ID, R.string.context_deleteNearminder);

        // *******************
        // Add "Rename" menu item.
        // *******************
        addRenameAttachmentMenuItems(menu, PROXIMITY_MENU_RENAME_ITEM_ID);
    }

    // Last-selected attachment / proximity-alert URIs, stashed here so the
    // dialog callbacks (showDialog has no payload parameter) can reach them.
    Uri mAttachmentUri = null;
    Uri mProxAlertUri = null;

    /**
     * Dispatches the context-menu selection: View/Edit launch intents on the
     * proximity-alert URI; Delete and Rename open the corresponding dialogs.
     */
    public boolean onContextItemSelected(MenuItem item) {
        //Log.v(TAG, "onContextItemSelected(..)");
        AdapterView.AdapterContextMenuInfo mMenuInfo = (AdapterView.AdapterContextMenuInfo)item.getMenuInfo();
        Object[] tagArray = (Object[])((mMenuInfo.targetView).getTag());
        mProxAlertUri = (Uri)(tagArray[AttachmentListAdapter.TAG_ATTACHMENTS_URI_INDEX]);

        switch (item.getItemId()) {
            case PROXIMITY_MENU_VIEW_ITEM_ID:
                Event.onEvent(Event.NEARMINDER_ATTACHMENT_HANDLER_VIEW_CONTEXT_MENU_ITEM_CLICKED, null);
                mActivity.startActivity(new Intent(Intent.ACTION_VIEW, mProxAlertUri));
                return true;
            case PROXIMITY_MENU_EDIT_ITEM_ID:
                Event.onEvent(Event.NEARMINDER_ATTACHMENT_HANDLER_EDIT_CONTEXT_MENU_ITEM_CLICKED, null);
                Intent nearminderIntent = new Intent(Intent.ACTION_EDIT, mProxAlertUri);
                mActivity.startActivity(nearminderIntent);
                return true;
            case PROXIMITY_MENU_DELETE_ITEM_ID:
                // Prepare event info.
                Event.onEvent(Event.NEARMINDER_ATTACHMENT_HANDLER_DELETE_CONTEXT_MENU_ITEM_CLICKED, null);
                mActivity.showDialog(DIALOG_REMOVE_ATTACHMENT_ID);
                return true;
            case PROXIMITY_MENU_RENAME_ITEM_ID:
                mAttachmentUri = ContentUris.withAppendedId(TaskAttachments.CONTENT_URI, mMenuInfo.id);
                mActivity.showDialog(RENAME_DIALOG_ID);
                // Prepare event info.
                Event.onEvent(Event.NEARMINDER_ATTACHMENT_HANDLER_RENAME_CONTEXT_MENU_ITEM_CLICKED, null);
                return true;
            default:
                // Unknown ID: report and fall through to "not handled".
                ErrorUtil.handle("ERR00044", "onContextItemSelected() called with unknown item id" + item.getItemId(), this);
        }
        return false;
    }

    // This delegate contributes no options-menu items.
    public boolean onCreateOptionsMenu(Menu menu) {
        return false;
    }
    public boolean onPrepareOptionsMenu(Menu menu) {
        return false;
    }

    /** Only surfaces downstream errors; success results need no handling here. */
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if( resultCode == SharedConstant.RESULT_ERROR ){
            ErrorUtil.notifyUser(mActivity);
            return;
        }
    }

    private WeakReference<AttachmentPart.RenameOnTextSetListener> renameOnTextSetListener = null;

    /**
     * Creates the Rename / Remove dialogs on demand.
     * (Method continues beyond this view.)
     */
    public Dialog onCreateDialog(int dialogId){
        Dialog dialog = null;
        try{
            //Log.v(TAG, "onCreateDialog() called");
            if (mManagedDialogs == null) {
                mManagedDialogs = new SparseArray<Dialog>();
            }
            switch(dialogId){
                case RENAME_DIALOG_ID:
                    if( null == mAttachmentUri ){ // TODO: !! This check may be unecessary compensation for an error that occured before where mAttachmentUri wasn't restored correctly.
                        Log.e(TAG, "ERR000BB");
                        ErrorUtil.handleExceptionNotifyUser("ERR000BB", (Exception)(new Exception( )).fillInStackTrace(), mActivity);
                        return dialog; // I know, bad,, but what else to do here?
} AttachmentPart.RenameOnTextSetListener listener = new AttachmentPart.RenameOnTextSetListener(mActivity, mAttachmentUri); renameOnTextSetListener = new WeakReference<AttachmentPart.RenameOnTextSetListener>(listener); dialog = AttachmentPart.onCreateDialogRenameDialog(mActivity, dialogId, mAttachmentUri, listener); mManagedDialogs.put(dialogId, dialog); break; case DIALOG_REMOVE_ATTACHMENT_ID: dialog = new AlertDialog.Builder(mActivity) .setTitle(R.string.dialog_confirmDelete) .setIcon(android.R.drawable.ic_dialog_alert) .setMessage(R.string.dialog_areYouSure) .setPositiveButton(R.string.button_yes, new android.content.DialogInterface.OnClickListener(){ public void onClick(DialogInterface dialog, int whichButton){ try{ if( whichButton == android.content.DialogInterface.BUTTON_POSITIVE){ if( !Nearminder.delete(mActivity, mProxAlertUri) ){ ErrorUtil.notifyUser(mActivity); // Error already handled, so just notify user. } // Notify the ListAdapter that it's cursor needs refreshing notifyDataSetChanged(); // TODO: !! Isn't this a hack to get around the normal observer thing. } }catch(HandledException h){ // Ignore. }catch(Exception exp){ Log.e(TAG, "ERR00012", exp); ErrorUtil.handleExceptionNotifyUser("ERR00012", exp, mActivity); } } }) .setNegativeButton(R.string.button_no, null) .create(); mManagedDialogs.put(dialogId, dialog); break; } }catch(HandledException h){ // Ignore. }catch(Exception exp){ Log.e(TAG, "ERR000BB", exp); ErrorUtil.handleExceptionNotifyUser("ERR000BB", exp, mActivity); } return dialog; } public void onPrepareDialog(int dialogId, Dialog dialog){ try{ //Log.v(TAG, "onPrepareDialog() called"); switch(dialogId){ case RENAME_DIALOG_ID: assert null != renameOnTextSetListener; AttachmentPart.RenameOnTextSetListener listener = renameOnTextSetListener.get(); assert null != mAttachmentUri; listener.setAttachmentUri(mAttachmentUri); AttachmentPart.onPrepareDialogRenameDialog(mActivity, dialog, mAttachmentUri); break; } }catch(HandledException h){ // Ignore. 
}catch(Exception exp){ Log.e(TAG, "ERR000BC", exp); ErrorUtil.handleExceptionNotifyUser("ERR000BC", exp, mActivity); dialog.dismiss(); // TODO: Is this the right method to use here? } } public boolean hasInstanceState() { return true; } private static final String SAVE_ATTACHMENT_URI = "NearminderAttachHandler.SAVE_ATTACHMENT_URI"; private static final String SAVE_PROXIMITY_ALERT_URI = "NearminderAttachHandler.SAVE_PROXIMITY_ALERT_URI"; private SparseArray<Dialog> mManagedDialogs; public void onSaveInstanceState (Bundle outState){ try{ //Log.v(TAG, "onSaveInstanceState() called"); if( null != mAttachmentUri ){ outState.putParcelable(SAVE_ATTACHMENT_URI, mAttachmentUri); } if( null != mProxAlertUri ){ outState.putParcelable(SAVE_PROXIMITY_ALERT_URI, mProxAlertUri); } saveManagedDialogs(outState, mManagedDialogs, TAG); }catch(HandledException h){ // Ignore. }catch(Exception exp){ Log.e(TAG, "ERR000BD", exp); ErrorUtil.handleExceptionFinish("ERR000BD", exp, mActivity); } } public void onRestoreInstanceState (Bundle savedInstanceState){ try{ //Log.v(TAG, "onRestoreInstanceState() called"); if( savedInstanceState.containsKey(SAVE_ATTACHMENT_URI) ){ mAttachmentUri = savedInstanceState.getParcelable(SAVE_ATTACHMENT_URI) ; } if( savedInstanceState.containsKey(SAVE_PROXIMITY_ALERT_URI) ){ mProxAlertUri = savedInstanceState.getParcelable(SAVE_PROXIMITY_ALERT_URI); } restoreManagedDialogs(savedInstanceState, mManagedDialogs, TAG); // throw new RuntimeException("TEST"); // TODO: !!! An exception thrown here, results in a 0 result code in TaskList. Why is the response code from handleExceptionFinish(..) ignored? }catch(HandledException h){ // Ignore. }catch(Exception exp){ Log.e(TAG, "ERR000BE", exp); ErrorUtil.handleExceptionFinish("ERR000BE", exp, mActivity); } } public void onDestroy() { onDestroy(mManagedDialogs); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.authorities.authorities.amazons3; import java.io.IOException; import java.io.InterruptedIOException; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.manifoldcf.authorities.interfaces.AuthorizationResponse; import org.apache.manifoldcf.authorities.system.Logging; import org.apache.manifoldcf.core.interfaces.ConfigNode; import org.apache.manifoldcf.core.interfaces.ConfigParams; import org.apache.manifoldcf.core.interfaces.IHTTPOutput; import org.apache.manifoldcf.core.interfaces.IPasswordMapperActivity; import org.apache.manifoldcf.core.interfaces.IPostParameters; import org.apache.manifoldcf.core.interfaces.IThreadContext; import org.apache.manifoldcf.core.interfaces.ManifoldCFException; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import 
com.amazonaws.services.s3.model.AccessControlList; import com.amazonaws.services.s3.model.Bucket; import com.amazonaws.services.s3.model.CanonicalGrantee; import com.amazonaws.services.s3.model.Grant; import com.amazonaws.services.s3.model.Grantee; import com.amazonaws.services.s3.model.Owner; /** * Authority connector for Amazons3 * @author Kuhajeyan * */ public class AmazonS3Authority extends org.apache.manifoldcf.authorities.authorities.BaseAuthorityConnector { private static final String TAB_NAME = "TabName"; protected long lastSessionFetch = -1L; protected static final long timeToRelease = 300000L; protected AmazonS3 amazonS3; protected boolean connected = false; protected String amazons3ProxyHost = null; protected String amazons3ProxyPort = null; protected String amazons3ProxyDomain = null; protected String amazons3ProxyUserName = null; protected String amazons3ProxyPassword = null; protected String amazons3AwsAccessKey = null; protected String amazons3AwsSecretKey = null; public AmazonS3Authority() { } @Override public void disconnect() throws ManifoldCFException { amazons3AwsAccessKey = null; amazons3AwsSecretKey = null; amazons3ProxyHost = null; amazons3ProxyPort = null; amazons3ProxyDomain = null; amazons3ProxyUserName = null; amazons3ProxyPassword = null; } @Override public void connect(ConfigParams configParams) { super.connect(configParams); // aws access and secret keys amazons3AwsAccessKey = configParams .getParameter(AmazonS3Config.AWS_ACCESS_KEY); amazons3AwsSecretKey = configParams .getObfuscatedParameter(AmazonS3Config.AWS_SECRET_KEY); // proxy values amazons3ProxyHost = configParams .getParameter(AmazonS3Config.AMAZONS3_PROXY_HOST); amazons3ProxyPort = configParams .getParameter(AmazonS3Config.AMAZONS3_PROXY_PORT); amazons3ProxyDomain = configParams .getParameter(AmazonS3Config.AMAZONS3_PROXY_DOMAIN); amazons3ProxyUserName = configParams .getParameter(AmazonS3Config.AMAZONS3_PROXY_USERNAME); amazons3ProxyPassword = configParams 
.getObfuscatedParameter(AmazonS3Config.AMAZONS3_PROXY_PASSWORD); } /** * Test the connection. Returns a string describing the connection * integrity. * * @return the connection's status as a displayable string. */ @Override public String check() throws ManifoldCFException { // connect with amazons3 client Logging.authorityConnectors.info("Checking connection"); try { // invokes the check thread CheckThread checkThread = new CheckThread(getClient()); checkThread.start(); checkThread.join(); if (checkThread.getException() != null) { Throwable thr = checkThread.getException(); return "Check exception: " + thr.getMessage(); } return checkThread.getResult(); } catch (InterruptedException ex) { Logging.authorityConnectors.error( "Error while checking connection", ex); throw new ManifoldCFException(ex.getMessage(), ex, ManifoldCFException.INTERRUPTED); } } /** * Get the Amazons3 client, relevant access keys should have been posted * already * @return */ protected AmazonS3 getClient() { if (amazonS3 == null) { try { BasicAWSCredentials awsCreds = new BasicAWSCredentials( amazons3AwsAccessKey, amazons3AwsSecretKey); amazonS3 = new AmazonS3Client(awsCreds); } catch (Exception e) { Logging.authorityConnectors.error( "Error while amazon s3 connectionr", e); } } lastSessionFetch = System.currentTimeMillis(); return amazonS3; } @Override public boolean isConnected() { return amazonS3 != null && amazonS3.getS3AccountOwner() != null; } @Override public void poll() throws ManifoldCFException { if (lastSessionFetch == -1L) { return; } long currentTime = System.currentTimeMillis(); if (currentTime >= lastSessionFetch + timeToRelease) { amazonS3 = null; lastSessionFetch = -1L; } } private void fillInServerConfigurationMap(Map<String, Object> out, IPasswordMapperActivity mapper, ConfigParams parameters) { String amazons3AccessKey = parameters .getParameter(AmazonS3Config.AWS_ACCESS_KEY); String amazons3SecretKey = parameters .getParameter(AmazonS3Config.AWS_SECRET_KEY); // default 
values if (amazons3AccessKey == null) amazons3AccessKey = AmazonS3Config.AMAZONS3_AWS_ACCESS_KEY_DEFAULT; if (amazons3SecretKey == null) amazons3SecretKey = AmazonS3Config.AMAZONS3_AWS_SECRET_KEY_DEFAULT; else amazons3SecretKey = mapper.mapPasswordToKey(amazons3SecretKey); // fill the map out.put("AMAZONS3_AWS_ACCESS_KEY", amazons3AccessKey); out.put("AMAZONS3_AWS_SECRET_KEY", amazons3SecretKey); } private void fillInProxyConfigurationMap(Map<String, Object> out, IPasswordMapperActivity mapper, ConfigParams parameters) { String amazons3ProxyHost = parameters .getParameter(AmazonS3Config.AMAZONS3_PROXY_HOST); String amazons3ProxyPort = parameters .getParameter(AmazonS3Config.AMAZONS3_PROXY_PORT); String amazons3ProxyDomain = parameters .getParameter(AmazonS3Config.AMAZONS3_PROXY_DOMAIN); String amazons3ProxyUserName = parameters .getParameter(AmazonS3Config.AMAZONS3_PROXY_USERNAME); String amazons3ProxyPassword = parameters .getObfuscatedParameter(AmazonS3Config.AMAZONS3_PROXY_PASSWORD); if (amazons3ProxyHost == null) amazons3ProxyHost = AmazonS3Config.AMAZONS3_PROXY_HOST_DEFAULT; if (amazons3ProxyPort == null) amazons3ProxyPort = AmazonS3Config.AMAZONS3_PROXY_PORT_DEFAULT; if (amazons3ProxyDomain == null) amazons3ProxyDomain = AmazonS3Config.AMAZONS3_PROXY_DOMAIN_DEFAULT; if (amazons3ProxyUserName == null) amazons3ProxyUserName = AmazonS3Config.AMAZONS3_PROXY_USERNAME_DEFAULT; if (amazons3ProxyPassword == null) amazons3ProxyPassword = AmazonS3Config.AMAZONS3_PROXY_PASSWORD_DEFAULT; else amazons3ProxyPassword = mapper .mapPasswordToKey(amazons3ProxyPassword); // fill the map out.put("AMAZONS3_PROXY_HOST", amazons3ProxyHost); out.put("AMAZONS3_PROXY_PORT", amazons3ProxyPort); out.put("AMAZONS3_PROXY_DOMAIN", amazons3ProxyDomain); out.put("AMAZONS3_PROXY_USERNAME", amazons3ProxyUserName); out.put("AMAZONS3_PROXY_PWD", amazons3ProxyPassword); } /** * View configuration. This method is called in the body section of the * connector's view configuration page. 
Its purpose is to present the * connection information to the user. The coder can presume that the HTML * that is output from this configuration will be within appropriate <html> * and <body> tags. * * */ public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { Map<String, Object> paramMap = new HashMap<String, Object>(); // Fill in map from each tab fillInServerConfigurationMap(paramMap, out, parameters); fillInProxyConfigurationMap(paramMap, out, parameters); Messages.outputResourceWithVelocity(out, locale, AmazonS3Config.VIEW_CONFIG_FORWARD, paramMap); } /** * Output the configuration header section. This method is called in the * head section of the connector's configuration page. Its purpose is to add * the required tabs to the list, and to output any javascript methods that * might be needed by the configuration editing HTML. * */ @Override public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException { // Add the Server tab tabsArray.add(Messages.getString(locale, AmazonS3Config.AMAZONS3_SERVER_TAB_PROPERTY)); // Add the Proxy tab tabsArray.add(Messages.getString(locale, AmazonS3Config.AMAZONS3_PROXY_TAB_PROPERTY)); // Map the parameters Map<String, Object> paramMap = new HashMap<String, Object>(); // Fill in the parameters from each tab fillInServerConfigurationMap(paramMap, out, parameters); fillInProxyConfigurationMap(paramMap, out, parameters); // Output the Javascript - only one Velocity template for all tabs Messages.outputResourceWithVelocity(out, locale, AmazonS3Config.EDIT_CONFIG_HEADER_FORWARD, paramMap); } @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { // Call the Velocity 
templates for each tab Map<String, Object> paramMap = new HashMap<String, Object>(); // Set the tab name paramMap.put(TAB_NAME, tabName); // Fill in the parameters fillInServerConfigurationMap(paramMap, out, parameters); fillInProxyConfigurationMap(paramMap, out, parameters); // Server tab Messages.outputResourceWithVelocity(out, locale, AmazonS3Config.EDIT_CONFIG_FORWARD_SERVER, paramMap); // Proxy tab Messages.outputResourceWithVelocity(out, locale, AmazonS3Config.EDIT_CONFIG_FORWARD_PROXY, paramMap); } /** * Process a configuration post. This method is called at the start of the * connector's configuration page, whenever there is a possibility that form * data for a connection has been posted. Its purpose is to gather form * information and modify the configuration parameters accordingly. The name * of the posted form is "editconnection". * */ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { // server tab String awsAccessKey = variableContext.getParameter("aws_access_key"); if (awsAccessKey != null) { parameters .setParameter(AmazonS3Config.AWS_ACCESS_KEY, awsAccessKey); } String awsSecretKey = variableContext.getParameter("aws_secret_key"); if (awsSecretKey != null) { // set as obfuscated parameter parameters.setObfuscatedParameter(AmazonS3Config.AWS_SECRET_KEY, awsSecretKey); } Logging.authorityConnectors.info("Saved values for aws keys"); int i = 0; while (i < parameters.getChildCount()) { ConfigNode cn = parameters.getChild(i); if (cn.getType().equals(AmazonS3Config.AWS_ACCESS_KEY) || cn.getType().equals(AmazonS3Config.AWS_SECRET_KEY)) parameters.removeChild(i); else i++; } // proxy tab String amazons3ProxyHost = variableContext .getParameter("amazons3_proxy_host"); if (amazons3ProxyHost != null) { parameters.setParameter(AmazonS3Config.AMAZONS3_PROXY_HOST, amazons3ProxyHost); } String amazons3ProxyPort = variableContext 
.getParameter("amazons3_proxy_port"); if (amazons3ProxyPort != null) { parameters.setParameter(AmazonS3Config.AMAZONS3_PROXY_PORT, amazons3ProxyPort); } String amazons3ProxyDomain = variableContext .getParameter("amazons3_proxy_domain"); if (amazons3ProxyDomain != null) { parameters.setParameter(AmazonS3Config.AMAZONS3_PROXY_DOMAIN, amazons3ProxyDomain); } String amazons3ProxyUserName = variableContext .getParameter("amazons3_proxy_username"); if (amazons3ProxyUserName != null) { parameters.setParameter(AmazonS3Config.AMAZONS3_PROXY_USERNAME, amazons3ProxyUserName); } String amazons3ProxyPassword = variableContext .getParameter("amazons3_proxy_pwd"); if (amazons3ProxyPassword != null) { // set as obfuscated parameter parameters.setObfuscatedParameter( AmazonS3Config.AMAZONS3_PROXY_USERNAME, amazons3ProxyUserName); } return null; } @Override public AuthorizationResponse getAuthorizationResponse(String userName) throws ManifoldCFException { try { HashMap<String, Set<Grant>> checkUserExists = checkUserExists(userName); if (isUserAvailable(userName, checkUserExists.values())) { return new AuthorizationResponse(new String[] { userName }, AuthorizationResponse.RESPONSE_OK); } } catch (Exception e) { Logging.authorityConnectors.error("Error while getting authorization response",e); return RESPONSE_UNREACHABLE; } return RESPONSE_USERNOTFOUND; } private boolean isUserAvailable(String userName, Collection<Set<Grant>> collection) { String[] users = getUsers(collection); return Arrays.asList(users).contains(userName); } private String[] getUsers(Collection<Set<Grant>> collection) { Set<String> users = new HashSet<String>();// no duplicates for (Collection c : collection) { Set<Grant> c1 = (Set<Grant>) c; for (Grant grant : c1) { if (grant != null && grant.getGrantee() != null) { Grantee grantee = grant.getGrantee(); if (grantee instanceof CanonicalGrantee) { users.add(((CanonicalGrantee) grantee).getDisplayName()); } else { users.add(grantee.getIdentifier()); } } } } return 
users.toArray(new String[users.size()]); } private HashMap<String, Set<Grant>> checkUserExists(String userName) throws ManifoldCFException { GrantsThread t = new GrantsThread(getClient()); try { t.start(); t.finishUp(); return t.getResult(); } catch (InterruptedException e) { t.interrupt(); throw new ManifoldCFException("Interrupted: " + e.getMessage(), e, ManifoldCFException.INTERRUPTED); } catch (java.net.SocketTimeoutException e) { handleIOException(e); } catch (InterruptedIOException e) { t.interrupt(); handleIOException(e); } catch (IOException e) { handleIOException(e); } catch (ResponseException e) { handleResponseException(e); } return null; } /** * Obtain the default access tokens for a given user name. * @param userName is the user name or identifier. * @return the default response tokens, presuming that the connect method * fails. */ @Override public AuthorizationResponse getDefaultAuthorizationResponse(String userName) { return RESPONSE_UNREACHABLE; } private static void handleIOException(IOException e) throws ManifoldCFException { if (!(e instanceof java.net.SocketTimeoutException) && (e instanceof InterruptedIOException)) { throw new ManifoldCFException("Interrupted: " + e.getMessage(), e, ManifoldCFException.INTERRUPTED); } Logging.authorityConnectors.warn( "JIRA: IO exception: " + e.getMessage(), e); throw new ManifoldCFException("IO exception: " + e.getMessage(), e); } private static void handleResponseException(ResponseException e) throws ManifoldCFException { throw new ManifoldCFException("Response exception: " + e.getMessage(), e); } protected static class GrantsThread extends Thread { protected Throwable exception = null; protected boolean result = false; protected AmazonS3 amazonS3 = null; private HashMap<String, Set<Grant>> grants; public GrantsThread(AmazonS3 amazonS3) { super(); this.amazonS3 = amazonS3; setDaemon(true); grants = new HashMap<String, Set<Grant>>(); } public void finishUp() throws InterruptedException, IOException, 
ResponseException { join(); Throwable thr = exception; if (thr != null) { if (thr instanceof IOException) { throw (IOException) thr; } else if (thr instanceof ResponseException) { throw (ResponseException) thr; } else if (thr instanceof RuntimeException) { throw (RuntimeException) thr; } else { throw (Error) thr; } } } @Override public void run() { List<Bucket> listBuckets = amazonS3.listBuckets(); for (Bucket bucket : listBuckets) { AccessControlList bucketAcl = amazonS3.getBucketAcl(bucket .getName()); if (bucketAcl != null) grants.put(bucket.getName(), bucketAcl.getGrants()); } } public HashMap<String, Set<Grant>> getResult() { return grants; } } protected static class CheckThread extends Thread { protected String result = "Unknown"; protected AmazonS3 s3 = null; protected Throwable exception = null; public CheckThread(AmazonS3 s3) { this.s3 = s3; } public String getResult() { return result; } public Throwable getException() { return exception; } @Override public void run() { try { if (s3 != null) { Owner s3AccountOwner = s3.getS3AccountOwner(); if (s3AccountOwner != null) { result = StringUtils.isNotEmpty(s3AccountOwner .getDisplayName()) ? "Connection OK" : "Connection Failed"; } } } catch (AmazonServiceException e) { result = "Connection Failed : " + e.getMessage(); exception = e; Logging.authorityConnectors.error(e); } catch (AmazonClientException e) { result = "Connection Failed : " + e.getMessage(); exception = e; Logging.authorityConnectors.error(e); } } } }
package core.framework.mongo.impl; import com.mongodb.ReadPreference; import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.AggregateIterable; import com.mongodb.client.FindIterable; import com.mongodb.client.MapReduceIterable; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoIterable; import com.mongodb.client.model.BulkWriteOptions; import com.mongodb.client.model.CountOptions; import com.mongodb.client.model.DeleteOneModel; import com.mongodb.client.model.Filters; import com.mongodb.client.model.InsertManyOptions; import com.mongodb.client.model.ReplaceOneModel; import com.mongodb.client.model.ReplaceOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; import core.framework.internal.validate.Validator; import core.framework.log.ActionLogContext; import core.framework.log.Markers; import core.framework.mongo.Aggregate; import core.framework.mongo.Collection; import core.framework.mongo.Count; import core.framework.mongo.FindOne; import core.framework.mongo.Get; import core.framework.mongo.MapReduce; import core.framework.mongo.MongoCollection; import core.framework.mongo.Query; import core.framework.util.Lists; import core.framework.util.StopWatch; import core.framework.util.Strings; import org.bson.BsonDocument; import org.bson.conversions.Bson; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; /** * @author neo */ class MongoCollectionImpl<T> implements MongoCollection<T> { private final Logger logger = LoggerFactory.getLogger(MongoCollectionImpl.class); private final MongoImpl mongo; private final Class<T> entityClass; private final String collectionName; private final Validator<T> validator; private com.mongodb.client.MongoCollection<T> collection; 
MongoCollectionImpl(MongoImpl mongo, Class<T> entityClass) { this.mongo = mongo; this.entityClass = entityClass; validator = Validator.of(entityClass); collectionName = entityClass.getDeclaredAnnotation(Collection.class).name(); } @Override public long count(Count count) { var watch = new StopWatch(); Bson filter = count.filter == null ? new BsonDocument() : count.filter; try { return collection(count.readPreference).countDocuments(filter, new CountOptions().maxTime(mongo.timeoutInMs, TimeUnit.MILLISECONDS)); } finally { long elapsed = watch.elapsed(); ActionLogContext.track("mongo", elapsed, 1, 0); logger.debug("count, collection={}, filter={}, readPref={}, elapsed={}", collectionName, new BsonLogParam(filter, mongo.registry), count.readPreference == null ? null : count.readPreference.getName(), elapsed); checkSlowOperation(elapsed); } } @Override public void insert(T entity) { var watch = new StopWatch(); validator.validate(entity, false); try { collection().insertOne(entity); } finally { long elapsed = watch.elapsed(); ActionLogContext.track("mongo", elapsed, 0, 1); logger.debug("insert, collection={}, elapsed={}", collectionName, elapsed); checkSlowOperation(elapsed); } } @Override public void bulkInsert(List<T> entities) { var watch = new StopWatch(); if (entities == null || entities.isEmpty()) throw new Error("entities must not be empty"); for (T entity : entities) validator.validate(entity, false); try { collection().insertMany(entities, new InsertManyOptions().ordered(false)); } finally { long elapsed = watch.elapsed(); int size = entities.size(); ActionLogContext.track("mongo", elapsed, 0, size); logger.debug("bulkInsert, collection={}, size={}, elapsed={}", collectionName, size, elapsed); checkSlowOperation(elapsed); } } @Override public Optional<T> get(Get get) { var watch = new StopWatch(); if (get.id == null) throw new Error("get.id must not be null"); int returnedDocs = 0; try { T result = collection(get.readPreference).find(Filters.eq("_id", 
get.id)).first(); if (result != null) returnedDocs = 1; return Optional.ofNullable(result); } finally { long elapsed = watch.elapsed(); ActionLogContext.track("mongo", elapsed, returnedDocs, 0); logger.debug("get, collection={}, id={}, readPref={}, returnedDocs={}, elapsed={}", collectionName, get.id, get.readPreference == null ? null : get.readPreference.getName(), returnedDocs, elapsed); checkSlowOperation(elapsed); } } @Override public Optional<T> findOne(FindOne findOne) { var watch = new StopWatch(); Bson filter = findOne.filter == null ? new BsonDocument() : findOne.filter; int returnedDocs = 0; try { List<T> results = new ArrayList<>(2); FindIterable<T> query = collection() .find(filter) .limit(2) .maxTime(mongo.timeoutInMs, TimeUnit.MILLISECONDS); fetch(query, results); if (results.isEmpty()) return Optional.empty(); if (results.size() > 1) throw new Error("more than one row returned"); returnedDocs = 1; return Optional.of(results.get(0)); } finally { long elapsed = watch.elapsed(); ActionLogContext.track("mongo", elapsed, returnedDocs, 0); logger.debug("findOne, collection={}, filter={}, readPref={}, returnedDocs={}, elapsed={}", collectionName, new BsonLogParam(filter, mongo.registry), findOne.readPreference == null ? null : findOne.readPreference.getName(), returnedDocs, elapsed); checkSlowOperation(elapsed); } } @Override public List<T> find(Query query) { var watch = new StopWatch(); List<T> results = query.limit == null ? 
// NOTE(review): this chunk begins mid-method — the head of find(Query) is outside this view;
// the fragment below is the tail of its results-list initialization (sized by query.limit when set).
new ArrayList<>() : new ArrayList<>(query.limit);
        try {
            // Run the query with a server-side time limit; collect all matching docs into results.
            FindIterable<T> mongoQuery = mongoQuery(query).maxTime(mongo.timeoutInMs, TimeUnit.MILLISECONDS);
            fetch(mongoQuery, results);
            checkTooManyRowsReturned(results.size());
            return results;
        } finally {
            long elapsed = watch.elapsed();
            int size = results.size();
            // track(name, elapsedNanos, readEntries, writeEntries)
            ActionLogContext.track("mongo", elapsed, size, 0);
            logger.debug("find, collection={}, filter={}, projection={}, sort={}, skip={}, limit={}, readPref={}, returnedDocs={}, elapsed={}",
                collectionName,
                new BsonLogParam(query.filter, mongo.registry),
                new BsonLogParam(query.projection, mongo.registry),
                new BsonLogParam(query.sort, mongo.registry),
                query.skip,
                query.limit,
                query.readPreference == null ? null : query.readPreference.getName(),
                size,
                elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Streams every document matched by {@code query} to {@code consumer} via a cursor,
     * without materializing the full result set in memory.
     * <p>
     * {@code mongoTook} accumulates only time spent inside the driver (cursor open/advance);
     * time spent in {@code consumer.accept} is excluded from the tracked mongo time.
     */
    @Override
    public void forEach(Query query, Consumer<T> consumer) {
        var watch = new StopWatch();
        long start = System.nanoTime();
        long mongoTook = 0;     // driver-only time, excludes consumer time
        int returnedDocs = 0;
        try (MongoCursor<T> cursor = mongoQuery(query).iterator()) {
            mongoTook += System.nanoTime() - start;
            start = System.nanoTime();
            while (cursor.hasNext()) {
                T result = cursor.next();
                returnedDocs++;
                mongoTook += System.nanoTime() - start;
                consumer.accept(result);
                start = System.nanoTime();
            }
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", mongoTook, returnedDocs, 0);
            logger.debug("forEach, collection={}, filter={}, projection={}, sort={}, skip={}, limit={}, readPref={}, returnedDocs={}, mongoTook={}, elapsed={}",
                collectionName,
                new BsonLogParam(query.filter, mongo.registry),
                new BsonLogParam(query.projection, mongo.registry),
                new BsonLogParam(query.sort, mongo.registry),
                query.skip,
                query.limit,
                query.readPreference == null ? null : query.readPreference.getName(),
                returnedDocs,
                mongoTook,
                elapsed);
        }
    }

    /**
     * Runs an aggregation pipeline and returns all result documents.
     * Validates that the pipeline is non-empty and a result class is supplied.
     */
    // NOTE(review): unlike find()/update()/delete(), this method does not call
    // checkSlowOperation(elapsed) in its finally block — confirm whether intentional.
    @Override
    public <V> List<V> aggregate(Aggregate<V> aggregate) {
        var watch = new StopWatch();
        if (aggregate.pipeline == null || aggregate.pipeline.isEmpty()) throw new Error("aggregate.pipeline must not be empty");
        if (aggregate.resultClass == null) throw new Error("aggregate.resultClass must not be null");
        List<V> results = Lists.newArrayList();
        try {
            AggregateIterable<V> query = collection(aggregate.readPreference)
                .aggregate(aggregate.pipeline, aggregate.resultClass)
                .maxTime(mongo.timeoutInMs, TimeUnit.MILLISECONDS);
            fetch(query, results);
            checkTooManyRowsReturned(results.size());
            return results;
        } finally {
            long elapsed = watch.elapsed();
            int size = results.size();
            ActionLogContext.track("mongo", elapsed, size, 0);
            logger.debug("aggregate, collection={}, pipeline={}, readPref={}, returnedDocs={}, elapsed={}",
                collectionName,
                aggregate.pipeline.stream().map(stage -> new BsonLogParam(stage, mongo.registry)).collect(Collectors.toList()),
                aggregate.readPreference == null ? null : aggregate.readPreference.getName(),
                size,
                elapsed);
        }
    }

    /**
     * Runs a server-side map/reduce job (optionally filtered) and returns all result documents.
     */
    // NOTE(review): map-reduce is deprecated in recent MongoDB server versions in favor of
    // the aggregation pipeline — verify against the deployed server version.
    // Also no checkSlowOperation(elapsed) here — confirm whether intentional.
    @Override
    public <V> List<V> mapReduce(MapReduce<V> mapReduce) {
        var watch = new StopWatch();
        if (Strings.isBlank(mapReduce.mapFunction)) throw new Error("mapReduce.mapFunction must not be blank");
        if (Strings.isBlank(mapReduce.reduceFunction)) throw new Error("mapReduce.reduceFunction must not be blank");
        if (mapReduce.resultClass == null) throw new Error("mapReduce.resultClass must not be null");
        List<V> results = Lists.newArrayList();
        try {
            MapReduceIterable<V> query = collection(mapReduce.readPreference)
                .mapReduce(mapReduce.mapFunction, mapReduce.reduceFunction, mapReduce.resultClass)
                .maxTime(mongo.timeoutInMs, TimeUnit.MILLISECONDS);
            if (mapReduce.filter != null) query.filter(mapReduce.filter);
            fetch(query, results);
            checkTooManyRowsReturned(results.size());
            return results;
        } finally {
            long elapsed = watch.elapsed();
            int size = results.size();
            ActionLogContext.track("mongo", elapsed, size, 0);
            logger.debug("mapReduce, collection={}, map={}, reduce={}, filter={}, readPref={}, returnedDocs={}, elapsed={}",
                collectionName,
                mapReduce.mapFunction,
                mapReduce.reduceFunction,
                new BsonLogParam(mapReduce.filter, mongo.registry),
                mapReduce.readPreference == null ? null : mapReduce.readPreference.getName(),
                size,
                elapsed);
        }
    }

    /**
     * Upserts a single entity by its {@code _id}: replaces the existing document or inserts
     * a new one when absent. The entity must validate and must carry a non-null id.
     */
    @Override
    public void replace(T entity) {
        var watch = new StopWatch();
        Object id = null;
        validator.validate(entity, false);
        try {
            id = mongo.codecs.id(entity);
            if (id == null) throw new Error("entity must have id, entityClass=" + entityClass.getCanonicalName());
            collection().replaceOne(Filters.eq("_id", id), entity, new ReplaceOptions().upsert(true));
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", elapsed, 0, 1);
            logger.debug("replace, collection={}, id={}, elapsed={}", collectionName, id, elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Upserts many entities in one unordered bulk write; every entity is validated
     * and must carry a non-null id before any write is issued.
     */
    @Override
    public void bulkReplace(List<T> entities) {
        var watch = new StopWatch();
        if (entities == null || entities.isEmpty()) throw new Error("entities must not be empty");
        int size = entities.size();
        for (T entity : entities) validator.validate(entity, false);
        try {
            List<ReplaceOneModel<T>> models = new ArrayList<>(size);
            for (T entity : entities) {
                Object id = mongo.codecs.id(entity);
                if (id == null) throw new Error("entity must have id, entityClass=" + entityClass.getCanonicalName());
                models.add(new ReplaceOneModel<>(Filters.eq("_id", id), entity, new ReplaceOptions().upsert(true)));
            }
            // unordered: writes may be applied in any order and failures don't stop the batch
            collection().bulkWrite(models, new BulkWriteOptions().ordered(false));
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", elapsed, 0, size);
            logger.debug("bulkReplace, collection={}, size={}, elapsed={}", collectionName, size, elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Applies {@code update} to every document matching {@code filter}.
     *
     * @return number of documents actually modified
     */
    @Override
    public long update(Bson filter, Bson update) {
        var watch = new StopWatch();
        long updatedRows = 0;
        try {
            UpdateResult result = collection().updateMany(filter, update);
            updatedRows = result.getModifiedCount();
            return updatedRows;
        } finally {
            long elapsed = watch.elapsed();
            // NOTE(review): (int) cast narrows the long count — could misreport for >2^31 updates; confirm acceptable
            ActionLogContext.track("mongo", elapsed, 0, (int) updatedRows);
            logger.debug("update, collection={}, filter={}, update={}, updatedRows={}, elapsed={}",
                collectionName,
                new BsonLogParam(filter, mongo.registry),
                new BsonLogParam(update, mongo.registry),
                updatedRows,
                elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Deletes the document with the given {@code _id}.
     *
     * @return true when exactly one document was deleted
     */
    @Override
    public boolean delete(Object id) {
        var watch = new StopWatch();
        long deletedRows = 0;
        try {
            DeleteResult result = collection().deleteOne(Filters.eq("_id", id));
            deletedRows = result.getDeletedCount();
            return deletedRows == 1;
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", elapsed, 0, (int) deletedRows);
            logger.debug("delete, collection={}, id={}, elapsed={}", collectionName, id, elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Deletes every document matching {@code filter}; a null filter deletes ALL documents
     * (empty BsonDocument matches everything).
     *
     * @return number of documents deleted
     */
    @Override
    public long delete(Bson filter) {
        var watch = new StopWatch();
        long deletedRows = 0;
        try {
            DeleteResult result = collection().deleteMany(filter == null ? new BsonDocument() : filter);
            deletedRows = result.getDeletedCount();
            return deletedRows;
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", elapsed, 0, (int) deletedRows);
            logger.debug("delete, collection={}, filter={}, deletedRows={}, elapsed={}",
                collectionName, new BsonLogParam(filter, mongo.registry), deletedRows, elapsed);
            checkSlowOperation(elapsed);
        }
    }

    /**
     * Deletes the documents with the given ids in one unordered bulk write.
     *
     * @return number of documents deleted (may be less than ids.size() if some ids don't exist)
     */
    @Override
    public long bulkDelete(List<?> ids) {
        var watch = new StopWatch();
        int size = ids.size();
        int deletedRows = 0;
        try {
            List<DeleteOneModel<T>> models = new ArrayList<>(size);
            for (Object id : ids) {
                models.add(new DeleteOneModel<>(Filters.eq("_id", id)));
            }
            BulkWriteResult result = collection().bulkWrite(models, new BulkWriteOptions().ordered(false));
            deletedRows = result.getDeletedCount();
            return deletedRows;
        } finally {
            long elapsed = watch.elapsed();
            ActionLogContext.track("mongo", elapsed, 0, deletedRows);
            logger.debug("bulkDelete, collection={}, ids={}, size={}, deletedRows={}, elapsed={}",
                collectionName, ids, size, deletedRows, elapsed);
            checkSlowOperation(elapsed);
        }
    }

    // Builds a driver FindIterable from the Query object, applying only the options
    // the caller actually set (projection/sort/skip/limit); a null filter matches everything.
    private FindIterable<T> mongoQuery(Query query) {
        FindIterable<T> mongoQuery = collection(query.readPreference).find(query.filter == null ? new BsonDocument() : query.filter);
        if (query.projection != null) mongoQuery.projection(query.projection);
        if (query.sort != null) mongoQuery.sort(query.sort);
        if (query.skip != null) mongoQuery.skip(query.skip);
        if (query.limit != null) mongoQuery.limit(query.limit);
        return mongoQuery;
    }

    // Drains the iterable into results, always closing the cursor (try-with-resources).
    private <V> void fetch(MongoIterable<V> iterable, List<V> results) {
        try (MongoCursor<V> cursor = iterable.iterator()) {
            while (cursor.hasNext()) {
                results.add(cursor.next());
            }
        }
    }

    // Warns (with error code SLOW_MONGODB) when an operation exceeds the configured threshold.
    private void checkSlowOperation(long elapsed) {
        if (elapsed > mongo.slowOperationThresholdInNanos)
            logger.warn(Markers.errorCode("SLOW_MONGODB"), "slow mongoDB query, elapsed={}", Duration.ofNanos(elapsed));
    }

    // Warns (with error code TOO_MANY_ROWS_RETURNED) when a result set exceeds the configured threshold.
    private void checkTooManyRowsReturned(int size) {
        if (size > mongo.tooManyRowsReturnedThreshold)
            logger.warn(Markers.errorCode("TOO_MANY_ROWS_RETURNED"), "too many rows returned, returnedRows={}", size);
    }

    // Returns the collection bound to the requested read preference, or the default collection when none given.
    private com.mongodb.client.MongoCollection<T> collection(ReadPreference readPreference) {
        if (readPreference != null) return collection().withReadPreference(readPreference);
        return collection();
    }

    // Lazily resolves and caches the underlying driver collection for the entity class.
    private com.mongodb.client.MongoCollection<T> collection() {
        if (collection == null) collection = mongo.mongoCollection(entityClass);
        return collection;
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto

package com.google.privacy.dlp.v2;

/**
 * List of exclude infoTypes.
 *
 * Protobuf type {@code google.privacy.dlp.v2.ExcludeInfoTypes}
 */
public final class ExcludeInfoTypes extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ExcludeInfoTypes)
    ExcludeInfoTypesOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ExcludeInfoTypes.newBuilder() to construct.
  private ExcludeInfoTypes(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance state: empty immutable info_types list.
  private ExcludeInfoTypes() {
    infoTypes_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of stream (tag 0),
  // collecting repeated info_types (field 1 => tag 10) and preserving unknown fields.
  private ExcludeInfoTypes(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // lazily switch to a mutable list on first element
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                infoTypes_ = new java.util.ArrayList<com.google.privacy.dlp.v2.InfoType>();
                mutable_bitField0_ |= 0x00000001;
              }
              infoTypes_.add(
                  input.readMessage(
                      com.google.privacy.dlp.v2.InfoType.parser(), extensionRegistry));
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // seal the list and the unknown fields even when parsing failed partway
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        infoTypes_ = java.util.Collections.unmodifiableList(infoTypes_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ExcludeInfoTypes_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ExcludeInfoTypes_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.ExcludeInfoTypes.class,
            com.google.privacy.dlp.v2.ExcludeInfoTypes.Builder.class);
  }

  public static final int INFO_TYPES_FIELD_NUMBER = 1;
  private java.util.List<com.google.privacy.dlp.v2.InfoType> infoTypes_;
  /**
   * InfoTypes whose findings, when they overlap or contain a finding of one of the
   * inspected infoTypes, cause that finding to be dropped. E.g. with inspected
   * infoType "PHONE_NUMBER" and exclude infoType "EMAIL_ADDRESS",
   * "555-222-2222&#64;example.org" yields only the email-address finding.
   *
   * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
   */
  public java.util.List<com.google.privacy.dlp.v2.InfoType> getInfoTypesList() {
    return infoTypes_;
  }
  /**
   * See {@link #getInfoTypesList()}.
   *
   * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
   */
  public java.util.List<? extends com.google.privacy.dlp.v2.InfoTypeOrBuilder>
      getInfoTypesOrBuilderList() {
    return infoTypes_;
  }
  /**
   * See {@link #getInfoTypesList()}.
   *
   * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
   */
  public int getInfoTypesCount() {
    return infoTypes_.size();
  }
  /**
   * See {@link #getInfoTypesList()}.
   *
   * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
   */
  public com.google.privacy.dlp.v2.InfoType getInfoTypes(int index) {
    return infoTypes_.get(index);
  }
  /**
   * See {@link #getInfoTypesList()}.
   *
   * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
   */
  public com.google.privacy.dlp.v2.InfoTypeOrBuilder getInfoTypesOrBuilder(int index) {
    return infoTypes_.get(index);
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // no required fields in this message, so always initialized
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < infoTypes_.size(); i++) {
      output.writeMessage(1, infoTypes_.get(i));
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < infoTypes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, infoTypes_.get(i));
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.ExcludeInfoTypes)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.ExcludeInfoTypes other =
        (com.google.privacy.dlp.v2.ExcludeInfoTypes) obj;

    boolean result = true;
    result = result && getInfoTypesList().equals(other.getInfoTypesList());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getInfoTypesCount() > 0) {
      hash = (37 * hash) + INFO_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + getInfoTypesList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom/parseDelimitedFrom overloads, all delegating to PARSER.
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ExcludeInfoTypes parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.privacy.dlp.v2.ExcludeInfoTypes prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // avoid an unnecessary copy when this is the shared default instance
    return this == DEFAULT_INSTANCE ?
        new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * List of exclude infoTypes.
   *
   * Protobuf type {@code google.privacy.dlp.v2.ExcludeInfoTypes}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ExcludeInfoTypes)
      com.google.privacy.dlp.v2.ExcludeInfoTypesOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ExcludeInfoTypes_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ExcludeInfoTypes_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.ExcludeInfoTypes.class,
              com.google.privacy.dlp.v2.ExcludeInfoTypes.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2.ExcludeInfoTypes.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getInfoTypesFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (infoTypesBuilder_ == null) {
        infoTypes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        infoTypesBuilder_.clear();
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ExcludeInfoTypes_descriptor;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ExcludeInfoTypes getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.ExcludeInfoTypes.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ExcludeInfoTypes build() {
      com.google.privacy.dlp.v2.ExcludeInfoTypes result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ExcludeInfoTypes buildPartial() {
      com.google.privacy.dlp.v2.ExcludeInfoTypes result =
          new com.google.privacy.dlp.v2.ExcludeInfoTypes(this);
      int from_bitField0_ = bitField0_;
      if (infoTypesBuilder_ == null) {
        // seal the builder's list into the message; builder reverts to "empty" state for the field
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          infoTypes_ = java.util.Collections.unmodifiableList(infoTypes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.infoTypes_ = infoTypes_;
      } else {
        result.infoTypes_ = infoTypesBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public
    Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.ExcludeInfoTypes) {
        return mergeFrom((com.google.privacy.dlp.v2.ExcludeInfoTypes) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges another ExcludeInfoTypes into this builder: appends its info_types
    // (adopting the other's immutable list directly when this builder is empty)
    // and merges unknown fields.
    public Builder mergeFrom(com.google.privacy.dlp.v2.ExcludeInfoTypes other) {
      if (other == com.google.privacy.dlp.v2.ExcludeInfoTypes.getDefaultInstance()) return this;
      if (infoTypesBuilder_ == null) {
        if (!other.infoTypes_.isEmpty()) {
          if (infoTypes_.isEmpty()) {
            infoTypes_ = other.infoTypes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureInfoTypesIsMutable();
            infoTypes_.addAll(other.infoTypes_);
          }
          onChanged();
        }
      } else {
        if (!other.infoTypes_.isEmpty()) {
          if (infoTypesBuilder_.isEmpty()) {
            infoTypesBuilder_.dispose();
            infoTypesBuilder_ = null;
            infoTypes_ = other.infoTypes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            infoTypesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getInfoTypesFieldBuilder()
                    : null;
          } else {
            infoTypesBuilder_.addAllMessages(other.infoTypes_);
          }
        }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses from a stream and merges; on parse failure the partially-parsed
    // message (if any) is still merged before the exception propagates.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.privacy.dlp.v2.ExcludeInfoTypes parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.privacy.dlp.v2.ExcludeInfoTypes) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.privacy.dlp.v2.InfoType> infoTypes_ =
        java.util.Collections.emptyList();

    // Copies the (possibly shared/immutable) list into a private mutable ArrayList on first write.
    private void ensureInfoTypesIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        infoTypes_ = new java.util.ArrayList<com.google.privacy.dlp.v2.InfoType>(infoTypes_);
        bitField0_ |= 0x00000001;
      }
    }

    // When non-null, all field access goes through this nested builder instead of infoTypes_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.InfoType,
            com.google.privacy.dlp.v2.InfoType.Builder,
            com.google.privacy.dlp.v2.InfoTypeOrBuilder>
        infoTypesBuilder_;

    /**
     * InfoTypes whose findings, when they overlap or contain a finding of one of the
     * inspected infoTypes, cause that finding to be dropped.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public java.util.List<com.google.privacy.dlp.v2.InfoType> getInfoTypesList() {
      if (infoTypesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(infoTypes_);
      } else {
        return infoTypesBuilder_.getMessageList();
      }
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public int getInfoTypesCount() {
      if (infoTypesBuilder_ == null) {
        return infoTypes_.size();
      } else {
        return infoTypesBuilder_.getCount();
      }
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public com.google.privacy.dlp.v2.InfoType getInfoTypes(int index) {
      if (infoTypesBuilder_ == null) {
        return infoTypes_.get(index);
      } else {
        return infoTypesBuilder_.getMessage(index);
      }
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder setInfoTypes(int index, com.google.privacy.dlp.v2.InfoType value) {
      if (infoTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInfoTypesIsMutable();
        infoTypes_.set(index, value);
        onChanged();
      } else {
        infoTypesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder setInfoTypes(
        int index, com.google.privacy.dlp.v2.InfoType.Builder builderForValue) {
      if (infoTypesBuilder_ == null) {
        ensureInfoTypesIsMutable();
        infoTypes_.set(index, builderForValue.build());
        onChanged();
      } else {
        infoTypesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder addInfoTypes(com.google.privacy.dlp.v2.InfoType value) {
      if (infoTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInfoTypesIsMutable();
        infoTypes_.add(value);
        onChanged();
      } else {
        infoTypesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder addInfoTypes(int index, com.google.privacy.dlp.v2.InfoType value) {
      if (infoTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInfoTypesIsMutable();
        infoTypes_.add(index, value);
        onChanged();
      } else {
        infoTypesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder addInfoTypes(com.google.privacy.dlp.v2.InfoType.Builder builderForValue) {
      if (infoTypesBuilder_ == null) {
        ensureInfoTypesIsMutable();
        infoTypes_.add(builderForValue.build());
        onChanged();
      } else {
        infoTypesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder addInfoTypes(
        int index, com.google.privacy.dlp.v2.InfoType.Builder builderForValue) {
      if (infoTypesBuilder_ == null) {
        ensureInfoTypesIsMutable();
        infoTypes_.add(index, builderForValue.build());
        onChanged();
      } else {
        infoTypesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * See {@link #getInfoTypesList()}.
     *
     * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code>
     */
    public Builder addAllInfoTypes(
        java.lang.Iterable<?
extends com.google.privacy.dlp.v2.InfoType> values) { if (infoTypesBuilder_ == null) { ensureInfoTypesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, infoTypes_); onChanged(); } else { infoTypesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public Builder clearInfoTypes() { if (infoTypesBuilder_ == null) { infoTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { infoTypesBuilder_.clear(); } return this; } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public Builder removeInfoTypes(int index) { if (infoTypesBuilder_ == null) { ensureInfoTypesIsMutable(); infoTypes_.remove(index); onChanged(); } else { infoTypesBuilder_.remove(index); } return this; } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public com.google.privacy.dlp.v2.InfoType.Builder getInfoTypesBuilder(int index) { return getInfoTypesFieldBuilder().getBuilder(index); } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public com.google.privacy.dlp.v2.InfoTypeOrBuilder getInfoTypesOrBuilder(int index) { if (infoTypesBuilder_ == null) { return infoTypes_.get(index); } else { return infoTypesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. 
For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public java.util.List<? extends com.google.privacy.dlp.v2.InfoTypeOrBuilder> getInfoTypesOrBuilderList() { if (infoTypesBuilder_ != null) { return infoTypesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(infoTypes_); } } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public com.google.privacy.dlp.v2.InfoType.Builder addInfoTypesBuilder() { return getInfoTypesFieldBuilder() .addBuilder(com.google.privacy.dlp.v2.InfoType.getDefaultInstance()); } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public com.google.privacy.dlp.v2.InfoType.Builder addInfoTypesBuilder(int index) { return getInfoTypesFieldBuilder() .addBuilder(index, com.google.privacy.dlp.v2.InfoType.getDefaultInstance()); } /** * * * <pre> * InfoType list in ExclusionRule rule drops a finding when it overlaps or * contained within with a finding of an infoType from this list. For * example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and * `exclusion_rule` containing `exclude_info_types.info_types` with * "EMAIL_ADDRESS" the phone number findings are dropped if they overlap * with EMAIL_ADDRESS finding. * That leads to "555-222-2222&#64;example.org" to generate only a single * finding, namely email address. * </pre> * * <code>repeated .google.privacy.dlp.v2.InfoType info_types = 1;</code> */ public java.util.List<com.google.privacy.dlp.v2.InfoType.Builder> getInfoTypesBuilderList() { return getInfoTypesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.InfoType, com.google.privacy.dlp.v2.InfoType.Builder, com.google.privacy.dlp.v2.InfoTypeOrBuilder> getInfoTypesFieldBuilder() { if (infoTypesBuilder_ == null) { infoTypesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.InfoType, com.google.privacy.dlp.v2.InfoType.Builder, com.google.privacy.dlp.v2.InfoTypeOrBuilder>( infoTypes_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); infoTypes_ = null; } return infoTypesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ExcludeInfoTypes) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ExcludeInfoTypes) private static final com.google.privacy.dlp.v2.ExcludeInfoTypes DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ExcludeInfoTypes(); } public static com.google.privacy.dlp.v2.ExcludeInfoTypes getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ExcludeInfoTypes> PARSER = new com.google.protobuf.AbstractParser<ExcludeInfoTypes>() { @java.lang.Override public ExcludeInfoTypes parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ExcludeInfoTypes(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ExcludeInfoTypes> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExcludeInfoTypes> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.ExcludeInfoTypes getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright 2013 Guidewire Software, Inc.
 */
package gw.plugin.ij.compiler;

import com.google.common.base.Joiner;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.intellij.debugger.DebuggerManagerEx;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.events.DebuggerContextCommandImpl;
import com.intellij.debugger.engine.managerThread.DebuggerCommand;
import com.intellij.debugger.impl.DebuggerContextImpl;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.jdi.ThreadReferenceProxyImpl;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.compiler.CompileScope;
import com.intellij.openapi.compiler.CompilerMessageCategory;
import com.intellij.openapi.compiler.TranslatingCompiler;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.Chunk;
import com.sun.jdi.ArrayReference;
import com.sun.jdi.ArrayType;
import com.sun.jdi.ClassType;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.Value;
import gw.compiler.ij.api.TypeFingerprint;
import gw.compiler.ij.processors.DependencySink;
import gw.config.CommonServices;
import gw.fs.IFile;
import gw.lang.GosuShop;
import gw.lang.reflect.IType;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.java.IJavaClassInfo;
import gw.lang.reflect.module.IModule;
import gw.plugin.ij.compiler.parser.CompilerParser;
import gw.plugin.ij.filesystem.IDEAFile;
import gw.plugin.ij.util.FileUtil;
import gw.plugin.ij.util.GosuModuleUtil;
import gw.plugin.ij.util.TypeUtil;
import gw.util.fingerprint.FP64;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

/**
 * Translating compiler for Gosu resources.  Supports a full rebuild and a
 * fingerprint-driven incremental compile backed by a {@link FileDependencyCache},
 * optionally delegating to an external compiler process when the file count
 * exceeds the configured threshold.  After a compile it notifies a paused or
 * running debuggee so reloaded classes pick up the changes.
 */
public class GosuCompiler implements TranslatingCompiler {
  private static final Logger LOG = Logger.getInstance(GosuCompiler.class);

  private static final String PROP_EXT = "properties";
  private static final String PCF_EXT = "pcf";

  // Source-like file types that must never live in the config folder.
  private static final Set<String> DISALLOWED_EXTENSIONS =
      new HashSet<>(Arrays.asList("java", "gs", "gsx", "gsp"));

  // Lazily resolved; non-null only while the external compiler path is active.
  private IExternalCompiler externalCompiler;

  @NotNull
  @Override
  public String getDescription() {
    return "Gosu Compiler";
  }

  @Override
  public boolean validateConfiguration(CompileScope scope) {
    return true; // TODO:
  }

  /** A file is compilable unless its path is ignored, it is an "Errant" test fixture, or the parser rejects it. */
  @Override
  public boolean isCompilableFile(@NotNull VirtualFile file, CompileContext context) {
    final String path = file.getPath();
    final String name = file.getName();
    final String extension = file.getExtension();
    if (CommonServices.getPlatformHelper().isPathIgnored(path)) {
      return false;
    }
    //## todo: we should instead check for @DoNotVerifyResource annotation and just don't report errors for those
    if ((name.startsWith("Errant_") && "pcf".equals(extension))
        || (name.contains("Errant") && "gs".equals(extension))) {
      return false;
    }
    return CompilerParser.accepts(file);
  }

  /**
   * Entry point called by the IDE build.  Chooses full vs. incremental compile,
   * then notifies any attached debuggee of the changed files.
   */
  @Override
  public void compile(@NotNull final CompileContext context,
                      @NotNull Chunk<Module> moduleChunk,
                      @NotNull final VirtualFile[] files,
                      @NotNull final OutputSink sink) {
    final Set<Module> nodes = moduleChunk.getNodes();
    // More than one node in the chunk means a module cycle; refuse to compile.
    if (nodes.size() > 1) {
      LOG.warn("Cyclic dependency during compilation: " + Joiner.on(',').join(nodes));
      return;
    }

    // Large batches go through the external compiler process.
    final boolean useExternal =
        files.length > CompilerSettings.getInstance().getExternalToIncrementalCompilerLimit();
    externalCompiler = useExternal ? createOrGetExternalCompiler(context.getProject()) : null;

    final GosuCompilerMonitor monitor = GosuCompilerMonitor.getInstance(context.getProject());
    final FileDependencyCache cache = monitor.getDependencyCache();

    final Module ijModule = nodes.iterator().next();
    final IModule gsModule = GosuModuleUtil.getModule(ijModule);
    TypeSystem.pushModule(gsModule);
    try {
      final List<VirtualFile> filesToCompile = Arrays.asList(files);
      if (context.isRebuild()) {
        fullCompile(context, cache, ijModule, filesToCompile, sink);
        monitor.setCacheInSync(true);
      } else {
        if (!monitor.isCacheInSync()) {
          // Incremental compile is only sound with an up-to-date dependency cache.
          context.requestRebuildNextTime("Dependency cache is not in sync. Rebuild is required.");
          return;
        }
        incrementalCompile(context, cache, ijModule, Lists.<VirtualFile>newArrayList(), filesToCompile, sink, true);
      }
      notifyTargetProcessOfChanges(context, files);
    } finally {
      TypeSystem.popModule(gsModule);
    }
  }

  /**
   * Computes the content fingerprint of {@code file}: for properties files the
   * ordered key set, for PCF files the raw text, otherwise the fingerprint of
   * every resolvable type declared in the file (sorted for determinism).
   * Runs inside a read action because type resolution touches the PSI.
   */
  private static long getFingerprint(final VirtualFile file, final IModule gosuModule) {
    return ApplicationManager.getApplication().runReadAction(new Computable<Long>() {
      public Long compute() {
        final FP64 fp = new FP64();
        final String extension = file.getExtension();
        if (PROP_EXT.equals(extension)) {
          handlePropertiesFileFingerprint(file, fp);
        } else if (PCF_EXT.equals(extension)) {
          handleFileText(file, fp);
        } else {
          List<String> types = TypeUtil.getTypesForFile(gosuModule, file);
          for (String qualifiedName : Ordering.natural().sortedCopy(types)) {
            if ("java".equals(extension)) {
              final IJavaClassInfo type = TypeSystem.getJavaClassInfo(qualifiedName, gosuModule);
              if (type != null) {
                TypeFingerprint.extend(fp, type);
              } else {
                LOG.warn("Could not resolve Java type " + qualifiedName + " during taking fingerprint");
              }
            } else {
              final IType type = TypeSystem.getByFullNameIfValid(qualifiedName, gosuModule);
              if (type != null) {
                TypeFingerprint.extend(fp, type);
              } else {
                // Unresolvable type: fall back to fingerprinting the raw text.
                handleFileText(file, fp);
                // LOG.warn("Could not resolve type " + qualifiedName + " during taking fingerprint");
              }
            }
          }
        }
        return fp.getRawFingerprint();
      }
    });
  }

  /** Returns the production or test output directory of {@code module}. */
  public static VirtualFile getOutputDirectory(@NotNull CompileContext context, Module module, boolean tests) {
    return tests ? context.getModuleOutputDirectoryForTests(module) : context.getModuleOutputDirectory(module);
  }

  /** Updates the progress indicator with the file and module currently being compiled. */
  public static void setProgressText(CompileContext context, VirtualFile sourceFile) {
    context.getProgressIndicator().setText(
        String.format("Compiling '%s' [%s]", sourceFile.getName(), context.getModuleByFile(sourceFile).getName()));
  }

  /**
   * Pushes the list of changed files into an attached debuggee, if any, so the
   * in-process Gosu type system reloads them.  A paused session is poked
   * directly via method invocation; a running one via class redefinition of
   * the ReloadClassesIndicator marker class.
   */
  public static void notifyTargetProcessOfChanges(@NotNull CompileContext context, @NotNull final VirtualFile... files) {
    final Project project = context.getProject();
    final DebuggerSession session = DebuggerManagerEx.getInstanceEx(project).getContext().getDebuggerSession();
    if (session != null) {
      if (session.isPaused()) {
        invokeDirectly(session, files);
      } else {
        final DebugProcessImpl process = session.getProcess();
        process.getManagerThread().invokeCommand(new DebuggerCommand() {
          public void action() {
            final VirtualMachineProxyImpl vm = process.getVirtualMachineProxy();
            final List<ReferenceType> types = vm.classesByName("gw.internal.gosu.parser.ReloadClassesIndicator");
            final List<String> changedTypes =
                TypeUtil.getTypesForFiles(TypeSystem.getGlobalModule(), Arrays.asList(files));
            vm.redefineClasses(
                ImmutableMap.of(types.get(0), GosuShop.updateReloadClassesIndicator(changedTypes, "")));
          }

          public void commandCancelled() {
            // Nothing to do
          }
        });
      }
    }
  }

  /**
   * Paused-session variant: evaluates {@code TypeSystem.refreshedFiles(String[])}
   * in the debuggee with the changed file paths.
   */
  private static void invokeDirectly(final DebuggerSession session, final VirtualFile[] files) {
    final DebuggerContextImpl debuggerContext = DebuggerManagerEx.getInstanceEx(session.getProject()).getContext();
    final DebugProcessImpl process = session.getProcess();
    process.getManagerThread().schedule(new DebuggerContextCommandImpl(debuggerContext) {
      public Priority getPriority() {
        return Priority.HIGH;
      }

      public void threadAction() {
        final EvaluationContextImpl evaluationContext = debuggerContext.createEvaluationContext();
        try {
          final VirtualMachineProxyImpl vm = process.getVirtualMachineProxy();
          List<ReferenceType> types = vm.classesByName(TypeSystem.class.getName());
          ClassType classType = (ClassType) types.get(0);
          vm.getDebugProcess().invokeMethod(
              evaluationContext,
              classType,
              classType.methodsByName("refreshedFiles").get(0),
              Arrays.asList(getFilesAsValue(evaluationContext, files)));
        } catch (Throwable e) {
          if (e instanceof EvaluateException) {
            throw new RuntimeException(e);
          }
          // Best-effort notification: log instead of silently dropping the failure.
          LOG.warn("Failed to notify debuggee of refreshed files", e);
        }
      }

      // Mirrors the file paths into the debuggee as a java.lang.String[] Value.
      private Value getFilesAsValue(EvaluationContextImpl evaluationContext, VirtualFile[] files) throws Exception {
        List<Value> values = new ArrayList<>();
        VirtualMachineProxyImpl machineProxy = process.getVirtualMachineProxy();
        for (VirtualFile file : files) {
          values.add(machineProxy.mirrorOf(file.getPath()));
        }
        ArrayType objectArrayClass = (ArrayType) process.findClass(
            evaluationContext, "java.lang.String[]", evaluationContext.getClassLoader());
        if (objectArrayClass == null) {
          throw new IllegalStateException();
        }
        ArrayReference argArray = process.newInstance(objectArrayClass, files.length);
        evaluationContext.getSuspendContext().keep(argArray); // to avoid ObjectCollectedException
        argArray.setValues(values);
        return argArray;
      }
    });
  }

  /**
   * Locates the dedicated class-redefinition thread in the debuggee.
   *
   * @throws IllegalStateException if no such thread exists
   */
  @NotNull
  private ThreadReferenceProxyImpl findDebugThread(@NotNull VirtualMachineProxyImpl vm) {
    final String threadName = "Gosu class redefiner";
    for (ThreadReferenceProxyImpl thread : vm.allThreads()) {
      if (thread.name().equals(threadName)) {
        return thread;
      }
    }
    throw new IllegalStateException("Could not find thread: " + threadName);
  }

  /** Refreshes each file in the type system prior to an incremental compile. */
  private void refreshFiles(CompileContext context, List<VirtualFile> files) {
    sortEtiBeforeEtx(files);
    if (externalCompiler != null) {
      return; // we do not need to refresh anything in external compiler because all resources are fresh.
    }
    for (final VirtualFile file : files) {
      context.getProgressIndicator().checkCanceled();
      // The refresh needs to run in a read action to ensure lock-safe ordering:
      // first the PSI lock, then the TS lock.
      ApplicationManager.getApplication().runReadAction(new Runnable() {
        public void run() {
          TypeSystem.refreshed(FileUtil.toIResource(file));
        }
      });
    }
  }

  /** Refreshes the whole module in the type system prior to a full compile. */
  private void refreshModule(CompileContext context, final IModule module) {
    if (externalCompiler != null) {
      return; // we do not need to refresh anything in external compiler because all resources are fresh.
    }
    context.getProgressIndicator().checkCanceled();
    // The refresh needs to run in a read action to ensure lock-safe ordering:
    // first the PSI lock, then the TS lock.
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      public void run() {
        TypeSystem.refresh(module);
      }
    });
  }

  /** Orders files so .eti (metadata) entities are compiled before their .etx extensions. */
  private void sortEtiBeforeEtx(List<VirtualFile> files) {
    Collections.sort(files, new Comparator<VirtualFile>() {
      public int compare(VirtualFile o1, VirtualFile o2) {
        int iRes = o1.getParent().getPath().compareToIgnoreCase(o2.getParent().getPath());
        if (iRes == 0) {
          // .eti comes before .etx
          return o1.getName().compareToIgnoreCase(o2.getName());
        }
        // Ensure ../metadata/ comes before ../extensions/
        return -iRes;
      }
    });
  }

  /**
   * Compiles one file in-process and, on success, returns its dependency info
   * (referenced files, display keys, fingerprint, duration).  Returns null on
   * failure; any non-cancellation throwable is reported as a compiler error.
   */
  private FileDependencyInfo internalCompileFile(@NotNull CompileContext context,
                                                 @NotNull Module ijModule,
                                                 @NotNull VirtualFile file,
                                                 List<OutputItem> outputItems) {
    final DependencySink sink = new DependencySink();
    final boolean successfully;
    final long start = System.currentTimeMillis();
    final IModule module = GosuModuleUtil.getModule(ijModule);
    TypeSystem.pushModule(module);
    try {
      successfully = CompilerParser.parse(context, file, outputItems, sink);
    } catch (ProcessCanceledException e) {
      throw e; // cancellation must propagate to the build framework
    } catch (Throwable e) {
      final String url = VirtualFileManager.constructUrl(LocalFileSystem.PROTOCOL, file.getPath());
      addInternalCompilerError(context, e, url);
      return null;
    } finally {
      TypeSystem.popModule(module);
    }
    if (successfully) {
      try {
        final long fingerprint = getFingerprint(file, module);
        final int duration = (int) (System.currentTimeMillis() - start);
        return new FileDependencyInfo(file, getFiles(sink), sink.getDisplayKeys(), fingerprint, duration);
      } catch (Throwable e) {
        final String url = VirtualFileManager.constructUrl(LocalFileSystem.PROTOCOL, file.getPath());
        addInternalCompilerError(context, e, url);
        return null;
      }
    } else {
      return null;
    }
  }

  /** Reports an unexpected compiler failure against {@code url} and logs it. */
  private void addInternalCompilerError(CompileContext context, Throwable e, String url) {
    context.addMessage(CompilerMessageCategory.ERROR, "Internal compiler error\n" + e, url, 0, 0);
    LOG.error("Internal compiler error", e);
  }

  /** Unwraps the sink's IFiles back to IDEA VirtualFiles. */
  private Set<VirtualFile> getFiles(DependencySink sink) {
    Set<VirtualFile> result = new HashSet<>();
    for (IFile file : sink.getFiles()) {
      result.add(((IDEAFile) file).getVirtualFile());
    }
    return result;
  }

  /**
   * Compiles one file, rejecting disallowed source types in the config folder
   * and routing to the external compiler when it is active and applicable.
   */
  @Nullable
  private FileDependencyInfo compileFile(@NotNull CompileContext context,
                                         @NotNull Module ijModule,
                                         @NotNull VirtualFile file,
                                         List<OutputItem> outputItems) {
    CommonServices.getMemoryMonitor().reclaimMemory(null);
    setProgressText(context, file);

    String extension = file.getExtension();
    if (isInConfigFolder(file, context.getProject())
        && extension != null
        && DISALLOWED_EXTENSIONS.contains(extension)) {
      final OpenFileDescriptor descriptor = new OpenFileDescriptor(context.getProject(), file, 0);
      final String url = VirtualFileManager.constructUrl(LocalFileSystem.PROTOCOL, file.getPath());
      context.addMessage(CompilerMessageCategory.ERROR,
          "Only configuration files are allowed in the config folder.", url, 0, 0, descriptor);
      return null;
    } else if (externalCompiler != null && canUseExternalCompiler(file)) {
      return externalCompiler.compileFile(context, ijModule, file, outputItems);
    } else {
      return internalCompileFile(context, ijModule, file, outputItems);
    }
  }

  /** Gx files and product-model XML must always be compiled in-process. */
  private boolean canUseExternalCompiler(VirtualFile file) {
    final String extension = file.getExtension();
    if ("gx".equals(extension)
        || ("xml".equals(extension) && file.getPath().contains("/config/resources/productmodel/"))) {
      return false;
    }
    return true;
  }

  /** True when the file's source root is the "config" folder. */
  private boolean isInConfigFolder(VirtualFile file, Project project) {
    ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    // getSourceRootForFile returns null for files outside any source root;
    // guard to avoid an NPE for such files.
    final VirtualFile sourceRoot = projectFileIndex.getSourceRootForFile(file);
    return sourceRoot != null && "config".equals(sourceRoot.getName());
  }

  /** Rebuild path: refresh the whole module, compile everything, publish the output. */
  private void fullCompile(@NotNull final CompileContext context,
                           FileDependencyCache cache,
                           @NotNull Module ijModule,
                           @NotNull List<VirtualFile> filesToCompile,
                           @NotNull final OutputSink sink) {
    // Refresh
    sortEtiBeforeEtx(filesToCompile);
    refreshModule(context, GosuModuleUtil.getModule(ijModule));

    final List<OutputItem> outputItems = Lists.newArrayList();
    for (VirtualFile file : filesToCompile) {
      context.getProgressIndicator().checkCanceled();
      final FileDependencyInfo info = compileFile(context, ijModule, file, outputItems);
      if (info != null) {
        cache.put(info);
      }
    }

    // Result
    sink.add(getOutputDirectory(context, ijModule, false).getPath(), outputItems, VirtualFile.EMPTY_ARRAY);
  }

  /**
   * Incremental path: compiles {@code filesToCompile}, then recurses once over
   * files that depend on anything whose fingerprint actually changed.
   */
  private void incrementalCompile(@NotNull final CompileContext context,
                                  FileDependencyCache cache,
                                  @NotNull Module ijModule,
                                  @NotNull List<VirtualFile> processedFiles,
                                  @NotNull List<VirtualFile> filesToCompile,
                                  @NotNull final OutputSink sink,
                                  boolean considerDependents) {
    if (filesToCompile.size() > CompilerSettings.getInstance().getExternalToIncrementalCompilerLimit()) {
      externalCompiler = createOrGetExternalCompiler(context.getProject());
    }

    // Cache read
    final Map<VirtualFile, FileDependencyInfo> fileToFingerprint = Maps.newHashMap();
    final Multimap<VirtualFile, VirtualFile> fileToDependents = HashMultimap.create();
    for (VirtualFile file : filesToCompile) {
      // Fingerprint
      final FileDependencyInfo fileDependency = cache.get(file);
      if (fileDependency != null) {
        fileToFingerprint.put(file, fileDependency);
      }
      cache.remove(file);

      // Dependencies
      if (considerDependents) {
        Set<VirtualFile> dependents;
        if ("display.properties".equals(file.getName())) {
          // display.properties is special-cased: only files using added/removed keys.
          dependents = getDisplayKeysDependents(file, cache);
        } else {
          dependents = cache.getDependentsOn(file);
        }
        if (!dependents.isEmpty()) {
          fileToDependents.putAll(file, dependents);
        }
      }
    }

    // Preparing for compilation
    refreshFiles(context, filesToCompile);

    // Compile
    final List<OutputItem> outputItems = Lists.newArrayList();
    for (VirtualFile file : filesToCompile) {
      context.getProgressIndicator().checkCanceled();
      if (!file.exists()) {
        continue;
      }
      Module fileModule = context.getModuleByFile(file);
      if (fileModule == null) {
        // this will happen when some deleted files are still in dependency cache
        // but parent directory for such files were deleted
        // and index does not have module for this folder/package any more.
        continue;
      }
      final FileDependencyInfo info = compileFile(context, fileModule, file, outputItems);
      final FileDependencyInfo oldFileDependencyInfo = fileToFingerprint.get(file);
      if (info != null) {
        cache.put(info);
        // Unchanged fingerprint means dependents see no visible difference.
        if (oldFileDependencyInfo != null && oldFileDependencyInfo.getFingerprint() == info.getFingerprint()) {
          fileToDependents.removeAll(file);
        }
      } else {
        if (oldFileDependencyInfo != null) {
          // put old info back to avoid second layer of dependencies.
          cache.put(oldFileDependencyInfo);
          // do not recompile dependents until dependency is compiled
          fileToDependents.removeAll(file);
        }
      }
      processedFiles.add(file);
    }

    // Dependencies
    final Set<VirtualFile> dependents = Sets.newLinkedHashSet(fileToDependents.values());
    dependents.removeAll(processedFiles);

    // Result with external dependencies
    // sink.add(getOutputDirectory(context, ijModule, false).getPath(), outputItems, new VirtualFile[0]);
    // NOTE(review): the sink.add above is commented out in the original source, so
    // incremental output is apparently not published through the sink here —
    // presumably handled elsewhere (e.g. by the external compiler); confirm.

    // Do it again
    if (!dependents.isEmpty()) {
      incrementalCompile(context, cache, ijModule, processedFiles, Lists.newArrayList(dependents), sink, false);
    }
  }

  /** Lazily resolves and caches the external-compiler project component. */
  private IExternalCompiler createOrGetExternalCompiler(Project project) {
    if (externalCompiler == null) {
      externalCompiler = project.getComponent(IExternalCompiler.class);
    }
    return externalCompiler;
  }

  /** Files depending on any display key that was added to or deleted from {@code propFile}. */
  private Set<VirtualFile> getDisplayKeysDependents(VirtualFile propFile, FileDependencyCache cache) {
    Set<VirtualFile> dependents = Sets.newHashSet();

    final Set<String> displayKeysNew = loadPropertyKeys(propFile);
    final Set<String> displayKeysOld = cache.getDisplayKeys(propFile);

    final Set<String> added = Sets.newHashSet(displayKeysNew);
    added.removeAll(displayKeysOld);

    final Set<String> deleted = Sets.newHashSet(displayKeysOld);
    deleted.removeAll(displayKeysNew);

    for (String key : added) {
      dependents.addAll(cache.getDependentsOnByDisplayKey(key));
    }
    for (String key : deleted) {
      dependents.addAll(cache.getDependentsOnByDisplayKey(key));
    }
    return dependents;
  }

  /** Extends {@code fp} with the property keys of {@code file}, preserving file order. */
  private static void handlePropertiesFileFingerprint(VirtualFile file, FP64 fp) {
    OrderedPropertyKeys keys = new OrderedPropertyKeys();
    try (StringReader reader = new StringReader(FileDocumentManager.getInstance().getDocument(file).getText())) {
      keys.load(reader);
      for (String key : keys.getKeys()) {
        fp.extend(key);
      }
    } catch (IOException e) {
      // Log with the cause instead of printStackTrace().
      LOG.warn("Could not load *.properties during taking fingerprint", e);
    }
  }

  /** Extends {@code fp} with the file's full (possibly unsaved) document text. */
  private static void handleFileText(VirtualFile file, FP64 fp) {
    final String text = FileDocumentManager.getInstance().getDocument(file).getText();
    fp.extend(text);
  }

  /**
   * Properties subclass that only records keys, in encounter order.
   * put() intentionally stores nothing and returns null; duplicates are kept.
   */
  private static class OrderedPropertyKeys extends Properties {
    private ArrayList<String> orderedKeys = new ArrayList<>(128);

    @Override
    public synchronized Object put(Object key, Object value) {
      orderedKeys.add((String) key);
      return null;
    }

    private ArrayList<String> getKeys() {
      return orderedKeys;
    }
  }

  /** Loads the set of property keys from a properties file's document text. */
  private static Set<String> loadPropertyKeys(VirtualFile file) {
    PropertyKeysSet keys = new PropertyKeysSet();
    try (StringReader reader = new StringReader(FileDocumentManager.getInstance().getDocument(file).getText())) {
      keys.load(reader);
    } catch (IOException e) {
      // Log with the cause instead of printStackTrace(); an empty set is returned.
      LOG.warn("Could not load *.properties keys", e);
    }
    return keys.getKeys();
  }

  /** Properties subclass that only records the key set; put() stores nothing. */
  private static class PropertyKeysSet extends Properties {
    private Set<String> keys = Sets.newHashSet();

    @Override
    public synchronized Object put(Object key, Object value) {
      keys.add((String) key);
      return null;
    }

    private Set<String> getKeys() {
      return keys;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams;

import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.TopicConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.FailOnInvalidTimestamp;
import org.apache.kafka.streams.processor.TimestampExtractor;
import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;

import static org.apache.kafka.common.requests.IsolationLevel.READ_COMMITTED;
import static org.apache.kafka.common.requests.IsolationLevel.READ_UNCOMMITTED;
import static org.apache.kafka.streams.StreamsConfig.EXACTLY_ONCE;
import static org.apache.kafka.streams.StreamsConfig.TOPOLOGY_OPTIMIZATION;
import static org.apache.kafka.streams.StreamsConfig.adminClientPrefix;
import static org.apache.kafka.streams.StreamsConfig.consumerPrefix;
import static org.apache.kafka.streams.StreamsConfig.producerPrefix;
import static org.apache.kafka.test.StreamsTestUtils.minimalStreamsConfig;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Unit tests for {@link StreamsConfig}: derivation of producer/consumer/admin
 * client configs, prefixed-config precedence, exactly-once (EOS) defaults and
 * overrides, default serde/timestamp-extractor resolution, and validation of
 * user-supplied values.
 */
public class StreamsConfigTest {

    private final Properties props = new Properties();
    private StreamsConfig streamsConfig;

    @Before
    public void setUp() {
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-config-test");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put("key.deserializer.encoding", "UTF8");
        props.put("value.deserializer.encoding", "UTF-16");
        streamsConfig = new StreamsConfig(props);
    }

    @Test(expected = ConfigException.class)
    public void shouldThrowExceptionIfApplicationIdIsNotSet() {
        props.remove(StreamsConfig.APPLICATION_ID_CONFIG);
        new StreamsConfig(props);
    }

    @Test(expected = ConfigException.class)
    public void shouldThrowExceptionIfBootstrapServersIsNotSet() {
        props.remove(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
        new StreamsConfig(props);
    }

    @Test
    public void testGetProducerConfigs() {
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getProducerConfigs(clientId);
        assertEquals(clientId + "-producer", returnedProps.get(ProducerConfig.CLIENT_ID_CONFIG));
        assertEquals("100", returnedProps.get(ProducerConfig.LINGER_MS_CONFIG));
    }

    @Test
    public void testGetConsumerConfigs() {
        final String groupId = "example-application";
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getMainConsumerConfigs(groupId, clientId);
        assertEquals(clientId + "-consumer", returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG));
        assertEquals(groupId, returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG));
        assertEquals("1000", returnedProps.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void consumerConfigMustContainStreamPartitionAssignorConfig() {
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 42);
        props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        props.put(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG, 7L);
        props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "dummy:host");
        props.put(StreamsConfig.RETRIES_CONFIG, 10);
        props.put(StreamsConfig.adminClientPrefix(StreamsConfig.RETRIES_CONFIG), 5);
        props.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG), 100);
        final StreamsConfig streamsConfig = new StreamsConfig(props);

        final String groupId = "example-application";
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getMainConsumerConfigs(groupId, clientId);
        assertEquals(42, returnedProps.get(StreamsConfig.REPLICATION_FACTOR_CONFIG));
        assertEquals(1, returnedProps.get(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG));
        assertEquals(StreamsPartitionAssignor.class.getName(),
            returnedProps.get(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG));
        assertEquals(7L, returnedProps.get(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG));
        assertEquals("dummy:host", returnedProps.get(StreamsConfig.APPLICATION_SERVER_CONFIG));
        // The un-prefixed `retries` config must not leak into the consumer config;
        // only the admin-prefixed variant is forwarded.
        assertNull(returnedProps.get(StreamsConfig.RETRIES_CONFIG));
        assertEquals(5, returnedProps.get(StreamsConfig.adminClientPrefix(StreamsConfig.RETRIES_CONFIG)));
        assertEquals(100, returnedProps.get(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG)));
    }

    @Test
    public void consumerConfigMustUseAdminClientConfigForRetries() {
        props.put(StreamsConfig.adminClientPrefix(StreamsConfig.RETRIES_CONFIG), 20);
        props.put(StreamsConfig.RETRIES_CONFIG, 10);
        final StreamsConfig streamsConfig = new StreamsConfig(props);

        final String groupId = "example-application";
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getMainConsumerConfigs(groupId, clientId);
        assertEquals(20, returnedProps.get(StreamsConfig.adminClientPrefix(StreamsConfig.RETRIES_CONFIG)));
    }

    @Test
    public void testGetMainConsumerConfigsWithMainConsumerOverridenPrefix() {
        // main.consumer. prefix must win over the generic consumer. prefix.
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "5");
        props.put(StreamsConfig.mainConsumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "50");
        final String groupId = "example-application";
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getMainConsumerConfigs(groupId, clientId);
        assertEquals("50", returnedProps.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void testGetRestoreConsumerConfigs() {
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getRestoreConsumerConfigs(clientId);
        assertEquals(clientId + "-restore-consumer", returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG));
        // The restore consumer must not join a consumer group.
        assertNull(returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG));
    }

    @Test
    public void defaultSerdeShouldBeConfigured() {
        final Map<String, Object> serializerConfigs = new HashMap<>();
        serializerConfigs.put("key.serializer.encoding", "UTF8");
        serializerConfigs.put("value.serializer.encoding", "UTF-16");
        final Serializer<String> serializer = Serdes.String().serializer();

        final String str = "my string for testing";
        final String topic = "my topic";

        serializer.configure(serializerConfigs, true);
        assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
            str, streamsConfig.defaultKeySerde().deserializer().deserialize(topic, serializer.serialize(topic, str)));

        serializer.configure(serializerConfigs, false);
        assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
            str, streamsConfig.defaultValueSerde().deserializer().deserialize(topic, serializer.serialize(topic, str)));
    }

    @Test
    public void shouldSupportMultipleBootstrapServers() {
        final List<String> expectedBootstrapServers = Arrays.asList("broker1:9092", "broker2:9092");
        final String bootstrapServersString = Utils.join(expectedBootstrapServers, ",");
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "irrelevant");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServersString);
        final StreamsConfig config = new StreamsConfig(props);

        final List<String> actualBootstrapServers = config.getList(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
        assertEquals(expectedBootstrapServers, actualBootstrapServers);
    }

    @Test
    public void shouldSupportPrefixedConsumerConfigs() {
        props.put(consumerPrefix(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), "earliest");
        props.put(consumerPrefix(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG), 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertEquals("earliest", consumerConfigs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG));
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldSupportPrefixedRestoreConsumerConfigs() {
        props.put(consumerPrefix(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG), 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId");
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldSupportPrefixedPropertiesThatAreNotPartOfConsumerConfig() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put(consumerPrefix("interceptor.statsd.host"), "host");
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertEquals("host", consumerConfigs.get("interceptor.statsd.host"));
    }

    @Test
    public void shouldSupportPrefixedPropertiesThatAreNotPartOfRestoreConsumerConfig() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put(consumerPrefix("interceptor.statsd.host"), "host");
        final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId");
        assertEquals("host", consumerConfigs.get("interceptor.statsd.host"));
    }

    @Test
    public void shouldSupportPrefixedPropertiesThatAreNotPartOfProducerConfig() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put(producerPrefix("interceptor.statsd.host"), "host");
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        assertEquals("host", producerConfigs.get("interceptor.statsd.host"));
    }

    @Test
    public void shouldSupportPrefixedProducerConfigs() {
        props.put(producerPrefix(ProducerConfig.BUFFER_MEMORY_CONFIG), 10);
        props.put(producerPrefix(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG), 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> configs = streamsConfig.getProducerConfigs("clientId");
        assertEquals(10, configs.get(ProducerConfig.BUFFER_MEMORY_CONFIG));
        assertEquals(1, configs.get(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldBeSupportNonPrefixedConsumerConfigs() {
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertEquals("earliest", consumerConfigs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG));
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldBeSupportNonPrefixedRestoreConsumerConfigs() {
        props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("groupId");
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldSupportNonPrefixedProducerConfigs() {
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 10);
        props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> configs = streamsConfig.getProducerConfigs("clientId");
        assertEquals(10, configs.get(ProducerConfig.BUFFER_MEMORY_CONFIG));
        assertEquals(1, configs.get(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldForwardCustomConfigsWithNoPrefixToAllClients() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put("custom.property.host", "host");
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        final Map<String, Object> restoreConsumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId");
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        final Map<String, Object> adminConfigs = streamsConfig.getAdminConfigs("clientId");
        assertEquals("host", consumerConfigs.get("custom.property.host"));
        assertEquals("host", restoreConsumerConfigs.get("custom.property.host"));
        assertEquals("host", producerConfigs.get("custom.property.host"));
        assertEquals("host", adminConfigs.get("custom.property.host"));
    }

    @Test
    public void shouldOverrideNonPrefixedCustomConfigsWithPrefixedConfigs() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put("custom.property.host", "host0");
        props.put(consumerPrefix("custom.property.host"), "host1");
        props.put(producerPrefix("custom.property.host"), "host2");
        props.put(adminClientPrefix("custom.property.host"), "host3");
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        final Map<String, Object> restoreConsumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId");
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        final Map<String, Object> adminConfigs = streamsConfig.getAdminConfigs("clientId");
        assertEquals("host1", consumerConfigs.get("custom.property.host"));
        assertEquals("host1", restoreConsumerConfigs.get("custom.property.host"));
        assertEquals("host2", producerConfigs.get("custom.property.host"));
        assertEquals("host3", adminConfigs.get("custom.property.host"));
    }

    @Test
    public void shouldSupportNonPrefixedAdminConfigs() {
        props.put(AdminClientConfig.RETRIES_CONFIG, 10);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> configs = streamsConfig.getAdminConfigs("clientId");
        assertEquals(10, configs.get(AdminClientConfig.RETRIES_CONFIG));
    }

    @Test(expected = StreamsException.class)
    public void shouldThrowStreamsExceptionIfKeySerdeConfigFails() {
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, MisconfiguredSerde.class);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        streamsConfig.defaultKeySerde();
    }

    @Test(expected = StreamsException.class)
    public void shouldThrowStreamsExceptionIfValueSerdeConfigFails() {
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, MisconfiguredSerde.class);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        streamsConfig.defaultValueSerde();
    }

    @Test
    public void shouldOverrideStreamsDefaultConsumerConfigs() {
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), "latest");
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "10");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertEquals("latest", consumerConfigs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG));
        assertEquals("10", consumerConfigs.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void shouldOverrideStreamsDefaultProducerConfigs() {
        props.put(StreamsConfig.producerPrefix(ProducerConfig.LINGER_MS_CONFIG), "10000");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        assertEquals("10000", producerConfigs.get(ProducerConfig.LINGER_MS_CONFIG));
    }

    @Test
    public void shouldOverrideStreamsDefaultConsumerConifgsOnRestoreConsumer() {
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "10");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId");
        assertEquals("10", consumerConfigs.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void shouldResetToDefaultIfConsumerAutoCommitIsOverridden() {
        // Streams manages offsets itself, so auto-commit must always be forced off.
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG), "true");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("a", "b");
        assertEquals("false", consumerConfigs.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));
    }

    @Test
    public void shouldResetToDefaultIfRestoreConsumerAutoCommitIsOverridden() {
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG), "true");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("client");
        assertEquals("false", consumerConfigs.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));
    }

    @Test
    public void testGetRestoreConsumerConfigsWithRestoreConsumerOverridenPrefix() {
        // restore.consumer. prefix must win over the generic consumer. prefix.
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "5");
        props.put(StreamsConfig.restoreConsumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "50");
        final Map<String, Object> returnedProps = streamsConfig.getRestoreConsumerConfigs("clientId");
        assertEquals("50", returnedProps.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void testGetGlobalConsumerConfigs() {
        final String clientId = "client";
        final Map<String, Object> returnedProps = streamsConfig.getGlobalConsumerConfigs(clientId);
        assertEquals(clientId + "-global-consumer", returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG));
        // The global consumer must not join a consumer group.
        assertNull(returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG));
    }

    @Test
    public void shouldSupportPrefixedGlobalConsumerConfigs() {
        props.put(consumerPrefix(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG), 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getGlobalConsumerConfigs("clientId");
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldSupportPrefixedPropertiesThatAreNotPartOfGlobalConsumerConfig() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        props.put(consumerPrefix("interceptor.statsd.host"), "host");
        final Map<String, Object> consumerConfigs = streamsConfig.getGlobalConsumerConfigs("clientId");
        assertEquals("host", consumerConfigs.get("interceptor.statsd.host"));
    }

    @Test
    public void shouldBeSupportNonPrefixedGlobalConsumerConfigs() {
        props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getGlobalConsumerConfigs("groupId");
        assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG));
    }

    @Test
    public void shouldResetToDefaultIfGlobalConsumerAutoCommitIsOverridden() {
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG), "true");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getGlobalConsumerConfigs("client");
        assertEquals("false", consumerConfigs.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));
    }

    @Test
    public void testGetGlobalConsumerConfigsWithGlobalConsumerOverridenPrefix() {
        // global.consumer. prefix must win over the generic consumer. prefix.
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "5");
        props.put(StreamsConfig.globalConsumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "50");
        final Map<String, Object> returnedProps = streamsConfig.getGlobalConsumerConfigs("clientId");
        assertEquals("50", returnedProps.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
    }

    @Test
    public void shouldSetInternalLeaveGroupOnCloseConfigToFalseInConsumer() {
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertThat(consumerConfigs.get("internal.leave.group.on.close"), CoreMatchers.<Object>equalTo(false));
    }

    @Test
    public void shouldAcceptAtLeastOnce() {
        // don't use `StreamsConfig.AT_LEAST_ONCE` to actually do a useful test
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, "at_least_once");
        new StreamsConfig(props);
    }

    @Test
    public void shouldAcceptExactlyOnce() {
        // don't use `StreamsConfig.EXACTLY_ONCE` to actually do a useful test
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, "exactly_once");
        new StreamsConfig(props);
    }

    @Test(expected = ConfigException.class)
    public void shouldThrowExceptionIfNotAtLestOnceOrExactlyOnce() {
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, "bad_value");
        new StreamsConfig(props);
    }

    @Test
    public void shouldResetToDefaultIfConsumerIsolationLevelIsOverriddenIfEosEnabled() {
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "anyValue");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        assertThat(
            (String) consumerConfigs.get(ConsumerConfig.ISOLATION_LEVEL_CONFIG),
            equalTo(READ_COMMITTED.name().toLowerCase(Locale.ROOT)));
    }

    @Test
    public void shouldAllowSettingConsumerIsolationLevelIfEosDisabled() {
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, READ_UNCOMMITTED.name().toLowerCase(Locale.ROOT));
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientrId");
        assertThat(
            (String) consumerConfigs.get(ConsumerConfig.ISOLATION_LEVEL_CONFIG),
            equalTo(READ_UNCOMMITTED.name().toLowerCase(Locale.ROOT)));
    }

    @Test
    public void shouldResetToDefaultIfProducerEnableIdempotenceIsOverriddenIfEosEnabled() {
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "anyValue");
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        assertTrue((Boolean) producerConfigs.get(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG));
    }

    @Test
    public void shouldAllowSettingProducerEnableIdempotenceIfEosDisabled() {
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, false);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        assertThat((Boolean) producerConfigs.get(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG), equalTo(false));
    }

    @Test
    public void shouldSetDifferentDefaultsIfEosEnabled() {
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        final StreamsConfig streamsConfig = new StreamsConfig(props);

        final Map<String, Object> consumerConfigs = streamsConfig.getMainConsumerConfigs("groupId", "clientId");
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");

        assertThat(
            (String) consumerConfigs.get(ConsumerConfig.ISOLATION_LEVEL_CONFIG),
            equalTo(READ_COMMITTED.name().toLowerCase(Locale.ROOT)));
        assertTrue((Boolean) producerConfigs.get(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG));
        assertThat((Integer) producerConfigs.get(ProducerConfig.RETRIES_CONFIG), equalTo(Integer.MAX_VALUE));
        assertThat(streamsConfig.getLong(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG), equalTo(100L));
    }

    @Test
    public void shouldNotOverrideUserConfigRetriesIfExactlyOnceEnabled() {
        final int numberOfRetries = 42;
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        props.put(ProducerConfig.RETRIES_CONFIG, numberOfRetries);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId");
        assertThat((Integer) producerConfigs.get(ProducerConfig.RETRIES_CONFIG), equalTo(numberOfRetries));
    }

    @Test
    public void shouldNotOverrideUserConfigCommitIntervalMsIfExactlyOnceEnabled() {
        final long commitIntervalMs = 73L;
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, commitIntervalMs);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        assertThat(streamsConfig.getLong(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG), equalTo(commitIntervalMs));
    }

    @Test
    public void shouldUseNewConfigsWhenPresent() {
        final Properties props = minimalStreamsConfig();
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, MockTimestampExtractor.class);

        final StreamsConfig config = new StreamsConfig(props);
        assertTrue(config.defaultKeySerde() instanceof Serdes.LongSerde);
        assertTrue(config.defaultValueSerde() instanceof Serdes.LongSerde);
        assertTrue(config.defaultTimestampExtractor() instanceof MockTimestampExtractor);
    }

    @Test
    public void shouldUseCorrectDefaultsWhenNoneSpecified() {
        final StreamsConfig config = new StreamsConfig(minimalStreamsConfig());
        assertTrue(config.defaultKeySerde() instanceof Serdes.ByteArraySerde);
        assertTrue(config.defaultValueSerde() instanceof Serdes.ByteArraySerde);
        assertTrue(config.defaultTimestampExtractor() instanceof FailOnInvalidTimestamp);
    }

    @Test
    public void shouldSpecifyCorrectKeySerdeClassOnError() {
        final Properties props = minimalStreamsConfig();
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, MisconfiguredSerde.class);
        final StreamsConfig config = new StreamsConfig(props);
        try {
            config.defaultKeySerde();
            fail("Test should throw a StreamsException");
        } catch (StreamsException e) {
            assertEquals("Failed to configure key serde class org.apache.kafka.streams.StreamsConfigTest$MisconfiguredSerde", e.getMessage());
        }
    }

    @SuppressWarnings("deprecation")
    @Test
    public void shouldSpecifyCorrectValueSerdeClassOnError() {
        final Properties props = minimalStreamsConfig();
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, MisconfiguredSerde.class);
        final StreamsConfig config = new StreamsConfig(props);
        try {
            config.defaultValueSerde();
            fail("Test should throw a StreamsException");
        } catch (StreamsException e) {
            assertEquals("Failed to configure value serde class org.apache.kafka.streams.StreamsConfigTest$MisconfiguredSerde", e.getMessage());
        }
    }

    @Test
    public void shouldThrowExceptionIfMaxInflightRequestsGreatherThanFiveIfEosEnabled() {
        props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 7);
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE);
        final StreamsConfig streamsConfig = new StreamsConfig(props);
        try {
            streamsConfig.getProducerConfigs("clientId");
            fail("Should throw ConfigException when Eos is enabled and maxInFlight requests exceeds 5");
        } catch (final ConfigException e) {
            assertEquals("max.in.flight.requests.per.connection can't exceed 5 when using the idempotent producer", e.getMessage());
        }
    }

    @Test
    public void shouldSpecifyNoOptimizationWhenNotExplicitlyAddedToConfigs() {
        final String expectedOptimizeConfig = "none";
        final String actualOptimizedConifig = streamsConfig.getString(TOPOLOGY_OPTIMIZATION);
        assertEquals("Optimization should be \"none\"", expectedOptimizeConfig, actualOptimizedConifig);
    }

    @Test
    public void shouldSpecifyOptimizationWhenNotExplicitlyAddedToConfigs() {
        final String expectedOptimizeConfig = "all";
        props.put(TOPOLOGY_OPTIMIZATION, "all");
        final StreamsConfig config = new StreamsConfig(props);
        final String actualOptimizedConifig = config.getString(TOPOLOGY_OPTIMIZATION);
        assertEquals("Optimization should be \"all\"", expectedOptimizeConfig, actualOptimizedConifig);
    }

    @Test(expected = ConfigException.class)
    public void shouldThrowConfigExceptionWhenOptimizationConfigNotValueInRange() {
        props.put(TOPOLOGY_OPTIMIZATION, "maybe");
        new StreamsConfig(props);
    }

    /** A serde whose configure() always fails, used to exercise serde error paths. */
    static class MisconfiguredSerde implements Serde {
        @Override
        public void configure(final Map configs, final boolean isKey) {
            throw new RuntimeException("boom");
        }
        @Override
        public void close() {}
        @Override
        public Serializer serializer() {
            return null;
        }
        @Override
        public Deserializer deserializer() {
            return null;
        }
    }

    /** A trivially deterministic extractor, used to verify extractor configuration. */
    public static class MockTimestampExtractor implements TimestampExtractor {
        @Override
        public long extract(final ConsumerRecord<Object, Object> record, final long previousTimestamp) {
            return 0;
        }
    }
}
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.remote; import static java.util.concurrent.TimeUnit.SECONDS; import build.bazel.remote.execution.v2.ExecuteRequest; import build.bazel.remote.execution.v2.ExecuteResponse; import build.bazel.remote.execution.v2.ExecutionGrpc; import build.bazel.remote.execution.v2.ExecutionGrpc.ExecutionBlockingStub; import build.bazel.remote.execution.v2.WaitExecutionRequest; import com.google.common.base.Preconditions; import com.google.devtools.build.lib.authandtls.CallCredentialsProvider; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.remote.RemoteRetrier.ProgressiveBackoff; import com.google.devtools.build.lib.remote.Retrier.Backoff; import com.google.devtools.build.lib.remote.common.OperationObserver; import com.google.devtools.build.lib.remote.common.RemoteExecutionClient; import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import com.google.devtools.build.lib.remote.util.Utils; import com.google.longrunning.Operation; import com.google.longrunning.Operation.ResultCase; import com.google.rpc.Status; import io.grpc.Status.Code; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Iterator; import java.util.concurrent.atomic.AtomicBoolean; import 
java.util.function.Supplier;
import javax.annotation.Nullable;

/**
 * A remote work executor that uses gRPC for communicating the work, inputs and outputs.
 *
 * <p>It differs from {@link GrpcRemoteExecutor} by setting a timeout on each execution call to
 * ensure we never get stuck due to network issues.
 *
 * @see <a href="https://docs.google.com/document/d/1NgDPsCIwprDdqC1zj0qQrh5KGK2hQTSTux1DAvi4rSc">
 *     Keepalived Remote Execution</a>
 */
@ThreadSafe
public class ExperimentalGrpcRemoteExecutor implements RemoteExecutionClient {

  private final RemoteOptions remoteOptions;
  private final ReferenceCountedChannel channel;
  private final CallCredentialsProvider callCredentialsProvider;
  private final RemoteRetrier retrier;

  // Guards against releasing the shared channel more than once (close() may be called twice).
  private final AtomicBoolean closed = new AtomicBoolean();

  public ExperimentalGrpcRemoteExecutor(
      RemoteOptions remoteOptions,
      ReferenceCountedChannel channel,
      CallCredentialsProvider callCredentialsProvider,
      RemoteRetrier retrier) {
    this.remoteOptions = remoteOptions;
    this.channel = channel;
    this.callCredentialsProvider = callCredentialsProvider;
    this.retrier = retrier;
  }

  /**
   * Creates a fresh blocking stub for a single call. A new stub is created per call so that the
   * deadline (--remote_timeout) applies to each individual Execute/WaitExecution call rather than
   * to the whole logical execution.
   */
  private ExecutionBlockingStub executionBlockingStub() {
    return ExecutionGrpc.newBlockingStub(channel)
        .withInterceptors(TracingMetadataUtils.attachMetadataFromContextInterceptor())
        .withCallCredentials(callCredentialsProvider.getCallCredentials())
        .withDeadlineAfter(remoteOptions.remoteTimeout.getSeconds(), SECONDS);
  }

  /**
   * State machine for one logical remote execution: an Execute() call followed, if necessary, by a
   * WaitExecution() loop. Not thread-safe; one instance per execution.
   */
  private static class Execution {
    private final ExecuteRequest request;
    private final OperationObserver observer;
    private final RemoteRetrier retrier;
    private final CallCredentialsProvider callCredentialsProvider;
    // Counts retry attempts for Execute() calls; intentionally never reset, so the total number
    // of Execute() attempts is bounded by --remote_retry.
    private final Backoff executeBackoff;
    // Counts retry attempts for WaitExecution() calls; reset whenever we receive any response
    // from the server that is not an error (i.e. whenever the server shows signs of progress).
    private final ProgressiveBackoff waitExecutionBackoff;
    private final Supplier<ExecutionBlockingStub> executionBlockingStubSupplier;

    // Last response (without error) we received from the server. Null until the server has
    // acknowledged the execution; its name is used for subsequent WaitExecution() calls.
    private Operation lastOperation;

    Execution(
        ExecuteRequest request,
        OperationObserver observer,
        RemoteRetrier retrier,
        CallCredentialsProvider callCredentialsProvider,
        Supplier<ExecutionBlockingStub> executionBlockingStubSupplier) {
      this.request = request;
      this.observer = observer;
      this.retrier = retrier;
      this.callCredentialsProvider = callCredentialsProvider;
      this.executeBackoff = this.retrier.newBackoff();
      this.waitExecutionBackoff = new ProgressiveBackoff(this.retrier::newBackoff);
      this.executionBlockingStubSupplier = executionBlockingStubSupplier;
    }

    /**
     * Runs the execution to completion and returns the final {@link ExecuteResponse}.
     *
     * @throws IOException on unrecoverable gRPC errors or a malformed server reply.
     * @throws InterruptedException if the calling thread is interrupted while retrying.
     */
    ExecuteResponse start() throws IOException, InterruptedException {
      // Execute has two components: the Execute call and (optionally) the WaitExecution call.
      // This is the simple flow without any errors:
      //
      //   - A call to Execute returns streamed updates on an Operation object.
      //   - We wait until the Operation is finished.
      //
      // Error possibilities:
      //   - An Execute call may fail with a retriable error (raise a StatusRuntimeException).
      //     - If the failure occurred before the first Operation is returned and tells us the
      //       execution is accepted, we retry the call.
      //     - Otherwise, we call WaitExecution on the Operation.
      //   - A WaitExecution call may fail with a retriable error (raise a StatusRuntimeException).
      //     In that case, we retry the WaitExecution call on the same operation object.
      //   - A WaitExecution call may fail with a NOT_FOUND error (raise a StatusRuntimeException).
      //     That means the Operation was lost on the server, and we will retry to Execute.
      //   - Any call can return an Operation object with an error status in the result. Such
      //     Operations are completed and failed; however, some of these errors may be retriable.
      //     These errors should trigger a retry of the Execute call, resulting in a new Operation.
      Preconditions.checkState(lastOperation == null);

      ExecuteResponse response = null;
      // Loop until we get a response from either Execute() or WaitExecution().
      while (response == null) {
        // We use refreshIfUnauthenticated inside the retry block. If used outside, the retrier
        // would stop retrying when it received an unauthenticated error, and propagate it to
        // refreshIfUnauthenticated which would then call the retrier again. That would reset the
        // retry counter, so we could retry more than --remote_retry times, which is not expected.
        response =
            retrier.execute(
                () -> Utils.refreshIfUnauthenticated(this::execute, callCredentialsProvider),
                executeBackoff);

        // If no response from Execute(), use WaitExecution() in a "loop" which is implemented
        // inside the retry block.
        //
        // The cases to exit the loop:
        //   1. Received the final response.
        //   2. Received an un-retriable gRPC error.
        //   3. Received a NOT_FOUND error where we will retry Execute() (by returning null).
        //   4. Received consecutive retriable gRPC errors (up to max retry times).
        if (response == null) {
          response =
              retrier.execute(
                  () -> Utils.refreshIfUnauthenticated(this::waitExecution, callCredentialsProvider),
                  waitExecutionBackoff);
        }
      }

      return response;
    }

    /**
     * Issues one Execute() call and consumes its operation stream.
     *
     * @return the final response, or {@code null} to signal that the caller should fall back to
     *     the WaitExecution() loop (the server accepted the execution but the stream broke).
     */
    @Nullable
    ExecuteResponse execute() throws IOException {
      Preconditions.checkState(lastOperation == null);

      try {
        Iterator<Operation> operationStream = executionBlockingStubSupplier.get().execute(request);
        return handleOperationStream(operationStream);
      } catch (StatusRuntimeException e) {
        // If lastOperation is not null, we know the execution request was accepted by the server.
        // In this case, we will fall back to the WaitExecution() loop when the stream is broken.
        if (lastOperation != null) {
          // By returning null, we are going to fall back to the WaitExecution() loop.
          return null;
        }
        throw new IOException(e);
      }
    }

    /**
     * Issues one WaitExecution() call on {@link #lastOperation} and consumes its stream.
     *
     * @return the final response, or {@code null} to signal a retry of Execute() (the server lost
     *     the operation and the Execute backoff still permits a retry).
     */
    @Nullable
    ExecuteResponse waitExecution() throws IOException {
      Preconditions.checkState(lastOperation != null);
      WaitExecutionRequest request =
          WaitExecutionRequest.newBuilder().setName(lastOperation.getName()).build();
      try {
        Iterator<Operation> operationStream =
            executionBlockingStubSupplier.get().waitExecution(request);
        return handleOperationStream(operationStream);
      } catch (StatusRuntimeException e) {
        // A NOT_FOUND error means the Operation was lost on the server; retry Execute().
        //
        // However, we only retry Execute() if executeBackoff allows it. nextDelayMillis() also
        // increments the retry counter as a side effect.
        if (e.getStatus().getCode() == Code.NOT_FOUND && executeBackoff.nextDelayMillis(e) >= 0) {
          lastOperation = null;
          return null;
        }
        throw new IOException(e);
      }
    }

    /** Process a stream of operations from Execute() or WaitExecution(). */
    ExecuteResponse handleOperationStream(Iterator<Operation> operationStream) throws IOException {
      try {
        while (operationStream.hasNext()) {
          Operation operation = operationStream.next();
          ExecuteResponse response = extractResponseOrThrowIfError(operation);

          // At this point, we successfully received a response that is not an error.
          lastOperation = operation;

          // We don't want to reset executeBackoff since if there is an error:
          //   1. If it happened before we received a first response, we want to ensure the retry
          //      counter is increased and call Execute() again.
          //   2. Otherwise, we will fall back to the WaitExecution() loop.
          //
          // This also prevents us from being stuck in an infinite loop:
          //   Execute() -> WaitExecution() -> Execute()
          //
          // However, we do want to reset waitExecutionBackoff so we can "infinitely" wait
          // for the execution to complete as long as the server is making progress (by returning
          // a response that is not an error).
          waitExecutionBackoff.reset();

          // Update execution progress to the observer.
          //
          // After calling `execute` above, the action is actually waiting for an available gRPC
          // connection to be sent. Once we get a reply from the server, we know the connection is
          // up and indicate that fact to the caller by forwarding the `operation`.
          //
          // The accurate execution status of the action relies on the server implementation:
          //   1. The server can reply with the accurate status in `operation.metadata.stage`;
          //   2. The server may send a reply without metadata. In this case, we assume the action
          //      is accepted by the server and will be executed ASAP;
          //   3. The server may execute the action silently and send a reply once it is done.
          observer.onNext(operation);

          if (response != null) {
            return response;
          }
        }

        // The operation stream completed but never produced a final result.
        throw new IOException("Remote server error: execution terminated with no result");
      } finally {
        close(operationStream);
      }
    }

    /** Drains a blocking operation stream so gRPC can close the underlying call cleanly. */
    static void close(Iterator<Operation> operationStream) {
      // The blocking streaming call closes correctly only when trailers and a Status are received
      // from the server so that onClose() is called on this call's CallListener. Under normal
      // circumstances (no cancel/errors), these are guaranteed to be sent by the server only if
      // operationStream.hasNext() has been called after all replies from the stream have been
      // consumed.
      try {
        while (operationStream.hasNext()) {
          operationStream.next();
        }
      } catch (StatusRuntimeException e) {
        // Cleanup: ignore exceptions, because the meaningful errors have already been propagated.
      }
    }

    /** Throws an {@link ExecutionStatusException} unless {@code status} is OK. */
    static void throwIfError(Status status, @Nullable ExecuteResponse resp) {
      if (status.getCode() == Code.OK.value()) {
        return;
      }
      throw new ExecutionStatusException(status, resp);
    }

    /**
     * Extracts the {@link ExecuteResponse} from a completed {@link Operation}, or returns
     * {@code null} for an in-progress one.
     *
     * @throws ExecutionStatusException if the operation or response carries an error status, or a
     *     done operation carries no result at all.
     * @throws IOException if the packed response cannot be unpacked.
     */
    @Nullable
    static ExecuteResponse extractResponseOrThrowIfError(Operation operation) throws IOException {
      if (operation.getResultCase() == Operation.ResultCase.ERROR) {
        throwIfError(operation.getError(), null);
      }
      if (operation.getDone()) {
        if (operation.getResultCase() == ResultCase.RESULT_NOT_SET) {
          throw new ExecutionStatusException(
              Status.newBuilder()
                  .setCode(com.google.rpc.Code.DATA_LOSS_VALUE)
                  .setMessage("Unexpected result of remote execution: no result")
                  .build(),
              null);
        }
        Preconditions.checkState(operation.getResultCase() != Operation.ResultCase.RESULT_NOT_SET);
        ExecuteResponse response = operation.getResponse().unpack(ExecuteResponse.class);
        if (response.hasStatus()) {
          throwIfError(response.getStatus(), response);
        }
        if (!response.hasResult()) {
          throw new ExecutionStatusException(
              Status.newBuilder()
                  .setCode(com.google.rpc.Code.DATA_LOSS_VALUE)
                  .setMessage("Unexpected result of remote execution: no result")
                  .build(),
              response);
        }
        return response;
      }
      return null;
    }
  }

  @Override
  public ExecuteResponse executeRemotely(ExecuteRequest request, OperationObserver observer)
      throws IOException, InterruptedException {
    Execution execution =
        new Execution(
            request, observer, retrier, callCredentialsProvider, this::executionBlockingStub);
    return execution.start();
  }

  @Override
  public void close() {
    if (closed.getAndSet(true)) {
      return;
    }
    channel.release();
  }
}
// Copyright 2014 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gwtorm.schema.sql;

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeNoException;

import com.google.gwtorm.data.Address;
import com.google.gwtorm.data.Person;
import com.google.gwtorm.data.PhoneBookDb;
import com.google.gwtorm.data.PhoneBookDb2;
import com.google.gwtorm.jdbc.Database;
import com.google.gwtorm.jdbc.JdbcExecutor;
import com.google.gwtorm.jdbc.JdbcSchema;
import com.google.gwtorm.jdbc.SimpleDataSource;
import com.google.gwtorm.server.OrmDuplicateKeyException;
import com.google.gwtorm.server.OrmException;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Properties;
import java.util.Set;

/**
 * Integration tests for {@link DialectMaxDB} against a live SAP MaxDB instance.
 *
 * <p>Connection parameters are read from the system properties {@code maxdb.url},
 * {@code maxdb.user} and {@code maxdb.password}. The whole suite is skipped (via JUnit
 * assumptions) when the MaxDB JDBC driver is not on the classpath.
 */
public class DialectMaxDBTest extends SqlDialectTest {
  private static final String MAXDB_URL_KEY = "maxdb.url";
  private static final String MAXDB_USER_KEY = "maxdb.user";
  private static final String MAXDB_PASSWORD_KEY = "maxdb.password";
  private static final String MAXDB_DRIVER = "com.sap.dbtech.jdbc.DriverSapDB";

  @Before
  public void setUp() throws Exception {
    // Skip the entire suite when the MaxDB JDBC driver is unavailable.
    try {
      Class.forName(MAXDB_DRIVER);
    } catch (Exception e) {
      assumeNoException(e);
    }

    final String url = System.getProperty(MAXDB_URL_KEY);
    final String user = System.getProperty(MAXDB_USER_KEY);
    final String pass = System.getProperty(MAXDB_PASSWORD_KEY);

    db = DriverManager.getConnection(url, user, pass);
    executor = new JdbcExecutor(db);
    dialect = new DialectMaxDB().refine(db);

    final Properties p = new Properties();
    p.setProperty("driver", MAXDB_DRIVER);
    p.setProperty("url", db.getMetaData().getURL());
    p.setProperty("user", user);
    p.setProperty("password", pass);
    phoneBook = new Database<>(new SimpleDataSource(p), PhoneBookDb.class);
    phoneBook2 = new Database<>(new SimpleDataSource(p), PhoneBookDb2.class);
  }

  /** Best-effort DROP of a schema object; failures (e.g. object absent) are ignored. */
  private void drop(String drop) {
    try {
      execute("DROP " + drop);
    } catch (OrmException e) {
      // Ignored on purpose: cleanup must not fail when the object does not exist.
    }
  }

  @After
  public void tearDown() {
    if (executor == null) {
      // setUp was skipped (driver missing) or failed before creating the executor.
      return;
    }

    // Database content must be flushed because tests assume the database is empty.
    drop("SEQUENCE address_id");
    drop("SEQUENCE cnt");

    drop("TABLE addresses");
    drop("TABLE foo");
    drop("TABLE bar");
    drop("TABLE people");

    // The early return above guarantees executor is non-null here.
    executor.close();
    executor = null;

    if (db != null) {
      try {
        db.close();
      } catch (SQLException e) {
        throw new RuntimeException("Cannot close database", e);
      }
    }
    db = null;
  }

  private void execute(final String sql) throws OrmException {
    executor.execute(sql);
  }

  @Test
  public void testListSequences() throws OrmException, SQLException {
    assertTrue(dialect.listSequences(db).isEmpty());

    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listSequences(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("cnt"));
    assertFalse(s.contains("foo"));
  }

  @Test
  public void testListTables() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());

    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertFalse(s.contains("cnt"));
    assertTrue(s.contains("foo"));
  }

  @Test
  public void testListIndexes() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());

    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT, bar INT, baz INT)");
    execute("CREATE UNIQUE INDEX FOO_PRIMARY_IND ON foo(cnt)");
    execute("CREATE INDEX FOO_SECOND_IND ON foo(bar, baz)");

    Set<String> s = dialect.listIndexes(db, "foo");
    assertEquals(2, s.size());
    assertTrue(s.contains("foo_primary_ind"));
    assertTrue(s.contains("foo_second_ind"));

    dialect.dropIndex(executor, "foo", "foo_primary_ind");
    dialect.dropIndex(executor, "foo", "foo_second_ind");
    assertEquals(Collections.emptySet(), dialect.listIndexes(db, "foo"));
  }

  @Test
  public void testUpgradeSchema() throws SQLException, OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      // Mutate the schema by hand so updateSchema has something to repair.
      execute("CREATE SEQUENCE cnt");
      execute("CREATE TABLE foo (cnt INT)");

      execute("ALTER TABLE people ADD fake_name VARCHAR(20)");
      execute("ALTER TABLE people DROP COLUMN registered");
      execute("DROP TABLE addresses");
      execute("DROP SEQUENCE address_id");

      Set<String> sequences, tables;

      // updateSchema restores missing objects but leaves extra ones in place.
      p.updateSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      assertTrue(sequences.contains("cnt"));
      assertTrue(tables.contains("foo"));
      assertTrue(sequences.contains("address_id"));
      assertTrue(tables.contains("addresses"));

      // pruneSchema removes objects that are not part of the model.
      p.pruneSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      assertFalse(sequences.contains("cnt"));
      assertFalse(tables.contains("foo"));

      // The restored schema must be usable for inserts.
      final Person.Key pk = new Person.Key("Bob");
      final Person bob = new Person(pk, p.nextAddressId());
      p.people().insert(asList(bob));

      final Address addr = new Address(new Address.Key(pk, "home"), "some place");
      p.addresses().insert(asList(addr));
    } finally {
      p.close();
    }

    final PhoneBookDb2 p2 = phoneBook2.open();
    try {
      ((JdbcSchema) p2).renameField(executor, "people", "registered", "isRegistered");
    } finally {
      p2.close();
    }
  }

  @Test
  public void testRenameTable() throws SQLException, OrmException {
    assertTrue(dialect.listTables(db).isEmpty());
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("foo"));

    final PhoneBookDb p = phoneBook.open();
    try {
      ((JdbcSchema) p).renameTable(executor, "foo", "bar");
    } finally {
      p.close();
    }

    s = dialect.listTables(db);
    assertTrue(s.contains("bar"));
    // Fixed: the original asserted !s.contains("for") — a typo that trivially passed and never
    // verified the rename actually removed the old table name.
    assertFalse(s.contains("foo"));
  }

  @Test
  public void testInsert() throws OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      final Person.Key pk = new Person.Key("Bob");
      final Person bob = new Person(pk, p.nextAddressId());
      p.people().insert(asList(bob));

      // Inserting the same primary key twice must raise a duplicate-key error.
      try {
        p.people().insert(asList(bob));
        fail();
      } catch (OrmDuplicateKeyException duprec) {
        // expected
      }
    } finally {
      p.close();
    }
  }

  @Test
  public void testConstraintViolationOnIndex() throws OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);
      execute("CREATE UNIQUE INDEX idx ON people (age)");
      try {
        final Person.Key pk = new Person.Key("Bob");
        final Person bob = new Person(pk, p.nextAddressId());
        bob.setAge(40);
        p.people().insert(asList(bob));

        final Person.Key joePk = new Person.Key("Joe");
        Person joe = new Person(joePk, p.nextAddressId());
        joe.setAge(40);
        try {
          p.people().insert(asList(joe));
          fail();
        } catch (OrmDuplicateKeyException duprec) {
          // MaxDB reports a unique-index violation as a generic error, not a duplicate key.
          fail();
        } catch (OrmException noDuprec) {
          // expected
        }
      } finally {
        execute("DROP INDEX idx ON people");
      }
    } finally {
      p.close();
    }
  }

  @Test
  public void testUpdate() throws OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      final Person.Key pk = new Person.Key("Bob");
      Person bob = new Person(pk, p.nextAddressId());
      bob.setAge(40);
      p.people().insert(asList(bob));

      bob.setAge(50);
      p.people().update(asList(bob));

      bob = p.people().get(pk);
      assertEquals(50, bob.age());
    } finally {
      p.close();
    }
  }

  @Test
  public void testUpsert() throws OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      final Person.Key bobPk = new Person.Key("Bob");
      Person bob = new Person(bobPk, p.nextAddressId());
      bob.setAge(40);
      p.people().insert(asList(bob));

      final Person.Key joePk = new Person.Key("Joe");
      Person joe = new Person(joePk, p.nextAddressId());

      // Upsert must update the existing row (Bob) and insert the new one (Joe).
      bob.setAge(50);
      p.people().upsert(asList(bob, joe));

      bob = p.people().get(bobPk);
      assertEquals(50, bob.age());
      assertNotNull(p.people().get(joePk));
    } finally {
      p.close();
    }
  }
}
/*<license>
Copyright 2004 - $Date: 2008-06-26 21:33:40 +0200 (Thu, 26 Jun 2008) $ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.vernacular.persistence_III.dao.hibernate2;


import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;

import java.io.Serializable;
import java.util.ConcurrentModificationException;
import java.util.List;

import net.sf.hibernate.Criteria;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.Query;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.vernacular.persistence_III.PersistenceExternalError;
import org.ppwcode.vernacular.persistence_III.PersistentBean;
import org.ppwcode.vernacular.persistence_III.dao.PagingList;
import org.toryt.annotations_I.Basic;
import org.toryt.annotations_I.Expression;
import org.toryt.annotations_I.Invars;
import org.toryt.annotations_I.MethodContract;
import org.toryt.annotations_I.Throw;


/**
 * A list of lists, that contains the result of a Hibernate query
 * using paging. Each page, except for the last,
 * has size {@link #getPageSize()}.
 * When the virtual resultset on the DB changes during iteration,
 * we throw a {@link ConcurrentModificationException} when the next
 * page is requested.
 *
 * <p>Exactly one of {@link #getQuery()} or {@link #getCriteria()} is set,
 * depending on the constructor used; {@link #retrievePage(int, int)} dispatches
 * on whichever is non-null.
 *
 * @author Jan Dockx
 * @author Ruben Vandeginste
 * @author Peopleware n.v.
 */
@Copyright("2004 - $Date: 2008-06-26 21:33:40 +0200 (Thu, 26 Jun 2008) $, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision: 1402 $",
         date     = "$Date: 2008-06-26 21:33:40 +0200 (Thu, 26 Jun 2008) $")
public final class Hibernate2PagingList<_Id_ extends Serializable, _PersistentBean_ extends PersistentBean<_Id_>>
    extends PagingList<_Id_, _PersistentBean_> {

  private static final Log LOG = LogFactory.getLog(Hibernate2PagingList.class);



  /*<construction>*/
  //------------------------------------------------------------------

  /**
   * Create a paging list backed by a Hibernate {@link Query}.
   * {@code countQuery} must return the total record count as a single Integer.
   */
  @MethodContract(
    pre  = {
      @Expression("_query != null"),
      @Expression("_countQuery != null"),
      @Expression("_pageSize > 0")
    },
    post = {
      @Expression("^query == _query"),
      @Expression("^countQuery == _countQuery"),
      @Expression("^pageSize == _pageSize")
    },
    exc  = {
      @Throw(type = PersistenceExternalError.class, cond = @Expression("true"))
    }
  )
  public Hibernate2PagingList(Query query, Query countQuery, int pageSize) throws PersistenceExternalError {
    // retrieveRecordCount may throw PersistenceExternalError; it runs before any state is set.
    super(pageSize, retrieveRecordCount(countQuery));
    assert query != null;
    assert countQuery != null;
    $query = query;
    $countQuery = countQuery;
  }

  /**
   * Create a paging list backed by a Hibernate {@link Criteria}.
   * {@code countQuery} must return the total record count as a single Integer.
   */
  @MethodContract(
    pre  = {
      @Expression("_criteria != null"),
      @Expression("_countQuery != null"),
      @Expression("_pageSize > 0")
    },
    post = {
      @Expression("^criteria == _criteria"),
      @Expression("^countQuery == _countQuery"),
      @Expression("^pageSize == _pageSize")
    },
    exc  = {
      @Throw(type = PersistenceExternalError.class, cond = @Expression("true"))
    }
  )
  public Hibernate2PagingList(Criteria criteria, Query countQuery, int pageSize) throws PersistenceExternalError {
    super(pageSize, retrieveRecordCount(countQuery));
    assert criteria != null;
    assert countQuery != null;
    $criteria = criteria;
    $countQuery = countQuery;
  }

  /*</construction>*/



  /*<property name="query">*/
  //------------------------------------------------------------------

  /** The backing query; {@code null} when the criteria-based constructor was used. */
  @Basic
  public final Query getQuery() {
    return $query;
  }

  @Invars({
    @Expression("$query != null")
  })
  private Query $query;

  /*</property>*/



  /*<property name="criteria">*/
  //------------------------------------------------------------------

  /** The backing criteria; {@code null} when the query-based constructor was used. */
  @Basic
  public final Criteria getCriteria() {
    return $criteria;
  }

  @Invars({
    @Expression("$criteria != null")
  })
  private Criteria $criteria;

  /*</property>*/



  /*<property name="countQuery">*/
  //------------------------------------------------------------------

  /** The query used to (re-)count the total number of records. */
  @Basic
  public final Query getCountQuery() {
    return $countQuery;
  }

  /**
   * Run {@code countQuery} and return its single Integer result.
   *
   * @throws PersistenceExternalError when Hibernate fails to execute the count query.
   */
  private static int retrieveRecordCount(Query countQuery) throws PersistenceExternalError {
    try {
      LOG.debug("retrieving record count");
      int result = ((Integer)countQuery.uniqueResult()).intValue();
      LOG.debug("record count is " + result);
      return result;
    } catch (HibernateException e) {
      throw new PersistenceExternalError("cannot retrieve count", e);
    }
  }

  @Override
  protected final int retrieveRecordCount() throws PersistenceExternalError {
    return retrieveRecordCount(getCountQuery());
  }

  @Invars({
    @Expression("$countQuery != null")
  })
  private Query $countQuery;

  /*</property>*/



  /**
   * Retrieve one page of beans, dispatching to the criteria- or query-backed
   * implementation depending on which constructor was used.
   */
  @Override
  protected final List<_PersistentBean_> retrievePage(int retrieveSize, int startOfPage)
      throws PersistenceExternalError {
    try {
      List<_PersistentBean_> page = null;
      if ($criteria != null) {
        page = retrievePageCriteria(retrieveSize, startOfPage);
      }
      else if ($query != null) {
        page = retrievePageQuery(retrieveSize, startOfPage);
      }
      else {
        // The constructors guarantee exactly one of $criteria / $query is set.
        assert false : "Cannot happen";
      }
      LOG.debug("page retrieved successfully");
      return page;
    }
    catch (HibernateException hExc) {
      throw new PersistenceExternalError("cannot retrieve page", hExc);
    }
  }

  private List<_PersistentBean_> retrievePageCriteria(int retrieveSize, int startOfPage)
      throws HibernateException {
    try {
      $criteria.setMaxResults(retrieveSize);
      // first and last record is for check only (depending on booleans)
      $criteria.setFirstResult(startOfPage);
      @SuppressWarnings("unchecked")
      List<_PersistentBean_> page = $criteria.list();
      return page;
    }
    catch (ClassCastException ccExc) {
      throw new PersistenceExternalError("retrieved list was not a list of PersistentBean objects", ccExc);
    }
  }

  private List<_PersistentBean_> retrievePageQuery(int retrieveSize, int startOfPage)
      throws HibernateException {
    try {
      $query.setMaxResults(retrieveSize);
      // first and last record is for check only (depending on booleans)
      $query.setFirstResult(startOfPage);
      @SuppressWarnings("unchecked")
      List<_PersistentBean_> page = $query.list();
      return page;
    }
    catch (ClassCastException ccExc) {
      // Fixed: the original message misspelled "objects" as "objets", inconsistent with the
      // identical message in retrievePageCriteria.
      throw new PersistenceExternalError("retrieved list was not a list of PersistentBean objects", ccExc);
    }
  }

}
/* * Copyright (c) 2010-2014 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.component.wizard.resource.component.schemahandling; import com.evolveum.midpoint.schema.constants.ExpressionConstants; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.web.component.form.multivalue.MultiValueTextEditPanel; import com.evolveum.midpoint.web.component.util.SimplePanel; import com.evolveum.midpoint.web.component.wizard.WizardUtil; import com.evolveum.midpoint.web.component.wizard.resource.component.schemahandling.modal.MappingEditorDialog; import com.evolveum.midpoint.web.component.wizard.resource.dto.MappingTypeDto; import com.evolveum.midpoint.web.util.InfoTooltipBehavior; import com.evolveum.midpoint.web.util.WebMiscUtil; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import com.evolveum.prism.xml.ns._public.types_3.ItemPathType; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.EnumChoiceRenderer; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import java.util.List; /** * @author shood * */ public class ResourceActivationEditor extends 
SimplePanel<ResourceActivationDefinitionType>{ private static final Trace LOGGER = TraceManager.getTrace(ResourceActivationEditor.class); private static final String ID_EXISTENCE_FS = "existenceFetchStrategy"; private static final String ID_EXISTENCE_OUT = "existenceOutbound"; private static final String ID_EXISTENCE_IN = "existenceInbound"; private static final String ID_ADM_STATUS_FS = "admStatusFetchStrategy"; private static final String ID_ADM_STATUS_OUT = "admStatusOutbound"; private static final String ID_ADM_STATUS_IN = "admStatusInbound"; private static final String ID_VALID_FROM_FS = "validFromFetchStrategy"; private static final String ID_VALID_FROM_OUT = "validFromOutbound"; private static final String ID_VALID_FROM_IN = "validFromInbound"; private static final String ID_VALID_TO_FS = "validToFetchStrategy"; private static final String ID_VALID_TO_OUT = "validToOutbound"; private static final String ID_VALID_TO_IN = "validToInbound"; private static final String ID_MODAL_MAPPING = "mappingEditor"; private static final String ID_T_EX_FETCH = "existenceFetchStrategyTooltip"; private static final String ID_T_EX_OUT = "existenceOutboundTooltip"; private static final String ID_T_EX_IN = "existenceInboundTooltip"; private static final String ID_T_ADM_FETCH = "admStatusFetchStrategyTooltip"; private static final String ID_T_ADM_OUT = "admStatusOutboundTooltip"; private static final String ID_T_ADM_IN = "admStatusInboundTooltip"; private static final String ID_T_VALID_F_FETCH = "validFromFetchStrategyTooltip"; private static final String ID_T_VALID_F_OUT = "validFromOutboundTooltip"; private static final String ID_T_VALID_F_IN = "validFromInboundTooltip"; private static final String ID_T_VALID_T_FETCH = "validToFetchStrategyTooltip"; private static final String ID_T_VALID_T_OUT = "validToOutboundTooltip"; private static final String ID_T_VALID_T_IN = "validToInboundTooltip"; //Default mapping inbound/outbound sources/targets public static final String 
EXISTENCE_DEFAULT_SOURCE = "&" + ExpressionConstants.VAR_LEGAL.getLocalPart(); public static final String ADM_STATUS_OUT_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/administrativeStatus"; public static final String ADM_STATUS_OUT_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/administrativeStatus"; public static final String ADM_STATUS_IN_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/administrativeStatus"; public static final String ADM_STATUS_IN_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/administrativeStatus"; public static final String VALID_TO_OUT_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/validTo"; public static final String VALID_TO_OUT_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/validTo"; public static final String VALID_TO_IN_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/validTo"; public static final String VALID_TO_IN_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/validTo"; public static final String VALID_FROM_OUT_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/validFrom"; public static final String VALID_FROM_OUT_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/validFrom"; public static final String VALID_FROM_IN_SOURCE_DEFAULT = "&" + ExpressionConstants.VAR_PROJECTION.getLocalPart() + "/activation/validFrom"; public static final String VALID_FROM_IN_TARGET_DEFAULT = "&" + ExpressionConstants.VAR_FOCUS.getLocalPart() + "/activation/validFrom"; private boolean isInitialized = false; public ResourceActivationEditor(String id, IModel<ResourceActivationDefinitionType> model){ super(id, model); } @Override public IModel<ResourceActivationDefinitionType> getModel() { 
// Tail of the overridden getModel() — the signature precedes this chunk.
// Lazily creates the model object and applies the one-time default-mapping preparation.
IModel<ResourceActivationDefinitionType> activationModel = super.getModel();

if(activationModel.getObject() == null){
    activationModel.setObject(new ResourceActivationDefinitionType());
}

if(!isInitialized){
    // Fill empty mappings with default source/target paths exactly once per component lifetime.
    prepareActivationObject(activationModel.getObject());
    isInitialized = true;
}

return activationModel;
}

/**
 * Ensures every activation section (existence, administrative status, validFrom, validTo)
 * has its {@code ResourceBidirectionalMappingType} container, and seeds any "empty" mapping
 * in an existing container with the default source (and, where applicable, target) item path
 * for that section.
 * <p>
 * NOTE(review): "empty" is detected via {@code mapping.equals(new MappingType())}, i.e. the
 * generated equals() of a freshly constructed MappingType — confirm this matches what
 * {@code WizardUtil.createEmptyMapping()} produces, otherwise defaults are never applied.
 *
 * @param activation the activation definition to normalise; expected non-null (caller
 *                   guarantees this via getModel() above)
 */
private void prepareActivationObject(ResourceActivationDefinitionType activation){
    // --- existence: inbound only gets a default source ---
    if(activation.getExistence() == null){
        activation.setExistence(new ResourceBidirectionalMappingType());
    } else {
        for(MappingType mapping: activation.getExistence().getInbound()){
            if(mapping.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(EXISTENCE_DEFAULT_SOURCE));
                mapping.getSource().add(source);
            }
        }
    }

    // --- administrative status: both directions get default source and target ---
    if(activation.getAdministrativeStatus() == null){
        activation.setAdministrativeStatus(new ResourceBidirectionalMappingType());
    } else {
        for(MappingType outbound: activation.getAdministrativeStatus().getOutbound()){
            if(outbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(ADM_STATUS_OUT_SOURCE_DEFAULT));
                outbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(ADM_STATUS_OUT_TARGET_DEFAULT));
                outbound.setTarget(target);
            }
        }

        for(MappingType inbound: activation.getAdministrativeStatus().getInbound()){
            if(inbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(ADM_STATUS_IN_SOURCE_DEFAULT));
                inbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(ADM_STATUS_IN_TARGET_DEFAULT));
                inbound.setTarget(target);
            }
        }
    }

    // --- validFrom: both directions get default source and target ---
    if(activation.getValidFrom() == null){
        activation.setValidFrom(new ResourceBidirectionalMappingType());
    } else {
        for(MappingType outbound: activation.getValidFrom().getOutbound()){
            if(outbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(VALID_FROM_OUT_SOURCE_DEFAULT));
                outbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(VALID_FROM_OUT_TARGET_DEFAULT));
                outbound.setTarget(target);
            }
        }

        for(MappingType inbound: activation.getValidFrom().getInbound()){
            if(inbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(VALID_FROM_IN_SOURCE_DEFAULT));
                inbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(VALID_FROM_IN_TARGET_DEFAULT));
                inbound.setTarget(target);
            }
        }
    }

    // --- validTo: both directions get default source and target ---
    if(activation.getValidTo() == null){
        activation.setValidTo(new ResourceBidirectionalMappingType());
    } else {
        for(MappingType outbound: activation.getValidTo().getOutbound()){
            if(outbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(VALID_TO_OUT_SOURCE_DEFAULT));
                outbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(VALID_TO_OUT_TARGET_DEFAULT));
                outbound.setTarget(target);
            }
        }

        for(MappingType inbound: activation.getValidTo().getInbound()){
            if(inbound.equals(new MappingType())){
                MappingSourceDeclarationType source = new MappingSourceDeclarationType();
                source.setPath(new ItemPathType(VALID_TO_IN_SOURCE_DEFAULT));
                inbound.getSource().add(source);

                MappingTargetDeclarationType target = new MappingTargetDeclarationType();
                target.setPath(new ItemPathType(VALID_TO_IN_TARGET_DEFAULT));
                inbound.setTarget(target);
            }
        }
    }
}

/**
 * Builds the editor layout: one panel body (fetch-strategy drop-down plus outbound/inbound
 * mapping editors) per activation section, the info tooltips, and the shared mapping-editor
 * modal window.
 */
@Override
protected void initLayout(){
    prepareActivationPanelBody(ResourceActivationDefinitionType.F_EXISTENCE.getLocalPart(),
            ID_EXISTENCE_FS, ID_EXISTENCE_OUT, ID_EXISTENCE_IN);
    prepareActivationPanelBody(ResourceActivationDefinitionType.F_ADMINISTRATIVE_STATUS.getLocalPart(),
            ID_ADM_STATUS_FS, ID_ADM_STATUS_OUT, ID_ADM_STATUS_IN);
    prepareActivationPanelBody(ResourceActivationDefinitionType.F_VALID_FROM.getLocalPart(),
            ID_VALID_FROM_FS, ID_VALID_FROM_OUT, ID_VALID_FROM_IN);
    prepareActivationPanelBody(ResourceActivationDefinitionType.F_VALID_TO.getLocalPart(),
            ID_VALID_TO_FS, ID_VALID_TO_OUT, ID_VALID_TO_IN);

    // Info tooltips — three per section (fetch strategy / outbound / inbound).
    Label exFetchTooltip = new Label(ID_T_EX_FETCH);
    exFetchTooltip.add(new InfoTooltipBehavior());
    add(exFetchTooltip);

    Label exOutTooltip = new Label(ID_T_EX_OUT);
    exOutTooltip.add(new InfoTooltipBehavior());
    add(exOutTooltip);

    Label exInTooltip = new Label(ID_T_EX_IN);
    exInTooltip.add(new InfoTooltipBehavior());
    add(exInTooltip);

    Label admFetchTooltip = new Label(ID_T_ADM_FETCH);
    admFetchTooltip.add(new InfoTooltipBehavior());
    add(admFetchTooltip);

    Label admOutTooltip = new Label(ID_T_ADM_OUT);
    admOutTooltip.add(new InfoTooltipBehavior());
    add(admOutTooltip);

    Label admInTooltip = new Label(ID_T_ADM_IN);
    admInTooltip.add(new InfoTooltipBehavior());
    add(admInTooltip);

    Label validFromFetchTooltip = new Label(ID_T_VALID_F_FETCH);
    validFromFetchTooltip.add(new InfoTooltipBehavior());
    add(validFromFetchTooltip);

    Label validFromOutTooltip = new Label(ID_T_VALID_F_OUT);
    validFromOutTooltip.add(new InfoTooltipBehavior());
    add(validFromOutTooltip);

    Label validFromInTooltip = new Label(ID_T_VALID_F_IN);
    validFromInTooltip.add(new InfoTooltipBehavior());
    add(validFromInTooltip);

    Label validToFetchTooltip = new Label(ID_T_VALID_T_FETCH);
    validToFetchTooltip.add(new InfoTooltipBehavior());
    add(validToFetchTooltip);

    Label validToOutTooltip = new Label(ID_T_VALID_T_OUT);
    validToOutTooltip.add(new InfoTooltipBehavior());
    add(validToOutTooltip);

    Label validToInTooltip = new Label(ID_T_VALID_T_IN);
    validToInTooltip.add(new InfoTooltipBehavior());
    add(validToInTooltip);

    initModals();
}

/**
 * Adds one activation-section body to the page: a nullable fetch-strategy drop-down bound to
 * {@code <containerValue>.fetchStrategy}, plus outbound and inbound mapping list editors
 * bound to {@code <containerValue>.outbound} / {@code <containerValue>.inbound}.
 *
 * @param containerValue  local part of the activation container name (property-path prefix)
 * @param fetchStrategyId wicket id of the fetch-strategy drop-down
 * @param outboundId      wicket id of the outbound mapping list editor
 * @param inboundId       wicket id of the inbound mapping list editor
 */
private void prepareActivationPanelBody(String containerValue, String fetchStrategyId,
        String outboundId, String inboundId){
    DropDownChoice fetchStrategy = new DropDownChoice<>(fetchStrategyId,
            new PropertyModel<AttributeFetchStrategyType>(getModel(), containerValue + ".fetchStrategy"),
            WebMiscUtil.createReadonlyModelFromEnum(AttributeFetchStrategyType.class),
            new EnumChoiceRenderer<AttributeFetchStrategyType>(this));
    fetchStrategy.setNullValid(true);
    add(fetchStrategy);

    MultiValueTextEditPanel outbound = new MultiValueTextEditPanel<MappingType>(outboundId,
            new PropertyModel<List<MappingType>>(getModel(), containerValue + ".outbound"), false){

        @Override
        protected IModel<String> createTextModel(final IModel<MappingType> model) {
            // Row label: human-readable description of the mapping.
            return new Model<String>() {

                @Override
                public String getObject() {
                    return MappingTypeDto.createMappingLabel(model.getObject(), LOGGER,
                            getPageBase().getPrismContext(),
                            getString("MappingType.label.placeholder"),
                            getString("MultiValueField.nameNotSpecified"));
                }
            };
        }

        @Override
        protected MappingType createNewEmptyItem(){
            return WizardUtil.createEmptyMapping();
        }

        @Override
        protected void editPerformed(AjaxRequestTarget target, MappingType object){
            // false = outbound direction.
            mappingEditPerformed(target, object, false);
        }
    };
    add(outbound);

    MultiValueTextEditPanel inbound = new MultiValueTextEditPanel<MappingType>(inboundId,
            new PropertyModel<List<MappingType>>(getModel(), containerValue + ".inbound"), false){

        @Override
        protected IModel<String> createTextModel(final IModel<MappingType> model) {
            return new Model<String>() {

                @Override
                public String getObject() {
                    return MappingTypeDto.createMappingLabel(model.getObject(), LOGGER,
                            getPageBase().getPrismContext(),
                            getString("MappingType.label.placeholder"),
                            getString("MultiValueField.nameNotSpecified"));
                }
            };
        }

        @Override
        protected MappingType createNewEmptyItem(){
            return WizardUtil.createEmptyMapping();
        }

        @Override
        protected void editPerformed(AjaxRequestTarget target, MappingType object){
            // true = inbound direction.
            mappingEditPerformed(target, object, true);
        }
    };
    // Inbound panel is the ajax-refresh target, so it needs a markup id.
    inbound.setOutputMarkupId(true);
    add(inbound);
}

/**
 * Registers the shared mapping-editor modal dialog; on save it repaints this whole editor.
 */
private void initModals(){
    ModalWindow mappingEditor = new MappingEditorDialog(ID_MODAL_MAPPING, null){

        @Override
        public void updateComponents(AjaxRequestTarget target){
            target.add(ResourceActivationEditor.this);
        }
    };
    add(mappingEditor);
}

/**
 * Opens the mapping-editor dialog for the clicked mapping.
 *
 * @param target    current ajax request target
 * @param mapping   mapping to edit
 * @param isInbound true when the mapping comes from an inbound list
 */
private void mappingEditPerformed(AjaxRequestTarget target, MappingType mapping, boolean isInbound){
    MappingEditorDialog window = (MappingEditorDialog) get(ID_MODAL_MAPPING);
    window.updateModel(target, mapping, isInbound);
    window.show(target);
}
}
// Copyright 2004-present Facebook. All Rights Reserved.

package com.facebook.stetho.dumpapp.plugins;

import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.text.TextUtils;

import com.facebook.stetho.dumpapp.DumpUsageException;
import com.facebook.stetho.dumpapp.DumperContext;
import com.facebook.stetho.dumpapp.DumperPlugin;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import java.io.File;
import java.io.PrintStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Dumpapp plugin ({@code prefs}) that inspects and mutates the host application's
 * {@link SharedPreferences}. Supported sub-commands:
 * <ul>
 *   <li>{@code prefs print [pathPrefix [keyPrefix]]} — print matching keys and values</li>
 *   <li>{@code prefs write <path> <key> <type> <value...>} — write a single entry</li>
 * </ul>
 * Any other (or missing) sub-command prints usage.
 */
public class SharedPreferencesDumperPlugin implements DumperPlugin {

  private static final String XML_SUFFIX = ".xml";
  private static final String NAME = "prefs";

  private final Context mAppContext;

  public SharedPreferencesDumperPlugin(Context context) {
    // Keep only the application context so no Activity can be leaked by this plugin.
    mAppContext = context.getApplicationContext();
  }

  @Override
  public String getName() {
    return NAME;
  }

  @Override
  public void dump(DumperContext dumpContext) throws DumpUsageException {
    PrintStream writer = dumpContext.getStdout();
    List<String> args = dumpContext.getArgsAsList();

    // First argument selects the sub-command; consume it so the remainder are its options.
    String commandName = args.isEmpty() ? "" : args.remove(0);

    if (commandName.equals("print")) {
      doPrint(writer, args);
    } else if (commandName.equals("write")) {
      doWrite(args);
    } else {
      doUsage(writer);
    }
  }

  /**
   * Executes command to update one value in the shared preferences.
   *
   * @param args remaining arguments: {@code <path> <key> <type> <value...>}
   * @throws DumpUsageException if a required argument is missing or the type name is unknown
   */
  private void doWrite(List<String> args) throws DumpUsageException {
    String usagePrefix = "Usage: prefs write <path> <key> <type> <value>, where type is one of: ";
    Iterator<String> argsIter = args.iterator();
    String path = nextArg(argsIter, "Expected <path>");
    String key = nextArg(argsIter, "Expected <key>");
    String typeName = nextArg(argsIter, "Expected <type>");
    Type type = Type.of(typeName);
    if (type == null) {
      throw new DumpUsageException(
          Type.appendNamesList(new StringBuilder(usagePrefix), ", ").toString());
    }

    SharedPreferences sharedPreferences = getSharedPreferences(path);
    SharedPreferences.Editor editor = sharedPreferences.edit();
    switch (type) {
      case BOOLEAN:
        // parseXxx returns the primitive directly; valueOf would box and then auto-unbox.
        editor.putBoolean(key, Boolean.parseBoolean(nextArgValue(argsIter)));
        break;
      case INT:
        editor.putInt(key, Integer.parseInt(nextArgValue(argsIter)));
        break;
      case LONG:
        editor.putLong(key, Long.parseLong(nextArgValue(argsIter)));
        break;
      case FLOAT:
        editor.putFloat(key, Float.parseFloat(nextArgValue(argsIter)));
        break;
      case STRING:
        editor.putString(key, nextArgValue(argsIter));
        break;
      case SET:
        putStringSet(editor, key, argsIter);
        break;
    }
    // Synchronous commit() (not apply()) on purpose: the dumpapp invocation may end
    // immediately after this call, and the write must be durable by then.
    editor.commit();
  }

  /**
   * Returns the next argument or fails with the given usage message.
   */
  @Nonnull
  private static String nextArg(Iterator<String> iter, String messageIfNotPresent)
      throws DumpUsageException {
    if (!iter.hasNext()) {
      throw new DumpUsageException(messageIfNotPresent);
    }
    return iter.next();
  }

  /**
   * Returns the next argument, failing with "Expected &lt;value&gt;" if absent.
   */
  @Nonnull
  private static String nextArgValue(Iterator<String> iter) throws DumpUsageException {
    return nextArg(iter, "Expected <value>");
  }

  /**
   * Collects all remaining arguments into a string set and stores it under {@code key}.
   * String sets require API 11 (HONEYCOMB).
   */
  @TargetApi(Build.VERSION_CODES.HONEYCOMB)
  private static void putStringSet(
      SharedPreferences.Editor editor,
      String key,
      Iterator<String> remainingArgs) {
    HashSet<String> set = new HashSet<>();
    while (remainingArgs.hasNext()) {
      set.add(remainingArgs.next());
    }
    editor.putStringSet(key, set);
  }

  /**
   * Execute command to print all keys and values stored in the shared preferences which match
   * the optional given prefix.
   *
   * @param args optional: {@code [pathPrefix [keyPrefix]]}
   */
  private void doPrint(PrintStream writer, List<String> args) {
    // Shared preferences XML files live under <dataDir>/shared_prefs.
    String rootPath = mAppContext.getApplicationInfo().dataDir + "/shared_prefs";
    String offsetPrefix = args.isEmpty() ? "" : args.get(0);
    String keyPrefix = (args.size() > 1) ? args.get(1) : "";

    printRecursive(writer, rootPath, "", offsetPrefix, keyPrefix);
  }

  /**
   * Walks {@code rootPath} depth-first; every {@code *.xml} file whose path (relative to the
   * root) starts with {@code pathPrefix} is printed via {@link #printFile}.
   *
   * @param offsetPath path of the current entry relative to {@code rootPath} ("" for the root)
   */
  private void printRecursive(
      PrintStream writer,
      String rootPath,
      String offsetPath,
      String pathPrefix,
      String keyPrefix) {
    File file = new File(rootPath, offsetPath);
    if (file.isFile()) {
      if (offsetPath.endsWith(XML_SUFFIX)) {
        // Preferences name is the relative path minus the ".xml" suffix.
        int suffixLength = XML_SUFFIX.length();
        String prefsName = offsetPath.substring(0, offsetPath.length() - suffixLength);
        printFile(writer, prefsName, keyPrefix);
      }
    } else if (file.isDirectory()) {
      String[] children = file.list();
      if (children != null) {
        for (String child : children) {
          String childOffsetPath = TextUtils.isEmpty(offsetPath)
              ? child
              : (offsetPath + File.separator + child);
          if (childOffsetPath.startsWith(pathPrefix)) {
            printRecursive(writer, rootPath, childOffsetPath, pathPrefix, keyPrefix);
          }
        }
      }
    }
  }

  /**
   * Prints every entry of the named preferences file whose key starts with {@code keyPrefix}.
   */
  private void printFile(PrintStream writer, String prefsName, String keyPrefix) {
    writer.println(prefsName + ":");
    SharedPreferences preferences = getSharedPreferences(prefsName);
    for (Map.Entry<String, ?> entry : preferences.getAll().entrySet()) {
      if (entry.getKey().startsWith(keyPrefix)) {
        writer.println(" " + entry.getKey() + " = " + entry.getValue());
      }
    }
  }

  /**
   * Prints usage for both sub-commands.
   */
  private void doUsage(PrintStream writer) {
    final String cmdName = "dumpapp " + NAME;

    String usagePrefix = "Usage: " + cmdName + " ";
    String blankPrefix = " " + cmdName + " ";
    writer.println(usagePrefix + "<command> [command-options]");
    writer.println(usagePrefix + "print [pathPrefix [keyPrefix]]");
    writer.println(
        Type.appendNamesList(
            new StringBuilder(blankPrefix).append("write <path> <key> <"), "|")
            .append("> <value>"));
    writer.println();
    writer.println(cmdName + " print: Print all matching values from the shared preferences");
    writer.println();
    writer.println(cmdName + " write: Writes a value to the shared preferences");
  }

  private SharedPreferences getSharedPreferences(String name) {
    // NOTE(review): MODE_MULTI_PROCESS is deprecated on newer API levels; presumably used here
    // so reads reflect writes from the app's other processes — confirm before changing.
    return mAppContext.getSharedPreferences(name, Context.MODE_MULTI_PROCESS);
  }

  /**
   * Value types accepted by the {@code write} sub-command, keyed by their CLI name.
   */
  private enum Type {
    BOOLEAN("boolean"),
    INT("int"),
    LONG("long"),
    FLOAT("float"),
    STRING("string"),
    SET("set");

    private final String name;

    private Type(String name) {
      this.name = name;
    }

    /**
     * Maps a CLI type name to its enum constant, or {@code null} when unrecognized.
     */
    public static @Nullable Type of(String name) {
      for (Type type : values()) {
        if (type.name.equals(name)) {
          return type;
        }
      }
      return null;
    }

    /**
     * Appends all CLI type names, separated by {@code separator}, to {@code builder}.
     */
    public static StringBuilder appendNamesList(StringBuilder builder, String separator) {
      boolean isFirst = true;
      for (Type type : values()) {
        if (isFirst) {
          isFirst = false;
        } else {
          builder.append(separator);
        }
        builder.append(type.name);
      }
      return builder;
    }
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ayoayoboardgame;

import java.util.HashMap;
import java.util.Map;

/**
 * Minimax-based computer opponent for the ayo board game. Difficulty is selected by a
 * player-type name ("BEGINNER", "INTERMIDATE", "HARD", "VETERAN") which maps to a display
 * name and a search depth.
 *
 * @author Odigie Oseme Utibe | osemeodigie@yahoo.com | http://osemeodigie.com
 */
public class AIPlayer {

    // NOTE(review): public mutable fields — callers elsewhere appear to read/write these
    // directly, so they are kept as-is.
    public String Name;                 // display name of the AI persona
    public int PlayerRow;               // board row this AI plays (set to GameConstants.TOP_ROW)
    public boolean Thinking;            // UI flag: AI is currently computing a move
    public AyoGameBoard AIGameBoard; // a public GameBoard object for the AI to use...

    private Map<String, AIPlayerTypes> playerTypes; // type name -> (persona name, search depth)
    private int depthToUse;                         // minimax search depth for this difficulty
    private String playerType;                      // normalised (upper-case) type key in use
    private String thinkingText;                    // speech-bubble text shown while thinking
    private AyoGameManager gameManager;

    /**
     * Creates a beginner-level AI player.
     *
     * @param gameManager owning game manager (used for per-game settings such as seeds per house)
     */
    public AIPlayer(AyoGameManager gameManager){
        this.gameManager = gameManager;
        initializePlayer("beginner");
    }

    /**
     * Creates a new AI player
     * @param gameManager
     * @param AIName difficulty key (case-insensitive); unknown keys fall back to BEGINNER
     */
    public AIPlayer(AyoGameManager gameManager, String AIName){
        this.gameManager = gameManager;
        initializePlayer(AIName);
    }

    /**
     * Builds the difficulty table and applies the requested type (falling back to BEGINNER
     * when the key is unknown).
     */
    private void initializePlayer(String playerType){
        // Beginner : Normal : Hard : Very Hard
        playerType = playerType.toUpperCase();

        // Difficulty table. NOTE(review): "INTERMIDATE" is presumably a typo for
        // "INTERMEDIATE" — callers passing the correctly-spelled key silently get BEGINNER.
        // Fixing the key would change behavior for existing callers, so it is only flagged here.
        playerTypes = new HashMap<>();
        playerTypes.put("BEGINNER", new AIPlayerTypes("FEMI",1)); // Beginner
        playerTypes.put("INTERMIDATE", new AIPlayerTypes("KAYODE",5)); // Normal
        playerTypes.put("HARD", new AIPlayerTypes("MR. SHOLA",8)); // Hard
        playerTypes.put("VETERAN", new AIPlayerTypes("BABA IJEBU",11)); // Very Hard

        PlayerRow = GameConstants.TOP_ROW; // use the top row for the AI player...
        Thinking = false; // initial thinking state...

        if(playerTypes.containsKey(playerType)){
            depthToUse = playerTypes.get(playerType).AIDepth;
            Name = playerTypes.get(playerType).PlayerName;
            this.playerType = playerType;
        } else{
            // Default values for the AI player...
            depthToUse = playerTypes.get("BEGINNER").AIDepth;
            Name = playerTypes.get("BEGINNER").PlayerName;
            this.playerType = "BEGINNER";
        }

        initThinkingText(); // initialize the thinking speech bubble...
    }

    /**
     * Switches this AI to another difficulty (case-insensitive key; unknown keys fall back
     * to BEGINNER). Does not rebuild the thinking text.
     */
    public void setPlayerType(String playerType){
        playerType = playerType.toUpperCase();

        if(playerTypes.containsKey(playerType)){
            depthToUse = playerTypes.get(playerType).AIDepth;
            Name = playerTypes.get(playerType).PlayerName;
            this.playerType = playerType;
        } else{
            // Default values for the AI player...
            depthToUse = playerTypes.get("BEGINNER").AIDepth;
            Name = playerTypes.get("BEGINNER").PlayerName;
            this.playerType = "BEGINNER";
        }
    }

    /** @return the minimax search depth for the current difficulty */
    public int getAIDepth(){
        return this.depthToUse;
    }

    // used to duplicate a 2-dimensional array like the game board :)
    // The board is assumed to be 2 rows x 8 columns — TODO confirm against AyoGameBoard.
    private int[][] GetGameBoardDuplicate(int[][] GameBoard){
        // define an array to hold the copied game board..
        int[][] result = new int[2][8];
        for (int i = 0; i < 2; i++) {
            System.arraycopy(GameBoard[i], 0, result[i], 0, 8);
        }
        return result;
    }

    /**
     * Computes the best move on a private copy of the given board and animates it.
     *
     * @param bd the live game board; only read here, the animation call mutates it
     */
    public void animateComputerTurn(AyoGameBoard bd){
        // get the best possible move...
        // then play it as the computer...
        AIGameBoard = new AyoGameBoard();
        AIGameBoard.SetGameBoard(GetGameBoardDuplicate(bd.GetGameBoard()));
        AIGameBoard.initial_seeds_in_bin = gameManager.seedPerHouse;

        int best_bin = bestMove();
        bd.AnimatePlayTurn(0, best_bin-1); // bins are 1-based here, animation is 0-based
    }

    /**
     * Evaluates bins 1..6 with minimax and returns the 1-based bin that maximizes the AI's
     * score; ties are broken randomly for more natural play. Falls back to the first
     * non-empty bin if every evaluation reported an empty bin.
     * <p>
     * NOTE(review): {@code duplicateGameBoard = AIGameBoard} ALIASES the field rather than
     * creating a new board object; SetGameBoard then swaps a fresh copy of the (possibly
     * already mutated) cell array into the shared object. If MiniMaxFunction mutates the
     * board — PlayTurn suggests it does — state leaks between loop iterations. Confirm
     * against AyoGameBoard before changing; a fix would need a genuinely new board carrying
     * initial_seeds_in_bin as well.
     */
    public int bestMove() {
        int bestMove = 1;

        AyoGameBoard duplicateGameBoard = AIGameBoard;
        duplicateGameBoard.SetGameBoard(GetGameBoardDuplicate(AIGameBoard.GetGameBoard()));

        float currentMaximumScore = MiniMaxFunction(1, duplicateGameBoard, new Player(), new Player());
        for (int j = 2; j <= 6; j++) {
            AyoGameBoard duplicateGameBoard1 = AIGameBoard;
            duplicateGameBoard1.SetGameBoard(GetGameBoardDuplicate(AIGameBoard.GetGameBoard()));

            float f = MiniMaxFunction(j, duplicateGameBoard1, new Player(),new Player());

            // the Math random here is used to simulate a more realistic game play when there
            // is more than one choice (graph node) that gives the same maximum score...
            if (f > currentMaximumScore || (f == currentMaximumScore && (Math.random() < .5))) {
                currentMaximumScore = f; // The maximum score so far...
                bestMove = j; // The best move so far that maximizes the AI's score...
            }
        }

        // We must have picked up an empty bin by mistake.... :-(
        // and there are a lot of empty bins so we look for the first non-empty one we can find :-)
        if (currentMaximumScore == GameConstants.EMPTY_BIN) {
            for (int j = 1; j <= 6; j++) {
                AyoGameBoard duplicateGameBoard2 = AIGameBoard;
                duplicateGameBoard2.SetGameBoard(GetGameBoardDuplicate(AIGameBoard.GetGameBoard()));
                if (duplicateGameBoard2.GetGameBoard()[this.PlayerRow][j] > 0) {
                    return j;
                }
            }
        }
        return bestMove;
    }

    // This uses the standard minMax algorithm to search through the game tree....
    // This is based on the Greedy Algorithm method of searching through a graph...
    // Could optimize this later...
    //
    // Board cell conventions used below (per the original comments — TODO confirm against
    // AyoGameBoard): [0][7] = whose turn it is, [1][7] = remaining search depth,
    // [row][0] = captured-seeds score for that row's player.
    public float MiniMaxFunction(int selectedBin, AyoGameBoard GameBoard, Player player1, Player player2) {
        int player = GameBoard.GetGameBoard()[0][7]; // get the current player's turn....

        if (GameBoard.IsBinEmpty(player, selectedBin)) {
            return GameConstants.EMPTY_BIN; // We cannot grab an empty bin...
        }

        // NOTE(review): PlayTurn presumably mutates GameBoard; its return value `gm` is unused.
        float gm = GameBoard.PlayTurn(player, selectedBin);

        if (GameBoard.CheckBoardForWinners(false) != GameConstants.GAME_CONTINUES) {
            // looks like we have a winner, return the winner/draw, else keep playing...
            return (float)GameBoard.CheckBoardForWinners(false);
        }

        // check if the turn of player has changed...
        // This is used to decide whether to maximize the score or to minimize it...
        // NOTE(review): `change` is TRUE when the SAME player moves again (turn did NOT
        // change) — the name is misleading but kept as-is.
        boolean change = (GameBoard.GetGameBoard()[0][7] == player);
        player = GameBoard.GetGameBoard()[0][7];

        Player nextPlayer1;
        Player nextPlayer2;
        if (change) {
            nextPlayer1 = new Player();
            nextPlayer2 = player1;
        } else {
            // maintain the players' turns...
            nextPlayer1 = player1;
            nextPlayer2 = player2;
        }

        int depth = GameBoard.GetGameBoard()[1][7];

        // If at final search depth, then calculate the
        // max score that the current AI player will get...
        if (depth == 0) {
            return GameBoard.GetGameBoard()[this.PlayerRow][0] - GameBoard.GetGameBoard()[1-this.PlayerRow][0];
        }

        // human player, then minimize the AI Player's score...
        if (player != this.PlayerRow) {
            // NOTE(review): same aliasing pattern as bestMove() — see the note there.
            AyoGameBoard duplicateGameBoard = GameBoard;
            duplicateGameBoard.SetGameBoard(GetGameBoardDuplicate(GameBoard.GetGameBoard()));

            float currentMinimumScore = MiniMaxFunction(1, duplicateGameBoard, nextPlayer1, nextPlayer2);
            for (int i = 2; i <= 6; i++) {
                if (change) {
                    nextPlayer1 = new Player();
                }
                AyoGameBoard duplicateGameBoard1 = GameBoard;
                duplicateGameBoard1.SetGameBoard(GetGameBoardDuplicate(GameBoard.GetGameBoard()));

                float f = MiniMaxFunction(i, duplicateGameBoard1, nextPlayer1, nextPlayer2);

                // Minimize the minimum player 2 score and make sure the value is not an empty bin
                if((player2.minValue > f) && (f > GameConstants.EMPTY_BIN)){
                    return f;
                }

                // Minimize the Maximum player 1 score and make sure the value is not an empty bin
                if ((player1.maxValue > f) && (f > GameConstants.EMPTY_BIN)) {
                    player1.maxValue = f;
                }

                // NOTE(review): the 20 + EMPTY_BIN / EMPTY_BIN + 10 thresholds are magic
                // offsets above the empty-bin sentinel — intent unclear, verify with the author.
                if (((f < currentMinimumScore) && (f > 20 + GameConstants.EMPTY_BIN))
                        || (currentMinimumScore <= GameConstants.EMPTY_BIN + 10)) {
                    currentMinimumScore = f;
                }
            }

            if (currentMinimumScore == GameConstants.EMPTY_BIN) {
                // There must be an error somewhere in the minMax method...
                // We mistakenly picked an empty bin...
                return GameConstants.EMPTY_BIN; // We cannot grab an empty bin...
            }
            return currentMinimumScore;
        } else { // AIPlayer, maximize the AI's scores...
            AyoGameBoard duplicateGameBoard = GameBoard;
            duplicateGameBoard.SetGameBoard(GetGameBoardDuplicate(GameBoard.GetGameBoard()));

            float currentMaximumScore = MiniMaxFunction(1, duplicateGameBoard, nextPlayer1, nextPlayer2);
            for (int i = 2; i <= 6; i++) {
                if (change) {
                    nextPlayer1 = new Player();
                }
                AyoGameBoard duplicateGameBoard1 = GameBoard;
                duplicateGameBoard1.SetGameBoard(GetGameBoardDuplicate(GameBoard.GetGameBoard()));

                float f = MiniMaxFunction(i, duplicateGameBoard1, nextPlayer1, nextPlayer2);

                if((player2.maxValue < f) && (f > GameConstants.EMPTY_BIN)){
                    return f;
                }

                if ((player1.minValue < f) && (f > GameConstants.EMPTY_BIN)) {
                    player1.minValue = f;
                }

                if (f > currentMaximumScore) {
                    currentMaximumScore = f;
                }
            }

            if (currentMaximumScore == GameConstants.EMPTY_BIN) {
                // There must be an error somewhere in the minMax method...
                // We mistakenly picked an empty bin...
                return GameConstants.EMPTY_BIN; // We cannot grab an empty bin...
            }
            return currentMaximumScore;
        }
    }

    // The method that gets the final greedy algorithm heuristic value for the minMax function....
    // deprecated...
    public int MaxScoreObtainableFromPlay(AyoGameBoard GameBoard){
        return GameBoard.GetGameBoard()[this.PlayerRow][0] - GameBoard.GetGameBoard()[1-this.PlayerRow][0];
    }

    /**
     * This returns the player type of this AI player...
     * @return playerType
     */
    public String getPlayerType(){
        return playerType;
    }

    /** Replaces the speech-bubble text shown while the AI is thinking. */
    public void SetThinkingText(String newText){
        this.thinkingText = newText;
    }

    /**
     * Picks the persona-specific thinking text (all currently empty).
     * NOTE(review): case "IYA BOSE" never matches — the configured hard-level persona is
     * "MR. SHOLA". The default label falls through to the FEMI case, so behavior is
     * unaffected while all texts are "".
     */
    private void initThinkingText(){
        switch(this.Name){
            default:
            case "FEMI":
                this.thinkingText = "";
                break;
            case "KAYODE":
                this.thinkingText = "";
                break;
            case "IYA BOSE":
                this.thinkingText = "";
                break;
            case "BABA IJEBU":
                this.thinkingText = "";
                break;
        }
    }
}
/**
 */
package org.tud.inf.st.mbt.data.impl;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
import org.eclipse.emf.ecore.plugin.EcorePlugin;
import org.tud.inf.st.mbt.data.*;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Factory</b>.
 * <p>
 * NOTE: this file is EMF-generated (every member is tagged {@code @generated}); hand edits
 * outside user-doc regions will be lost when the model is regenerated.
 * <!-- end-user-doc -->
 * @generated
 */
public class DataFactoryImpl extends EFactoryImpl implements DataFactory {
	/**
	 * Creates the default factory implementation.
	 * <!-- begin-user-doc -->
	 * Reuses a factory already registered for {@link DataPackage#eNS_URI} if present.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public static DataFactory init() {
		try {
			DataFactory theDataFactory = (DataFactory)EPackage.Registry.INSTANCE.getEFactory(DataPackage.eNS_URI);
			if (theDataFactory != null) {
				return theDataFactory;
			}
		}
		catch (Exception exception) {
			EcorePlugin.INSTANCE.log(exception);
		}
		return new DataFactoryImpl();
	}

	/**
	 * Creates an instance of the factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataFactoryImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Dispatches on the classifier id to the matching createXxx() method.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public EObject create(EClass eClass) {
		switch (eClass.getClassifierID()) {
			case DataPackage.DATA_STRUCTURE: return createDataStructure();
			case DataPackage.DATA_LEAF: return createDataLeaf();
			case DataPackage.DATA_CLASS: return createDataClass();
			case DataPackage.DATA_VALUE: return createDataValue();
			case DataPackage.DATA_BINDING: return createDataBinding();
			case DataPackage.STEP_DATA_BINDING: return createStepDataBinding();
			case DataPackage.DATA_SCENARIO: return createDataScenario();
			case DataPackage.DATA_RANGE: return createDataRange();
			case DataPackage.DATA_BAG: return createDataBag();
			case DataPackage.TYPED_DATA_CLASS: return createTypedDataClass();
			case DataPackage.INT_DATA_VALUE: return createIntDataValue();
			case DataPackage.SUBSTITUTABLE_INT_RANGE: return createSubstitutableIntRange();
			case DataPackage.LONG_DATA_VALUE: return createLongDataValue();
			case DataPackage.BOOL_DATA_VALUE: return createBoolDataValue();
			case DataPackage.FLOAT_DATA_VALUE: return createFloatDataValue();
			default:
				throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
		}
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataStructure createDataStructure() {
		DataStructureImpl dataStructure = new DataStructureImpl();
		return dataStructure;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataLeaf createDataLeaf() {
		DataLeafImpl dataLeaf = new DataLeafImpl();
		return dataLeaf;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataClass createDataClass() {
		DataClassImpl dataClass = new DataClassImpl();
		return dataClass;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataValue createDataValue() {
		DataValueImpl dataValue = new DataValueImpl();
		return dataValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataBinding createDataBinding() {
		DataBindingImpl dataBinding = new DataBindingImpl();
		return dataBinding;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public StepDataBinding createStepDataBinding() {
		StepDataBindingImpl stepDataBinding = new StepDataBindingImpl();
		return stepDataBinding;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataScenario createDataScenario() {
		DataScenarioImpl dataScenario = new DataScenarioImpl();
		return dataScenario;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataRange createDataRange() {
		DataRangeImpl dataRange = new DataRangeImpl();
		return dataRange;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataBag createDataBag() {
		DataBagImpl dataBag = new DataBagImpl();
		return dataBag;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public TypedDataClass createTypedDataClass() {
		TypedDataClassImpl typedDataClass = new TypedDataClassImpl();
		return typedDataClass;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IntDataValue createIntDataValue() {
		IntDataValueImpl intDataValue = new IntDataValueImpl();
		return intDataValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public SubstitutableIntRange createSubstitutableIntRange() {
		SubstitutableIntRangeImpl substitutableIntRange = new SubstitutableIntRangeImpl();
		return substitutableIntRange;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public LongDataValue createLongDataValue() {
		LongDataValueImpl longDataValue = new LongDataValueImpl();
		return longDataValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BoolDataValue createBoolDataValue() {
		BoolDataValueImpl boolDataValue = new BoolDataValueImpl();
		return boolDataValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public FloatDataValue createFloatDataValue() {
		FloatDataValueImpl floatDataValue = new FloatDataValueImpl();
		return floatDataValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataPackage getDataPackage() {
		return (DataPackage)getEPackage();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @deprecated
	 * @generated
	 */
	@Deprecated
	public static DataPackage getPackage() {
		return DataPackage.eINSTANCE;
	}

} //DataFactoryImpl
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: BseFriendsInfo.proto

package com.xinqihd.sns.gameserver.proto;

// Outer wrapper class emitted by protoc (protobuf 2.x "GeneratedMessage"
// runtime) for the BseFriendsInfo message.  Do not hand-edit: regenerate
// from BseFriendsInfo.proto instead.
public final class XinqiBseFriendsInfo {
  private XinqiBseFriendsInfo() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  // Read-only accessor interface implemented by both the message and its Builder.
  public interface BseFriendsInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 friBlood = 1;
    boolean hasFriBlood();
    int getFriBlood();

    // required int32 friThew = 2;
    boolean hasFriThew();
    int getFriThew();

    // required int32 friDamage = 3;
    boolean hasFriDamage();
    int getFriDamage();

    // required int32 friSkin = 4;
    boolean hasFriSkin();
    int getFriSkin();

    // required string friName = 5;
    boolean hasFriName();
    String getFriName();
  }
  // Immutable message: a friend's blood, thew, damage, skin and name.
  public static final class BseFriendsInfo extends
      com.google.protobuf.GeneratedMessage
      implements BseFriendsInfoOrBuilder {
    // Use BseFriendsInfo.newBuilder() to construct.
    private BseFriendsInfo(Builder builder) {
      super(builder);
    }
    private BseFriendsInfo(boolean noInit) {}

    private static final BseFriendsInfo defaultInstance;
    public static BseFriendsInfo getDefaultInstance() {
      return defaultInstance;
    }

    public BseFriendsInfo getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_fieldAccessorTable;
    }

    // bitField0_ tracks which of the required fields have been set (one bit per field).
    private int bitField0_;
    // required int32 friBlood = 1;
    public static final int FRIBLOOD_FIELD_NUMBER = 1;
    private int friBlood_;
    public boolean hasFriBlood() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getFriBlood() {
      return friBlood_;
    }

    // required int32 friThew = 2;
    public static final int FRITHEW_FIELD_NUMBER = 2;
    private int friThew_;
    public boolean hasFriThew() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getFriThew() {
      return friThew_;
    }

    // required int32 friDamage = 3;
    public static final int FRIDAMAGE_FIELD_NUMBER = 3;
    private int friDamage_;
    public boolean hasFriDamage() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public int getFriDamage() {
      return friDamage_;
    }

    // required int32 friSkin = 4;
    public static final int FRISKIN_FIELD_NUMBER = 4;
    private int friSkin_;
    public boolean hasFriSkin() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public int getFriSkin() {
      return friSkin_;
    }

    // required string friName = 5;
    // Stored as either a String or a ByteString; lazily decoded/encoded on access.
    public static final int FRINAME_FIELD_NUMBER = 5;
    private java.lang.Object friName_;
    public boolean hasFriName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public String getFriName() {
      java.lang.Object ref = friName_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          friName_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getFriNameBytes() {
      java.lang.Object ref = friName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        friName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      friBlood_ = 0;
      friThew_ = 0;
      friDamage_ = 0;
      friSkin_ = 0;
      friName_ = "";
    }
    // Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All five fields are "required" in the .proto, so each must be present.
      if (!hasFriBlood()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFriThew()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFriDamage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFriSkin()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFriName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, friBlood_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, friThew_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, friDamage_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt32(4, friSkin_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getFriNameBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, friBlood_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, friThew_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, friDamage_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, friSkin_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getFriNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Static parse helpers covering the standard protobuf input sources.
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Mutable builder for BseFriendsInfo; mirrors the message's fields and bit mask.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_fieldAccessorTable;
      }

      // Construct using com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        friBlood_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        friThew_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        friDamage_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        friSkin_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        friName_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.getDescriptor();
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo getDefaultInstanceForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.getDefaultInstance();
      }

      // build() throws if any required field is missing; buildPartial() does not.
      public com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo build() {
        com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo buildPartial() {
        com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo result = new com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.friBlood_ = friBlood_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.friThew_ = friThew_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.friDamage_ = friDamage_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.friSkin_ = friSkin_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.friName_ = friName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo) {
          return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo other) {
        if (other == com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.getDefaultInstance()) return this;
        if (other.hasFriBlood()) {
          setFriBlood(other.getFriBlood());
        }
        if (other.hasFriThew()) {
          setFriThew(other.getFriThew());
        }
        if (other.hasFriDamage()) {
          setFriDamage(other.getFriDamage());
        }
        if (other.hasFriSkin()) {
          setFriSkin(other.getFriSkin());
        }
        if (other.hasFriName()) {
          setFriName(other.getFriName());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasFriBlood()) {
          return false;
        }
        if (!hasFriThew()) {
          return false;
        }
        if (!hasFriDamage()) {
          return false;
        }
        if (!hasFriSkin()) {
          return false;
        }
        if (!hasFriName()) {
          return false;
        }
        return true;
      }

      // Reads fields by wire tag until end-of-stream (tag 0) or an unparseable field.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              friBlood_ = input.readInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              friThew_ = input.readInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              friDamage_ = input.readInt32();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              friSkin_ = input.readInt32();
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              friName_ = input.readBytes();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required int32 friBlood = 1;
      private int friBlood_ ;
      public boolean hasFriBlood() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getFriBlood() {
        return friBlood_;
      }
      public Builder setFriBlood(int value) {
        bitField0_ |= 0x00000001;
        friBlood_ = value;
        onChanged();
        return this;
      }
      public Builder clearFriBlood() {
        bitField0_ = (bitField0_ & ~0x00000001);
        friBlood_ = 0;
        onChanged();
        return this;
      }

      // required int32 friThew = 2;
      private int friThew_ ;
      public boolean hasFriThew() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getFriThew() {
        return friThew_;
      }
      public Builder setFriThew(int value) {
        bitField0_ |= 0x00000002;
        friThew_ = value;
        onChanged();
        return this;
      }
      public Builder clearFriThew() {
        bitField0_ = (bitField0_ & ~0x00000002);
        friThew_ = 0;
        onChanged();
        return this;
      }

      // required int32 friDamage = 3;
      private int friDamage_ ;
      public boolean hasFriDamage() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public int getFriDamage() {
        return friDamage_;
      }
      public Builder setFriDamage(int value) {
        bitField0_ |= 0x00000004;
        friDamage_ = value;
        onChanged();
        return this;
      }
      public Builder clearFriDamage() {
        bitField0_ = (bitField0_ & ~0x00000004);
        friDamage_ = 0;
        onChanged();
        return this;
      }

      // required int32 friSkin = 4;
      private int friSkin_ ;
      public boolean hasFriSkin() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public int getFriSkin() {
        return friSkin_;
      }
      public Builder setFriSkin(int value) {
        bitField0_ |= 0x00000008;
        friSkin_ = value;
        onChanged();
        return this;
      }
      public Builder clearFriSkin() {
        bitField0_ = (bitField0_ & ~0x00000008);
        friSkin_ = 0;
        onChanged();
        return this;
      }

      // required string friName = 5;
      private java.lang.Object friName_ = "";
      public boolean hasFriName() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public String getFriName() {
        java.lang.Object ref = friName_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          friName_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setFriName(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        friName_ = value;
        onChanged();
        return this;
      }
      public Builder clearFriName() {
        bitField0_ = (bitField0_ & ~0x00000010);
        friName_ = getDefaultInstance().getFriName();
        onChanged();
        return this;
      }
      void setFriName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000010;
        friName_ = value;
        onChanged();
      }

      // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BseFriendsInfo)
    }

    static {
      defaultInstance = new BseFriendsInfo(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BseFriendsInfo)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto of BseFriendsInfo.proto (octal-escaped bytes).
    java.lang.String[] descriptorData = {
      "\n\024BseFriendsInfo.proto\022 com.xinqihd.sns." +
      "gameserver.proto\"h\n\016BseFriendsInfo\022\020\n\010fr" +
      "iBlood\030\001 \002(\005\022\017\n\007friThew\030\002 \002(\005\022\021\n\tfriDama" +
      "ge\030\003 \002(\005\022\017\n\007friSkin\030\004 \002(\005\022\017\n\007friName\030\005 \002" +
      "(\tB\025B\023XinqiBseFriendsInfo"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_com_xinqihd_sns_gameserver_proto_BseFriendsInfo_descriptor,
              new java.lang.String[] { "FriBlood", "FriThew", "FriDamage", "FriSkin", "FriName", },
              com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.class,
              com.xinqihd.sns.gameserver.proto.XinqiBseFriendsInfo.BseFriendsInfo.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
/*========================================================================= * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * more patents listed at http://www.pivotal.io/patents. *========================================================================= */ package com.gemstone.gemfire.internal.cache.tier.sockets; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import com.gemstone.gemfire.InternalGemFireError; import com.gemstone.gemfire.internal.Version; import com.gemstone.gemfire.internal.cache.tier.Command; import com.gemstone.gemfire.internal.cache.tier.MessageType; import com.gemstone.gemfire.internal.cache.tier.sockets.command.AddPdxEnum; import com.gemstone.gemfire.internal.cache.tier.sockets.command.AddPdxType; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ClearRegion; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ClientReady; import com.gemstone.gemfire.internal.cache.tier.sockets.command.CloseConnection; import com.gemstone.gemfire.internal.cache.tier.sockets.command.CommitCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ContainsKey; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ContainsKey66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.CreateRegion; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Destroy; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Destroy65; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Destroy70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.DestroyRegion; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteFunction; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteFunction65; import 
com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteFunction66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteFunction70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteRegionFunction; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteRegionFunction65; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteRegionFunction66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.ExecuteRegionFunctionSingleHop; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GatewayReceiverCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Get70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetAll; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetAll651; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetAll70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetAllForRI; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetAllWithCallback; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetClientPRMetadataCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetClientPRMetadataCommand66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetClientPartitionAttributesCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetClientPartitionAttributesCommand66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetEntry70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetEntryCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetFunctionAttribute; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPDXEnumById; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPDXIdForEnum; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPDXIdForType; import 
com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPDXTypeById; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPdxEnums70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.GetPdxTypes70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Invalid; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Invalidate; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Invalidate70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.KeySet; import com.gemstone.gemfire.internal.cache.tier.sockets.command.MakePrimary; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PeriodicAck; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Ping; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Put; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Put61; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Put65; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Put70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PutAll; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PutAll70; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PutAll80; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PutAllWithCallback; import com.gemstone.gemfire.internal.cache.tier.sockets.command.PutUserCredentials; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Query651; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterDataSerializers; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInstantiators; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInterest; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInterest61; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInterestList; import 
com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInterestList61; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RegisterInterestList66; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RemoveAll; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RemoveUserAuth; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Request; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RequestEventValue; import com.gemstone.gemfire.internal.cache.tier.sockets.command.RollbackCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.Size; import com.gemstone.gemfire.internal.cache.tier.sockets.command.TXFailoverCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.TXSynchronizationCommand; import com.gemstone.gemfire.internal.cache.tier.sockets.command.UnregisterInterest; import com.gemstone.gemfire.internal.cache.tier.sockets.command.UnregisterInterestList; import com.gemstone.gemfire.internal.cache.tier.sockets.command.UpdateClientNotification; /** * A <code>CommandInitializer</code> maintains version specific commands map. * * @since 5.7 */ public class CommandInitializer { static Map<Version,Map<Integer, Command>> ALL_COMMANDS; static { initializeAllCommands(); } /** * Register a new command with the system. * @param messageType - An ordinal for this message. This must be something defined in * MessageType that has not already been allocated to a different command. * @param versionToNewCommand The command to register, for different versions. The key is * the earliest version for which this command class is valid (starting with GFE_57). * The value is the command object for clients starting with that version. 
*/ public static void registerCommand(int messageType, Map<Version, Command> versionToNewCommand) { Command command = null; //Iterate through all the gemfire versions, and //add a command to the map for that version for(Map.Entry<Version, Map<Integer, Command>> entry : ALL_COMMANDS.entrySet()) { Version version = entry.getKey(); //Get the current set of commands for this version. Map<Integer, Command> commandMap = entry.getValue(); //See if we have a new command to insert into this map. Otherwise, keep using the command we have //already read Command newerVersion = versionToNewCommand.get(version); if(newerVersion != null) { command = newerVersion; } if(command != null) { Command oldCommand = commandMap.get(messageType); if(oldCommand != null && oldCommand != command) { throw new InternalGemFireError("Command is already defined int the map for message Type " + MessageType.getString(messageType) + ". Old Value=" + commandMap.get(messageType) + ", newValue=" + command + ", version=" + version); } commandMap.put(messageType, command); } } } private static void initializeAllCommands() { ALL_COMMANDS = new LinkedHashMap<Version,Map<Integer, Command>>(); // Initialize the GFE 5.7 commands Map<Integer, Command> gfe57Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_57, gfe57Commands); gfe57Commands.put(MessageType.PING,Ping.getCommand()); gfe57Commands.put(MessageType.REQUEST,Request.getCommand()); gfe57Commands.put(MessageType.PUT,Put.getCommand()); gfe57Commands.put(MessageType.PUTALL,PutAll.getCommand()); gfe57Commands.put(MessageType.DESTROY,Destroy.getCommand()); gfe57Commands.put(MessageType.QUERY,com.gemstone.gemfire.internal.cache.tier.sockets.command.Query.getCommand()); gfe57Commands.put(MessageType.CLEAR_REGION,ClearRegion.getCommand()); gfe57Commands.put(MessageType.DESTROY_REGION,DestroyRegion.getCommand()); gfe57Commands.put(MessageType.REGISTER_INTEREST,RegisterInterest.getCommand()); 
gfe57Commands.put(MessageType.UNREGISTER_INTEREST,UnregisterInterest.getCommand()); gfe57Commands.put(MessageType.REGISTER_INTEREST_LIST,RegisterInterestList.getCommand()); gfe57Commands.put(MessageType.UNREGISTER_INTEREST_LIST,UnregisterInterestList.getCommand()); gfe57Commands.put(MessageType.KEY_SET,KeySet.getCommand()); gfe57Commands.put(MessageType.CONTAINS_KEY,ContainsKey.getCommand()); gfe57Commands.put(MessageType.CREATE_REGION,CreateRegion.getCommand()); gfe57Commands.put(MessageType.MAKE_PRIMARY,MakePrimary.getCommand()); gfe57Commands.put(MessageType.PERIODIC_ACK,PeriodicAck.getCommand()); gfe57Commands.put(MessageType.REGISTER_INSTANTIATORS,RegisterInstantiators.getCommand()); gfe57Commands.put(MessageType.UPDATE_CLIENT_NOTIFICATION,UpdateClientNotification.getCommand()); gfe57Commands.put(MessageType.CLOSE_CONNECTION,CloseConnection.getCommand()); gfe57Commands.put(MessageType.CLIENT_READY,ClientReady.getCommand()); gfe57Commands.put(MessageType.INVALID,Invalid.getCommand()); gfe57Commands.put(MessageType.GET_ALL,GetAll.getCommand()); // Initialize the GFE 5.8 commands example Map<Integer, Command> gfe58Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_58, gfe58Commands); gfe58Commands.putAll(ALL_COMMANDS.get(Version.GFE_57)); gfe58Commands.put(MessageType.EXECUTE_REGION_FUNCTION,ExecuteRegionFunction.getCommand()); gfe58Commands.put(MessageType.EXECUTE_FUNCTION,ExecuteFunction.getCommand()); // Initialize the GFE 6.0.3 commands map Map<Integer, Command> gfe603Commands = new HashMap<Integer, Command>(); gfe603Commands.putAll(ALL_COMMANDS.get(Version.GFE_58)); ALL_COMMANDS.put(Version.GFE_603, gfe603Commands); // Initialize the GFE 6.1 commands Map<Integer, Command> gfe61Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_61, gfe61Commands); gfe61Commands.putAll(ALL_COMMANDS.get(Version.GFE_603)); gfe61Commands.put(MessageType.REGISTER_INTEREST,RegisterInterest61.getCommand()); 
gfe61Commands.put(MessageType.REGISTER_INTEREST_LIST,RegisterInterestList61.getCommand()); gfe61Commands.put(MessageType.REQUEST_EVENT_VALUE,RequestEventValue.getCommand()); gfe61Commands.put(MessageType.PUT,Put61.getCommand()); gfe61Commands.put(MessageType.REGISTER_DATASERIALIZERS,RegisterDataSerializers.getCommand()); // Initialize the GFE 6.5 commands Map<Integer, Command> gfe65Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_65, gfe65Commands); gfe65Commands.putAll(ALL_COMMANDS.get(Version.GFE_61)); gfe65Commands.put(MessageType.DESTROY,Destroy65.getCommand()); gfe65Commands.put(MessageType.PUT,Put65.getCommand()); gfe65Commands.put(MessageType.EXECUTE_REGION_FUNCTION,ExecuteRegionFunction65.getCommand()); gfe65Commands.put(MessageType.EXECUTE_FUNCTION,ExecuteFunction65.getCommand()); gfe65Commands.put(MessageType.GET_CLIENT_PR_METADATA,GetClientPRMetadataCommand.getCommand()); gfe65Commands.put(MessageType.GET_CLIENT_PARTITION_ATTRIBUTES,GetClientPartitionAttributesCommand.getCommand()); gfe65Commands.put(MessageType.USER_CREDENTIAL_MESSAGE, PutUserCredentials.getCommand()); gfe65Commands.put(MessageType.REMOVE_USER_AUTH, RemoveUserAuth.getCommand()); gfe65Commands.put(MessageType.EXECUTE_REGION_FUNCTION_SINGLE_HOP,ExecuteRegionFunctionSingleHop.getCommand()); // Initialize the GFE 6.5.1 commands Map<Integer, Command> gfe651Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_651, gfe651Commands); gfe651Commands.putAll(ALL_COMMANDS.get(Version.GFE_65)); gfe651Commands.put(MessageType.QUERY_WITH_PARAMETERS, Query651.getCommand()); // Initialize the GFE 6.5.1.6 commands Map<Integer, Command> gfe6516Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_6516, gfe6516Commands); gfe6516Commands.putAll(ALL_COMMANDS.get(Version.GFE_651)); gfe6516Commands.put(MessageType.GET_ALL,GetAll651.getCommand()); gfe6516Commands.put(MessageType.GET_CLIENT_PR_METADATA,GetClientPRMetadataCommand66.getCommand()); // 
Initialize the GFE 6.6 commands Map<Integer, Command> gfe66Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_66, gfe66Commands); gfe66Commands.putAll(ALL_COMMANDS.get(Version.GFE_6516)); gfe66Commands.put(MessageType.ADD_PDX_TYPE, AddPdxType.getCommand()); gfe66Commands.put(MessageType.GET_PDX_ID_FOR_TYPE, GetPDXIdForType.getCommand()); gfe66Commands.put(MessageType.GET_PDX_TYPE_BY_ID, GetPDXTypeById.getCommand()); gfe66Commands.put(MessageType.SIZE,Size.getCommand()); gfe66Commands.put(MessageType.INVALIDATE,Invalidate.getCommand()); gfe66Commands.put(MessageType.COMMIT,CommitCommand.getCommand()); gfe66Commands.put(MessageType.ROLLBACK, RollbackCommand.getCommand()); gfe66Commands.put(MessageType.TX_FAILOVER, TXFailoverCommand.getCommand()); gfe66Commands.put(MessageType.GET_ENTRY, GetEntryCommand.getCommand()); gfe66Commands.put(MessageType.TX_SYNCHRONIZATION, TXSynchronizationCommand.getCommand()); gfe66Commands.put(MessageType.GET_CLIENT_PARTITION_ATTRIBUTES, GetClientPartitionAttributesCommand66.getCommand()); gfe66Commands.put(MessageType.REGISTER_INTEREST_LIST, RegisterInterestList66.getCommand()); gfe66Commands.put(MessageType.GET_FUNCTION_ATTRIBUTES, GetFunctionAttribute.getCommand()); gfe66Commands.put(MessageType.EXECUTE_REGION_FUNCTION, ExecuteRegionFunction66.getCommand()); gfe66Commands.put(MessageType.EXECUTE_FUNCTION, ExecuteFunction66.getCommand()); gfe66Commands.put(MessageType.GET_ALL_FOR_RI, GetAllForRI.getCommand()); //TODO Pushkar Put it into newer version gfe66Commands.put(MessageType.GATEWAY_RECEIVER_COMMAND, GatewayReceiverCommand.getCommand()); gfe66Commands.put(MessageType.CONTAINS_KEY, ContainsKey66.getCommand()); // Initialize the GFE 6.6.2 commands Map<Integer, Command> gfe662Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_662, gfe662Commands); gfe662Commands.putAll(ALL_COMMANDS.get(Version.GFE_66)); gfe662Commands.put(MessageType.ADD_PDX_ENUM, AddPdxEnum.getCommand()); 
gfe662Commands.put(MessageType.GET_PDX_ID_FOR_ENUM, GetPDXIdForEnum.getCommand()); gfe662Commands.put(MessageType.GET_PDX_ENUM_BY_ID, GetPDXEnumById.getCommand()); // Initialize the GFE 6.6.2.2 commands (same commands as the GFE 6.6.2 commands) // The SERVER_TO_CLIENT_PING message was added, but it doesn't need to be registered here ALL_COMMANDS.put(Version.GFE_6622, gfe662Commands); // Initialize the GFE 70 commands Map<Integer, Command> gfe70Commands = new HashMap<Integer, Command>(); ALL_COMMANDS.put(Version.GFE_70, gfe70Commands); gfe70Commands.putAll(ALL_COMMANDS.get(Version.GFE_662)); gfe70Commands.remove(MessageType.GET_ALL_FOR_RI); gfe70Commands.put(MessageType.REQUEST, Get70.getCommand()); gfe70Commands.put(MessageType.GET_ENTRY, GetEntry70.getCommand()); gfe70Commands.put(MessageType.GET_ALL_70, GetAll70.getCommand()); gfe70Commands.put(MessageType.PUTALL, PutAll70.getCommand()); gfe70Commands.put(MessageType.PUT, Put70.getCommand()); gfe70Commands.put(MessageType.DESTROY, Destroy70.getCommand()); gfe70Commands.put(MessageType.INVALIDATE, Invalidate70.getCommand()); gfe70Commands.put(MessageType.GET_PDX_TYPES, GetPdxTypes70.getCommand()); gfe70Commands.put(MessageType.GET_PDX_ENUMS, GetPdxEnums70.getCommand()); gfe70Commands.put(MessageType.EXECUTE_FUNCTION, ExecuteFunction70.getCommand()); Map<Integer, Command> gfe701Commands = new HashMap<Integer, Command>(); gfe701Commands.putAll(gfe70Commands); ALL_COMMANDS.put(Version.GFE_701, gfe701Commands); Map<Integer, Command> gfe71Commands = new HashMap<Integer, Command>(); gfe71Commands.putAll(ALL_COMMANDS.get(Version.GFE_701)); ALL_COMMANDS.put(Version.GFE_71, gfe71Commands); Map<Integer, Command> gfe80Commands = new HashMap<Integer, Command>(); gfe80Commands.putAll(ALL_COMMANDS.get(Version.GFE_71)); // PutAll is changed to chunk responses back to the client gfe80Commands.put(MessageType.PUTALL, PutAll80.getCommand()); ALL_COMMANDS.put(Version.GFE_80, gfe80Commands); { Map<Integer, Command> gfe81Commands = 
new HashMap<Integer, Command>(); gfe81Commands.putAll(ALL_COMMANDS.get(Version.GFE_80)); gfe81Commands.put(MessageType.GET_ALL_WITH_CALLBACK, GetAllWithCallback.getCommand()); gfe81Commands.put(MessageType.PUT_ALL_WITH_CALLBACK, PutAllWithCallback.getCommand()); gfe81Commands.put(MessageType.REMOVE_ALL, RemoveAll.getCommand()); ALL_COMMANDS.put(Version.GFE_81, gfe81Commands); } Map<Integer, Command> gfe82Commands = new HashMap<Integer, Command>(); gfe82Commands.putAll(ALL_COMMANDS.get(Version.GFE_81)); ALL_COMMANDS.put(Version.GFE_82, gfe82Commands); } public static Map<Integer,Command> getCommands(Version version) { return ALL_COMMANDS.get(version); } public static Map<Integer,Command> getCommands(ServerConnection connection) { return getCommands(connection.getClientVersion()); } /** * A method used by tests for Backward compatibility */ public static void testSetCommands(Map<Integer,Command> testCommands) { ALL_COMMANDS.put(Version.TEST_VERSION, testCommands); } }
package com.trifork.hotruby.ast;

import com.trifork.hotruby.interp.CompileContext;

/**
 * AST node for a Ruby method call ({@code receiver.method(args) { block }}),
 * including the pseudo-calls {@code yield}, {@code super}, and the
 * command-like keywords {@code return}/{@code break}/{@code next}/{@code
 * retry}/{@code redo}, which the parser apparently delivers as method calls
 * with a {@code null} receiver.
 */
public class MethodCallExpression extends Expression {

	// Receiver expression; null means "send to self" (or a command-like call).
	private Expression expr;
	// Argument list; may be null when the call has no argument list at all.
	private SequenceExpression args;
	// Method name as written in source (may be an operator such as "+" or "[]").
	private String method;
	// Attached block ({ ... } / do ... end), or null.
	private BlockCode block;

	/**
	 * True when the call used the {@code ::} notation, i.e. the base must
	 * resolve to a class or module (e.g. {@code Foo::Bar} / {@code Foo::bar}).
	 */
	private final boolean base_must_be_class_or_module;

	/**
	 * @param ctx      enclosing code scope; when non-null it is notified of
	 *                 the call so it can track which methods are referenced
	 * @param line     source line number (passed to {@link Expression})
	 * @param receiver receiver expression, or null for self/command calls
	 * @param method   method name
	 * @param args     argument sequence, or null
	 * @param block    attached block, or null
	 * @param base_must_be_class_or_module true when {@code ::} was used
	 */
	public MethodCallExpression(RubyCode ctx, int line, Expression receiver,
			String method, SequenceExpression args, BlockCode block,
			boolean base_must_be_class_or_module) {
		super(line);
		this.expr = receiver;
		this.method = method;
		this.args = args;
		this.block = block;
		this.base_must_be_class_or_module = base_must_be_class_or_module;
		if (ctx != null) {
			ctx.method_call_here(method);
		}
	}

	/** Attaches (or replaces) the block passed to this call. */
	public void setBlock(BlockCode block) {
		this.block = block;
	}

	/** Replaces the argument list of this call. */
	public void setArgs(SequenceExpression args) {
		this.args = args;
	}

	/** @return the argument list, or null when the call has none */
	public SequenceExpression getArgs() {
		return args;
	}

	/** @return the receiver expression, or null for self/command calls */
	public Expression getExpression() {
		return expr;
	}

	/**
	 * Coerces {@code expr2} into a {@link MethodCallExpression}: returned
	 * unchanged if it already is one; a bare {@link MethodDenominator} is
	 * wrapped as a no-receiver, no-args call. Anything else is an error.
	 *
	 * @throws RuntimeException if {@code expr2} cannot be converted
	 */
	public static MethodCallExpression make(RubyCode scope, Expression expr2) {
		if (expr2 instanceof MethodCallExpression) {
			return (MethodCallExpression) expr2;
		} else if (expr2 instanceof MethodDenominator) {
			return new MethodCallExpression(scope, expr2.line, null,
					((MethodDenominator) expr2).getMethodName(), null, null,
					false);
		} else {
			throw new RuntimeException("cannot convert " + expr2
					+ " to methodCall");
		}
	}

	/** @return the method name being called */
	public String getMethodName() {
		return method;
	}

	/** Renames the method being called. */
	public void setMethodName(String string) {
		method = string;
	}

	/** Appends one argument, creating the argument list on first use. */
	public void addArgument(Expression right) {
		if (args == null) {
			args = new SequenceExpression();
		}
		args.addExpression(right);
	}

	/**
	 * Source-like rendering. Index access ({@code []}) and index assignment
	 * ({@code []=}) get special forms; for {@code []=} the last argument is
	 * the assigned value, printed after the {@code ]=}.
	 */
	public String toString() {
		if ("[]".equals(method)) {
			return String.valueOf(expr)
					+ (base_must_be_class_or_module ? "::" : ".") + "["
					+ (args == null ? "" : args.toString()) + "]"
					+ (block == null ? "" : block.toString());
		} else if ("[]=".equals(method)) {
			return String.valueOf(expr)
					+ (base_must_be_class_or_module ? "::" : ".") + "["
					+ (args == null ? "" : args.toStringExceptLast()) + "]="
					+ (block == null ? "" : block.toString())
					+ (args == null ? "?" : args.last());
		} else {
			return String.valueOf(expr)
					+ (base_must_be_class_or_module ? "::" : ".") + method
					+ "(" + (args == null ? "" : args.toString()) + ")"
					+ (block == null ? "" : block.toString());
		}
	}

	/**
	 * Emits bytecode for this call.
	 *
	 * @param ctx  compile context / bytecode emitter
	 * @param push true when the call's result must be left on the stack
	 */
	void compile(CompileContext ctx, boolean push) {
		// NOTE(review): leftover debug hook — a no-op breakpoint anchor for
		// calls named "update"; candidate for removal.
		if ("update".equals(method)) {
			System.out.print("");
		}
		Expression receiver = expr;
		boolean is_yield = false;
		boolean is_super = false;
		if (receiver == null) {
			// handle special cases for "command-like" method calls
			// ("return"|"break"|"next"|"retry"|"redo")
			if ("break".equals(method)) {
				// push-nil BEFORE the jump so the expression has a value if
				// the surrounding code expects one
				if (push) {
					ctx.emit_push_nil();
				}
				ctx.emit_break();
				return;
			} else if ("next".equals(method)) {
				ctx.emit_next();
				if (push) {
					ctx.emit_push_nil();
				}
				return;
			} else if ("retry".equals(method)) {
				ctx.emit_retry();
				if (push) {
					ctx.emit_push_nil();
				}
				return;
			} else if ("redo".equals(method)) {
				ctx.emit_redo();
				if (push) {
					ctx.emit_push_nil();
				}
				return;
			} else if ("yield".equals(method)) {
				is_yield = true;
			} else if ("super".equals(method)) {
				is_super = true;
			} else if ("return".equals(method)) {
				// no value -> nil; exactly one plain arg -> that value;
				// otherwise pack the args into an array return value
				if (args == null || args.size() == 0) {
					ctx.emit_push_nil();
				} else if (args.size() == 1 && !args.get(0).isRestArg()) {
					args.get(0).compile(ctx, true);
				} else {
					ArrayExpression ae = new ArrayExpression(line, args);
					ae.compile(ctx, true);
					ctx.emit_make_returnvalue();
				}
				ctx.emit_return();
				if (push) {
					// emit_return never falls through; keep the bookkeeping
					// consistent with "a value was pushed"
					ctx.set_stack_depth(ctx.get_stack_depth() + 1);
				}
				return;
			}

			// generic case, send message to self
			receiver = SelfExpression.instance;
		}

		// Foo::Bar with a constant-looking name compiles to a const_get call
		// on the receiver instead of a normal method send.
		if (base_must_be_class_or_module && isConstantName(method)) {
			SequenceExpression new_arg = new SequenceExpression();
			new_arg.addExpression(new SymbolExpression(line, this.method));
			new MethodCallExpression(null, line, receiver, "const_get",
					new_arg, null, false).compile(ctx, push);
			return;
		}

		int st = ctx.get_stack_depth();
		if (!is_yield && !is_super) {
			// normal send: receiver must contribute exactly one stack slot
			receiver.compile(ctx, true);
			if (ctx.get_stack_depth() - 1 != st) {
				throw new InternalError("should only push 1 element, did push "
						+ (ctx.get_stack_depth() - st) + ": " + receiver);
			}
		} else if (!is_yield) {
			// super: self is stored here, before args
			ctx.emit_push_self();
		}
		// (yield pushes no receiver at all)

		boolean has_block_arg = false;
		boolean has_rest_arg = false;
		int arg_count = 0;
		if (args != null) {
			for (int i = 0; i < args.size(); i++) {
				Expression exp = args.get(i);
				if (exp instanceof RestArgExpression) {
					// rest-arg needs to know how many plain args precede it
					((RestArgExpression) exp).compile(ctx, true, arg_count);
				} else {
					exp.compile(ctx, true);
				}
				// rest/block args are flagged, not counted as plain args
				if (exp.isRestArg()) {
					has_rest_arg = true;
				} else if (exp.isBlockArg()) {
					has_block_arg = true;
				} else {
					arg_count += 1;
				}
			}
		}

		// Fast paths: dedicated opcodes for simple binary/unary operators.
		// Only taken when the result is used (push), since the specialized
		// opcodes presumably always leave a value on the stack.
		boolean is_simple_binary = arg_count == 1 && !has_block_arg
				&& !has_rest_arg && block == null;
		if (is_simple_binary && push) {
			// NOTE(review): "<<" has no fast path here — confirm intentional
			if ("+".equals(method)) {
				ctx.emit_plus();
				return;
			} else if ("-".equals(method)) {
				ctx.emit_minus();
				return;
			} else if ("<".equals(method)) {
				ctx.emit_lt();
				return;
			} else if ("<=".equals(method)) {
				ctx.emit_le();
				return;
			} else if (">=".equals(method)) {
				ctx.emit_ge();
				return;
			} else if (">".equals(method)) {
				ctx.emit_gt();
				return;
			} else if (">>".equals(method)) {
				ctx.emit_rshft();
				return;
			} else if ("^".equals(method)) {
				ctx.emit_bit_xor();
				return;
			} else if ("|".equals(method)) {
				ctx.emit_bit_or();
				return;
			} else if ("&".equals(method)) {
				ctx.emit_bit_and();
				return;
			} else if ("==".equals(method)) {
				ctx.emit_eq2();
				return;
			} else if ("===".equals(method)) {
				ctx.emit_eq3();
				return;
			} else if ("=~".equals(method)) {
				ctx.emit_eqtilde();
				return;
			}
		}

		boolean is_simple_unary = arg_count == 0 && !has_block_arg
				&& !has_rest_arg && block == null;
		if (is_simple_unary && push) {
			if ("~".equals(method)) {
				ctx.emit_bit_not();
				return;
			}
		}

		// General dispatch: yield / super / self-send / plain send.
		if (is_yield) {
			ctx.emit_invoke_block(arg_count, has_rest_arg, push);
			//ctx.emit_swap();
			//ctx.emit_pop();
		} else if (is_super) {
			ctx.emit_supersend(arg_count, has_rest_arg, has_block_arg, push,
					block);
		} else if (receiver == SelfExpression.instance) {
			ctx.emit_selfsend(method, arg_count, has_rest_arg, has_block_arg,
					push, block);
		} else {
			ctx.emit_send(method, arg_count, has_rest_arg, has_block_arg,
					push, block);
		}
	}

	/** Ruby convention: a name starting with an upper-case letter is a constant. */
	private boolean isConstantName(String method) {
		return Character.isUpperCase(method.charAt(0));
	}

	/**
	 * Compiles {@code receiver.method = value} (attribute assignment).
	 * The value to assign is expected on the stack; it is stashed in a
	 * temp, the receiver and any extra args are pushed, then the value is
	 * re-pushed last and {@code method=} is sent.
	 */
	@Override
	public void compile_assignment(CompileContext ctx, boolean push) {
		Expression receiver = expr;
		if (base_must_be_class_or_module) {
			// NOTE(review): both branches are identical — either the split is
			// a placeholder for constant-assignment handling or it can be
			// collapsed; confirm before simplifying.
			if (receiver == null) {
				super.compile_assignment(ctx, push);
			} else {
				super.compile_assignment(ctx, push);
			}
		} else {
			if (receiver == null) {
				throw new InternalError("huh?");
			}
			// stash the assigned value so receiver/args can be pushed first
			int loc = ctx.alloc_temp(1);
			ctx.emit_setlocal(loc);
			receiver.compile(ctx, true);
			int arg_count = 0;
			boolean has_rest_arg = false;
			boolean has_block_arg = false;
			if (args != null) {
				for (int i = 0; i < args.size(); i++) {
					Expression exp = args.get(i);
					exp.compile(ctx, true);
					if (exp.isRestArg()) {
						has_rest_arg = true;
					} else if (exp.isBlockArg()) {
						has_block_arg = true;
					} else {
						arg_count += 1;
					}
				}
			}
			// the assigned value is the final argument
			arg_count += 1;
			ctx.emit_getlocal(loc);
			ctx.emit_send(method + "=", arg_count, has_rest_arg,
					has_block_arg, push, block);
			ctx.free_temp(loc);
		}
	}
}
package searchengine.features; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import searchengine.misc.Log; import searchengine.misc.Parameters; import searchengine.misc.db.DBConfig; import searchengine.misc.db.DML; import searchengine.model.AdClickItem; import searchengine.model.AdItem; import searchengine.model.UserAdItem; public class AdAPI { public List<AdItem> getAds(String query) { List<AdItem> ads = new ArrayList<AdItem>(); if (query.length() < 2) return ads; String sql = " SELECT * FROM " + DML.Table_ad + " WHERE " + DML.Col_n_grams + " LIKE ANY " + getClause(query) + " AND " + DML.Col_click_left + " > 0"; try { Connection con = DBConfig.getConnection(); PreparedStatement ps = con.prepareStatement(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { AdItem item = new AdItem(); item.setId(rs.getString(DML.Col_ad_id)); item.setDescription(rs.getString(DML.Col_description)); item.setUrl(rs.getString(DML.Col_url)); item.setImage(rs.getString(DML.Col_ad_image)); item.setNgrams(rs.getString(DML.Col_n_grams)); item.calculateScore(query); ads.add(item); } con.close(); } catch (Exception e) { e.printStackTrace(); } Collections.sort(ads, new Comparator<AdItem>() { @Override public int compare(AdItem o1, AdItem o2) { return o2.getScore() - o1.getScore(); } }); if (ads.size()>4) while (ads.size()>4) ads.remove(ads.size()-1); return ads; } public List<UserAdItem> getUserAdOverview(String username) { List<UserAdItem> items = new ArrayList<UserAdItem>(); Connection con = DBConfig.getConnection(); try { PreparedStatement ps = con.prepareStatement("Select * from " + DML.Table_ad + " WHERE " + DML.Col_user + " = '" + username +"'"); ResultSet rs = ps.executeQuery(); while (rs.next()) { UserAdItem item = new UserAdItem(); item.setUsername(username); item.setBugdet(rs.getDouble(DML.Col_budget)); 
item.setUrl(rs.getString(DML.Col_url)); item.setClick_left(rs.getInt(DML.Col_click_left)); item.setCostPerClick(rs.getDouble(DML.Col_cost_per_click)); item.setDescription(rs.getString(DML.Col_description)); item.setId(rs.getString(DML.Col_ad_id)); items.add(item); } con.close(); } catch (Exception e) { e.printStackTrace(); } return items; } public List<AdClickItem> getAdClicks(String id) { List<AdClickItem> clicks = new ArrayList<AdClickItem>(); Connection con = DBConfig.getConnection(); try { PreparedStatement ps = con.prepareStatement("Select * from " + DML.Table_ad_click + " WHERE " + DML.Col_ad_id + " = '" + id + "' limit 100"); ResultSet rs = ps.executeQuery(); while(rs.next()) { AdClickItem item = new AdClickItem(); item.setIp(rs.getString(DML.Col_ip)); item.setTimestamp(rs.getString(DML.Col_at_time)); clicks.add(item); } con.close(); } catch (Exception e) { } return clicks; } public void onAdClick(String id, String ipAddress) { Connection con = DBConfig.getConnection(true); try { String sql = "Update " + DML.Table_ad + " SET " + DML.Col_click_left + " = " + DML.Col_click_left + " - 1" + " WHERE " + DML.Col_ad_id + " = " + id + ";" + "Insert into " + DML.Table_ad_click + " ( " + DML.Col_ad_id + " , " + DML.Col_ip + " ) Values ('" + id + "','" + ipAddress + "')"; System.out.println(sql); PreparedStatement ps = con.prepareStatement(sql); ps.execute(); con.close(); } catch (Exception e) { e.printStackTrace(); } } private String getClause(String query) { StringBuilder builder = new StringBuilder(); builder.append(" ('{"); String[] terms = query.split(" "); for (String s : Arrays.asList(terms)) { builder.append("\"%"); builder.append(s); builder.append("%\","); } builder.deleteCharAt(builder.length()-1); builder.append("}') "); return builder.toString(); } public boolean register(String username, String ngrams, String url, String description, double budget, double costPerClick) { Connection con = DBConfig.getConnection(); try { int clickLeft = 
(int)(budget/Parameters.COSTPERCLICK); String sql = "insert into " + DML.Table_ad + " (" + DML.Col_user + ", " + DML.Col_n_grams + ", " + DML.Col_url + ", " + DML.Col_description + "," + DML.Col_click_left + "," + DML.Col_cost_per_click + ", " + DML.Col_budget + ", " + DML.Col_ad_image + ")" + " values ('" + username + "', '" + ngrams + "','" + url + "','" + description + "'," + clickLeft + "," + costPerClick + "," + budget + ",'') "; PreparedStatement ps = con.prepareStatement(sql); ps.execute(); con.commit(); con.close(); } catch (Exception e) { Log.logException(e); return false; } return true; } public boolean register(String username, String ngrams, String url, String description, double budget, double costPerClick, String image) { Connection con = DBConfig.getConnection(); try { int clickLeft = (int)(budget/Parameters.COSTPERCLICK); String sql = "insert into " + DML.Table_ad + " (" + DML.Col_user + ", " + DML.Col_n_grams + ", " + DML.Col_url + ", " + DML.Col_description + "," + DML.Col_click_left + "," + DML.Col_cost_per_click + ", " + DML.Col_budget + ", " + DML.Col_ad_image + ")" + " values ('" + username + "', '" + ngrams + "','" + url + "','" + description + "'," + clickLeft + "," + costPerClick + "," + budget + ",'" + image + "') "; PreparedStatement ps = con.prepareStatement(sql); ps.execute(); con.commit(); con.close(); } catch (Exception e) { Log.logException(e); return false; } return true; } public static void main (String[] args) { new AdAPI().getAds("android apple news usa"); // register("engadget", "mobile computer game news technology", "http://www.engadget.com/", "Engadget | Technology News, Advice and Features", 20, 0.02); // register("engadget", "computer game ", "http://www.engadget.com/gaming/", "Engadget | Gaming articles, stories, news and information.", 10, 0.02); // register("engadget", "technology science ", "http://www.engadget.com/science/", "Engadget | Gaming articles, stories, news and information.", 10, 0.02); // // 
register("pcworld", "pcworld news reveiw computer server laptop", "http://www.pcworld.com", "PCWorld - News, tips and reviews from the experts on PCs, Windows, and more", 15, 0.30); // register("pcworld", "pcworld windows mac laptop", "http://www.pcworld.com/category/laptop-computers/", "Laptops reviews, how to advice, and news", 15, 0.30); // register("pcworld", "pcworld android tablet ipad", "http://www.pcworld.com/category/tablets/", "Tablets reviews, how to advice, and news", 15, 0.30); // register("pcworld", "pcworld android mobile iphone", "http://www.pcworld.com/category/phones/", "Phones reviews, how to advice, and news", 15, 0.30); // register("pcworld", "pcworld android ios gadget", "http://www.pcworld.com/category/gadgets/", "Gadgets reviews, how to advice, and news", 15, 0.30); // // register("nytimes","usa newyork times news","http://www.nytimes.com/", "The New York Times - Breaking News, World News & Multimedia", 5,0.3,"https://pbs.twimg.com/profile_images/2044921128/finals_400x400.png"); // register("nytimes","mobile computer security network news","http://www.nytimes.com/pages/technology/index.html", "The New York Times - Technology", 5,0.3, "https://pbs.twimg.com/profile_images/2044921128/finals_400x400.png"); // register("nytimes","health medicine news","http://www.nytimes.com/pages/health/index.html", "The New York Times - Health", 5,0.3, "https://pbs.twimg.com/profile_images/2044921128/finals_400x400.png"); } }
/* * Copyright 2001-2009 Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ package org.quartz.impl.jdbcjobstore; /** * <p> * This interface extends <code>{@link * org.quartz.impl.jdbcjobstore.Constants}</code> * to include the query string constants in use by the <code>{@link * org.quartz.impl.jdbcjobstore.StdJDBCDelegate}</code> * class. * </p> * * @author <a href="mailto:jeff@binaryfeed.org">Jeffrey Wescott</a> */ public interface StdJDBCConstants extends Constants { /* * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * * Constants. * * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ // table prefix substitution string String TABLE_PREFIX_SUBST = "{0}"; // QUERIES String UPDATE_TRIGGER_STATES_FROM_OTHER_STATES = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ?"; String UPDATE_TRIGGER_STATE_FROM_OTHER_STATES_BEFORE_TIME = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE (" + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ?) AND " + COL_NEXT_FIRE_TIME + " < ?"; String SELECT_MISFIRED_TRIGGERS = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_NEXT_FIRE_TIME + " < ? 
" + "ORDER BY " + COL_NEXT_FIRE_TIME + " ASC"; String SELECT_TRIGGERS_IN_STATE = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_STATE + " = ?"; String SELECT_MISFIRED_TRIGGERS_IN_STATE = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_NEXT_FIRE_TIME + " < ? AND " + COL_TRIGGER_STATE + " = ? " + "ORDER BY " + COL_NEXT_FIRE_TIME + " ASC"; String COUNT_MISFIRED_TRIGGERS_IN_STATES = "SELECT COUNT(" + COL_TRIGGER_NAME + ") FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_NEXT_FIRE_TIME + " < ? " + "AND ((" + COL_TRIGGER_STATE + " = ?) OR (" + COL_TRIGGER_STATE + " = ?))"; String SELECT_MISFIRED_TRIGGERS_IN_STATES = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_NEXT_FIRE_TIME + " < ? " + "AND ((" + COL_TRIGGER_STATE + " = ?) OR (" + COL_TRIGGER_STATE + " = ?)) " + "ORDER BY " + COL_NEXT_FIRE_TIME + " ASC"; String SELECT_MISFIRED_TRIGGERS_IN_GROUP_IN_STATE = "SELECT " + COL_TRIGGER_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_NEXT_FIRE_TIME + " < ? AND " + COL_TRIGGER_GROUP + " = ? AND " + COL_TRIGGER_STATE + " = ? 
" + "ORDER BY " + COL_NEXT_FIRE_TIME + " ASC"; String SELECT_VOLATILE_TRIGGERS = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_IS_VOLATILE + " = ?"; String DELETE_FIRED_TRIGGERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS; String INSERT_JOB_DETAIL = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " (" + COL_JOB_NAME + ", " + COL_JOB_GROUP + ", " + COL_DESCRIPTION + ", " + COL_JOB_CLASS + ", " + COL_IS_DURABLE + ", " + COL_IS_VOLATILE + ", " + COL_IS_STATEFUL + ", " + COL_REQUESTS_RECOVERY + ", " + COL_JOB_DATAMAP + ") " + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)"; String UPDATE_JOB_DETAIL = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " SET " + COL_DESCRIPTION + " = ?, " + COL_JOB_CLASS + " = ?, " + COL_IS_DURABLE + " = ?, " + COL_IS_VOLATILE + " = ?, " + COL_IS_STATEFUL + " = ?, " + COL_REQUESTS_RECOVERY + " = ?, " + COL_JOB_DATAMAP + " = ? " + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_TRIGGERS_FOR_JOB = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_TRIGGERS_FOR_CALENDAR = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_CALENDAR_NAME + " = ?"; String SELECT_STATEFUL_JOBS_OF_TRIGGER_GROUP = "SELECT DISTINCT J." + COL_JOB_NAME + ", J." + COL_JOB_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " T, " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " J WHERE T." + COL_TRIGGER_GROUP + " = ? AND T." + COL_JOB_NAME + " = J." + COL_JOB_NAME + " AND T." + COL_JOB_GROUP + " = J." + COL_JOB_GROUP + " AND J." + COL_IS_STATEFUL + " = ?"; String DELETE_JOB_LISTENERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_LISTENERS + " WHERE " + COL_JOB_NAME + " = ? 
AND " + COL_JOB_GROUP + " = ?"; String DELETE_JOB_DETAIL = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_JOB_STATEFUL = "SELECT " + COL_IS_STATEFUL + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_JOB_EXISTENCE = "SELECT " + COL_JOB_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String UPDATE_JOB_DATA = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " SET " + COL_JOB_DATAMAP + " = ? " + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String INSERT_JOB_LISTENER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_JOB_LISTENERS + " (" + COL_JOB_NAME + ", " + COL_JOB_GROUP + ", " + COL_JOB_LISTENER + ") VALUES(?, ?, ?)"; String SELECT_JOB_LISTENERS = "SELECT " + COL_JOB_LISTENER + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_LISTENERS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_JOB_DETAIL = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_JOB_NAME + " = ? 
AND " + COL_JOB_GROUP + " = ?"; String SELECT_NUM_JOBS = "SELECT COUNT(" + COL_JOB_NAME + ") " + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS; String SELECT_JOB_GROUPS = "SELECT DISTINCT(" + COL_JOB_GROUP + ") FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS; String SELECT_JOBS_IN_GROUP = "SELECT " + COL_JOB_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_JOB_GROUP + " = ?"; String SELECT_VOLATILE_JOBS = "SELECT " + COL_JOB_NAME + ", " + COL_JOB_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " WHERE " + COL_IS_VOLATILE + " = ?"; String INSERT_TRIGGER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " (" + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_JOB_NAME + ", " + COL_JOB_GROUP + ", " + COL_IS_VOLATILE + ", " + COL_DESCRIPTION + ", " + COL_NEXT_FIRE_TIME + ", " + COL_PREV_FIRE_TIME + ", " + COL_TRIGGER_STATE + ", " + COL_TRIGGER_TYPE + ", " + COL_START_TIME + ", " + COL_END_TIME + ", " + COL_CALENDAR_NAME + ", " + COL_MISFIRE_INSTRUCTION + ", " + COL_JOB_DATAMAP + ", " + COL_PRIORITY + ") " + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; String INSERT_SIMPLE_TRIGGER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_SIMPLE_TRIGGERS + " (" + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_REPEAT_COUNT + ", " + COL_REPEAT_INTERVAL + ", " + COL_TIMES_TRIGGERED + ") " + " VALUES(?, ?, ?, ?, ?)"; String INSERT_CRON_TRIGGER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_CRON_TRIGGERS + " (" + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_CRON_EXPRESSION + ", " + COL_TIME_ZONE_ID + ") " + " VALUES(?, ?, ?, ?)"; String INSERT_BLOB_TRIGGER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_BLOB_TRIGGERS + " (" + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_BLOB + ") " + " VALUES(?, ?, ?)"; String UPDATE_TRIGGER_SKIP_DATA = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_JOB_NAME + " = ?, " + COL_JOB_GROUP + " = ?, " + COL_IS_VOLATILE + " = ?, " + 
COL_DESCRIPTION + " = ?, " + COL_NEXT_FIRE_TIME + " = ?, " + COL_PREV_FIRE_TIME + " = ?, " + COL_TRIGGER_STATE + " = ?, " + COL_TRIGGER_TYPE + " = ?, " + COL_START_TIME + " = ?, " + COL_END_TIME + " = ?, " + COL_CALENDAR_NAME + " = ?, " + COL_MISFIRE_INSTRUCTION + " = ?, " + COL_PRIORITY + " = ? WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_TRIGGER = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_JOB_NAME + " = ?, " + COL_JOB_GROUP + " = ?, " + COL_IS_VOLATILE + " = ?, " + COL_DESCRIPTION + " = ?, " + COL_NEXT_FIRE_TIME + " = ?, " + COL_PREV_FIRE_TIME + " = ?, " + COL_TRIGGER_STATE + " = ?, " + COL_TRIGGER_TYPE + " = ?, " + COL_START_TIME + " = ?, " + COL_END_TIME + " = ?, " + COL_CALENDAR_NAME + " = ?, " + COL_MISFIRE_INSTRUCTION + " = ?, " + COL_PRIORITY + " = ?, " + COL_JOB_DATAMAP + " = ? WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_SIMPLE_TRIGGER = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_SIMPLE_TRIGGERS + " SET " + COL_REPEAT_COUNT + " = ?, " + COL_REPEAT_INTERVAL + " = ?, " + COL_TIMES_TRIGGERED + " = ? WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_CRON_TRIGGER = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_CRON_TRIGGERS + " SET " + COL_CRON_EXPRESSION + " = ? WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_BLOB_TRIGGER = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_BLOB_TRIGGERS + " SET " + COL_BLOB + " = ? WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_TRIGGER_EXISTENCE = "SELECT " + COL_TRIGGER_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_TRIGGER_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_NAME + " = ? 
AND " + COL_TRIGGER_GROUP + " = ?"; String UPDATE_TRIGGER_STATE_FROM_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ? AND " + COL_TRIGGER_STATE + " = ?"; String UPDATE_TRIGGER_GROUP_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?"; String UPDATE_TRIGGER_GROUP_STATE_FROM_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_GROUP + " = ? AND " + COL_TRIGGER_STATE + " = ?"; String UPDATE_TRIGGER_STATE_FROM_STATES = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ? AND (" + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ?)"; String UPDATE_TRIGGER_GROUP_STATE_FROM_STATES = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ?" + " WHERE " + COL_TRIGGER_GROUP + " = ? AND (" + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ? OR " + COL_TRIGGER_STATE + " = ?)"; String UPDATE_JOB_TRIGGER_STATES = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ? WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String UPDATE_JOB_TRIGGER_STATES_FROM_OTHER_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " SET " + COL_TRIGGER_STATE + " = ? WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ? AND " + COL_TRIGGER_STATE + " = ?"; String DELETE_TRIGGER_LISTENERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGER_LISTENERS + " WHERE " + COL_TRIGGER_NAME + " = ? 
AND " + COL_TRIGGER_GROUP + " = ?"; String INSERT_TRIGGER_LISTENER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_TRIGGER_LISTENERS + " (" + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_TRIGGER_LISTENER + ") VALUES(?, ?, ?)"; String SELECT_TRIGGER_LISTENERS = "SELECT " + COL_TRIGGER_LISTENER + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGER_LISTENERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String DELETE_SIMPLE_TRIGGER = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_SIMPLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String DELETE_CRON_TRIGGER = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_CRON_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String DELETE_BLOB_TRIGGER = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_BLOB_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String DELETE_TRIGGER = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_NUM_TRIGGERS_FOR_JOB = "SELECT COUNT(" + COL_TRIGGER_NAME + ") FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_JOB_FOR_TRIGGER = "SELECT J." + COL_JOB_NAME + ", J." + COL_JOB_GROUP + ", J." + COL_IS_DURABLE + ", J." + COL_JOB_CLASS + ", J." + COL_REQUESTS_RECOVERY + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " T, " + TABLE_PREFIX_SUBST + TABLE_JOB_DETAILS + " J WHERE T." + COL_TRIGGER_NAME + " = ? AND T." + COL_TRIGGER_GROUP + " = ? AND T." + COL_JOB_NAME + " = J." + COL_JOB_NAME + " AND T." + COL_JOB_GROUP + " = J." + COL_JOB_GROUP; String SELECT_TRIGGER = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? 
AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_TRIGGER_DATA = "SELECT " + COL_JOB_DATAMAP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_TRIGGER_STATE = "SELECT " + COL_TRIGGER_STATE + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_TRIGGER_STATUS = "SELECT " + COL_TRIGGER_STATE + ", " + COL_NEXT_FIRE_TIME + ", " + COL_JOB_NAME + ", " + COL_JOB_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_SIMPLE_TRIGGER = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_SIMPLE_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_CRON_TRIGGER = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_CRON_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_BLOB_TRIGGER = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_BLOB_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_NUM_TRIGGERS = "SELECT COUNT(" + COL_TRIGGER_NAME + ") " + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS; String SELECT_NUM_TRIGGERS_IN_GROUP = "SELECT COUNT(" + COL_TRIGGER_NAME + ") " + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_GROUP + " = ?"; String SELECT_TRIGGER_GROUPS = "SELECT DISTINCT(" + COL_TRIGGER_GROUP + ") FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS; String SELECT_TRIGGERS_IN_GROUP = "SELECT " + COL_TRIGGER_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_GROUP + " = ?"; String INSERT_CALENDAR = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_CALENDARS + " (" + COL_CALENDAR_NAME + ", " + COL_CALENDAR + ") " + " VALUES(?, ?)"; String UPDATE_CALENDAR = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_CALENDARS + " SET " + COL_CALENDAR + " = ? 
" + " WHERE " + COL_CALENDAR_NAME + " = ?"; String SELECT_CALENDAR_EXISTENCE = "SELECT " + COL_CALENDAR_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_CALENDARS + " WHERE " + COL_CALENDAR_NAME + " = ?"; String SELECT_CALENDAR = "SELECT *" + " FROM " + TABLE_PREFIX_SUBST + TABLE_CALENDARS + " WHERE " + COL_CALENDAR_NAME + " = ?"; String SELECT_REFERENCED_CALENDAR = "SELECT " + COL_CALENDAR_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_CALENDAR_NAME + " = ?"; String DELETE_CALENDAR = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_CALENDARS + " WHERE " + COL_CALENDAR_NAME + " = ?"; String SELECT_NUM_CALENDARS = "SELECT COUNT(" + COL_CALENDAR_NAME + ") " + " FROM " + TABLE_PREFIX_SUBST + TABLE_CALENDARS; String SELECT_CALENDARS = "SELECT " + COL_CALENDAR_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_CALENDARS; String SELECT_NEXT_FIRE_TIME = "SELECT MIN(" + COL_NEXT_FIRE_TIME + ") AS " + ALIAS_COL_NEXT_FIRE_TIME + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_STATE + " = ? AND " + COL_NEXT_FIRE_TIME + " >= 0"; String SELECT_TRIGGER_FOR_FIRE_TIME = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_STATE + " = ? AND " + COL_NEXT_FIRE_TIME + " = ?"; String SELECT_NEXT_TRIGGER_TO_ACQUIRE = "SELECT " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_NEXT_FIRE_TIME + ", " + COL_PRIORITY + " FROM " + TABLE_PREFIX_SUBST + TABLE_TRIGGERS + " WHERE " + COL_TRIGGER_STATE + " = ? AND " + COL_NEXT_FIRE_TIME + " < ? " + "AND (" + COL_NEXT_FIRE_TIME + " >= ?) 
" + "ORDER BY "+ COL_NEXT_FIRE_TIME + " ASC, " + COL_PRIORITY + " DESC"; String INSERT_FIRED_TRIGGER = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " (" + COL_ENTRY_ID + ", " + COL_TRIGGER_NAME + ", " + COL_TRIGGER_GROUP + ", " + COL_IS_VOLATILE + ", " + COL_INSTANCE_NAME + ", " + COL_FIRED_TIME + ", " + COL_ENTRY_STATE + ", " + COL_JOB_NAME + ", " + COL_JOB_GROUP + ", " + COL_IS_STATEFUL + ", " + COL_REQUESTS_RECOVERY + ", " + COL_PRIORITY + ") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; String UPDATE_INSTANCES_FIRED_TRIGGER_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " SET " + COL_ENTRY_STATE + " = ? AND " + COL_FIRED_TIME + " = ? AND " + COL_PRIORITY+ " = ? WHERE " + COL_INSTANCE_NAME + " = ?"; String SELECT_INSTANCES_FIRED_TRIGGERS = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_INSTANCE_NAME + " = ?"; String SELECT_INSTANCES_RECOVERABLE_FIRED_TRIGGERS = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_INSTANCE_NAME + " = ? AND " + COL_REQUESTS_RECOVERY + " = ?"; String SELECT_JOB_EXECUTION_COUNT = "SELECT COUNT(" + COL_TRIGGER_NAME + ") FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_JOB_NAME + " = ? AND " + COL_JOB_GROUP + " = ?"; String SELECT_FIRED_TRIGGERS = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS; String SELECT_FIRED_TRIGGER = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_TRIGGER_NAME + " = ? AND " + COL_TRIGGER_GROUP + " = ?"; String SELECT_FIRED_TRIGGER_GROUP = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_TRIGGER_GROUP + " = ?"; String SELECT_FIRED_TRIGGERS_OF_JOB = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_JOB_NAME + " = ? 
AND " + COL_JOB_GROUP + " = ?"; String SELECT_FIRED_TRIGGERS_OF_JOB_GROUP = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_JOB_GROUP + " = ?"; String DELETE_FIRED_TRIGGER = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_ENTRY_ID + " = ?"; String DELETE_INSTANCES_FIRED_TRIGGERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_INSTANCE_NAME + " = ?"; String DELETE_VOLATILE_FIRED_TRIGGERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_IS_VOLATILE + " = ?"; String DELETE_NO_RECOVERY_FIRED_TRIGGERS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS + " WHERE " + COL_INSTANCE_NAME + " = ?" + COL_REQUESTS_RECOVERY + " = ?"; String SELECT_FIRED_TRIGGER_INSTANCE_NAMES = "SELECT DISTINCT " + COL_INSTANCE_NAME + " FROM " + TABLE_PREFIX_SUBST + TABLE_FIRED_TRIGGERS; String INSERT_SCHEDULER_STATE = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_SCHEDULER_STATE + " (" + COL_INSTANCE_NAME + ", " + COL_LAST_CHECKIN_TIME + ", " + COL_CHECKIN_INTERVAL + ") VALUES(?, ?, ?)"; String SELECT_SCHEDULER_STATE = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_SCHEDULER_STATE + " WHERE " + COL_INSTANCE_NAME + " = ?"; String SELECT_SCHEDULER_STATES = "SELECT * FROM " + TABLE_PREFIX_SUBST + TABLE_SCHEDULER_STATE; String DELETE_SCHEDULER_STATE = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_SCHEDULER_STATE + " WHERE " + COL_INSTANCE_NAME + " = ?"; String UPDATE_SCHEDULER_STATE = "UPDATE " + TABLE_PREFIX_SUBST + TABLE_SCHEDULER_STATE + " SET " + COL_LAST_CHECKIN_TIME + " = ? 
WHERE " + COL_INSTANCE_NAME + " = ?"; String INSERT_PAUSED_TRIGGER_GROUP = "INSERT INTO " + TABLE_PREFIX_SUBST + TABLE_PAUSED_TRIGGERS + " (" + COL_TRIGGER_GROUP + ") VALUES(?)"; String SELECT_PAUSED_TRIGGER_GROUP = "SELECT " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_PAUSED_TRIGGERS + " WHERE " + COL_TRIGGER_GROUP + " = ?"; String SELECT_PAUSED_TRIGGER_GROUPS = "SELECT " + COL_TRIGGER_GROUP + " FROM " + TABLE_PREFIX_SUBST + TABLE_PAUSED_TRIGGERS; String DELETE_PAUSED_TRIGGER_GROUP = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_PAUSED_TRIGGERS + " WHERE " + COL_TRIGGER_GROUP + " = ?"; String DELETE_PAUSED_TRIGGER_GROUPS = "DELETE FROM " + TABLE_PREFIX_SUBST + TABLE_PAUSED_TRIGGERS; // CREATE TABLE qrtz_scheduler_state(INSTANCE_NAME VARCHAR2(80) NOT NULL, // LAST_CHECKIN_TIME NUMBER(13) NOT NULL, CHECKIN_INTERVAL NUMBER(13) NOT // NULL, PRIMARY KEY (INSTANCE_NAME)); } // EOF
/*
 * Copyright 2006-2012 ICEsoft Technologies Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS
 * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.icepdf.core.pobjects.security;

import org.icepdf.core.exceptions.PDFSecurityException;
import org.icepdf.core.pobjects.Reference;
import org.icepdf.core.util.Defs;
import org.icepdf.core.util.Library;

import java.io.InputStream;
import java.security.Provider;
import java.security.Security;
import java.util.Hashtable;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * <p>The Security Manager class manages the encryption of encrypted
 * PDF documents. The class is initiated by the Document class if a
 * Crypt key is found in the document's trailer. The singleton pattern
 * is implemented so that it can be called from anywhere with the PDF
 * object structure.</p>
 * <p/>
 * <p>There is currently only support for Adobe Standard encryption which is
 * supported by the StandardSecurityHandler. Additional support for custom
 * security handlers, public-key handlers and crypt filters is currently under
 * development.</p>
 * <p/>
 * <p>The Security Manager needs to be compliant with Sun Java JCE 1.2.1
 * implementation. The security manager assumes that
 * org.bouncycastle.jce.provider.BouncyCastleProvider can be found on the class
 * path and will try to load the class accordingly. However, if you have another
 * crypto API that you would like to use, the system property
 * org.icepdf.core.security.jceProvider can be set to the provider's
 * class name.</p>
 *
 * @since 1.1
 */
public class SecurityManager {

    private static final Logger logger =
            Logger.getLogger(SecurityManager.class.toString());

    // Encryption dictionary, which also contains the keys needed by the
    // standard, crypt and public-key security handlers.
    private EncryptionDictionary encryptDictionary = null;

    // Handler implementing the SecurityHandler interface for this document.
    private SecurityHandler securityHandler = null;

    // Flag recording whether the JCE classes (javax.crypto) were found.
    private static boolean foundJCE = false;

    // Add the security provider of choice before the Sun RSA provider
    // (if any). Runs once, when the class is first loaded.
    static {
        // Default provider; can be overridden via a system property.
        String defaultSecurityProvider =
                "org.bouncycastle.jce.provider.BouncyCastleProvider";

        // Check the system property for a custom security provider.
        String customSecurityProvider =
                Defs.sysProperty("org.icepdf.core.security.jceProvider");

        // If a custom security provider is configured, prefer it over the
        // default.
        if (customSecurityProvider != null) {
            defaultSecurityProvider = customSecurityProvider;
        }

        try {
            // Try to instantiate the provider and register it at position 2.
            Object provider = Class.forName(defaultSecurityProvider).newInstance();
            Security.insertProviderAt((Provider) provider, 2);
        } catch (ClassNotFoundException e) {
            logger.log(Level.FINE, "Optional BouncyCastle security provider not found");
        } catch (InstantiationException e) {
            logger.log(Level.FINE, "Optional BouncyCastle security provider could not be instantiated");
        } catch (IllegalAccessException e) {
            logger.log(Level.FINE, "Optional BouncyCastle security provider could not be created");
        }

        // Verify that the JCE classes are actually on the classpath.
        try {
            Class.forName("javax.crypto.Cipher");
            foundJCE = true;
        } catch (ClassNotFoundException e) {
            logger.log(Level.SEVERE, "Sun JCE Support Not Found");
        }
    }

    /**
     * Disposes of the security handler instance.
     */
    public void dispose() {
    }

    /**
     * Creates new instance of SecurityManager object.
     *
     * @param library              library of documents PDF objects
     * @param encryptionDictionary encryption dictionary key values
     * @param fileID               fileID of PDF document
     * @throws PDFSecurityException if the JCE classes are missing or no
     *                              supported security handler matches the
     *                              document's encryption dictionary
     */
    public SecurityManager(Library library, Hashtable encryptionDictionary,
                           Vector fileID)
            throws PDFSecurityException {

        // Check to make sure that if run under JDK 1.3 that the JCE libraries
        // are installed as extra packages.
        if (!foundJCE) {
            logger.log(Level.SEVERE, "Sun JCE support was not found on classpath");
            throw new PDFSecurityException("Sun JCE Support Not Found");
        }

        // Create the encryption dictionary for the document.
        encryptDictionary =
                new EncryptionDictionary(library, encryptionDictionary, fileID);

        // Create the security handler based on the dictionary entries.
        // Only the Adobe "Standard" handler is currently supported.
        if (encryptDictionary.getPreferredSecurityHandlerName().
                equalsIgnoreCase("Standard")) {
            securityHandler = new StandardSecurityHandler(encryptDictionary);
            // Initiate the handler.
            securityHandler.init();
        } else {
            throw new PDFSecurityException("Security Provider Not Found.");
        }
    }

    /**
     * Gets the permission associated with the document's encryption handler.
     *
     * @return permission object
     */
    public Permissions getPermissions() {
        return securityHandler.getPermissions();
    }

    /**
     * Gets the SecurityHandler associated with this Security Manager.
     *
     * @return security handler object.
     */
    public SecurityHandler getSecurityHandler() {
        return securityHandler;
    }

    /**
     * Gets the encryption dictionary associated with the document encryption
     * handler.
     *
     * @return encryption dictionary
     */
    public EncryptionDictionary getEncryptionDictionary() {
        return encryptDictionary;
    }

    /**
     * Gets the encryption key used by the security handler when encrypting
     * data.
     *
     * @return encryption key used to encrypt the data
     */
    public byte[] getEncryptionKey() {
        return securityHandler.getEncryptionKey();
    }

    /**
     * Gets the decrypt key used by the security handler when decrypting data.
     *
     * @return decryption key used to decrypt the data
     */
    public byte[] getDecryptionKey() {
        return securityHandler.getDecryptionKey();
    }

    /**
     * Encrypt the <code>data</code> using the <code>encryptionKey</code> and
     * <code>objectReference</code> of the PDF stream or String object.
     *
     * @param objectReference PDF objects number and revision number
     * @param encryptionKey   encryption key used to encrypt the data
     * @param data            byte data of a PDF Stream or String object
     * @return encrypted data
     */
    public byte[] encrypt(Reference objectReference,
                          byte[] encryptionKey,
                          byte[] data) {
        return securityHandler.encrypt(objectReference, encryptionKey, data);
    }

    /**
     * Decrypt the <code>data</code> using the <code>encryptionKey</code> and
     * <code>objectReference</code> of the PDF stream or String object.
     *
     * @param objectReference PDF objects number and revision number
     * @param encryptionKey   encryption key used to decrypt the data
     * @param data            byte data of a PDF Stream or String object
     * @return decrypted data
     */
    public byte[] decrypt(Reference objectReference,
                          byte[] encryptionKey,
                          byte[] data) {
        return securityHandler.decrypt(objectReference, encryptionKey, data);
    }

    /**
     * Return a new InputStream, from which read operations will return
     * data, read and decrypted from the InputStream parameter, for the
     * <code>objectReference</code> of the PDF stream or String object.
     *
     * @param objectReference         PDF objects number and revision number
     * @param encryptionKey           encryption key used to decrypt the data
     * @param decodeParams            crypt filter optional parameters, can be null.
     * @param input                   InputStream giving access to encrypted data
     * @param returnInputIfNullResult If results end up being null, then return
     *                                input instead of null
     * @return InputStream giving access to decrypted data
     */
    public InputStream getEncryptionInputStream(
            Reference objectReference,
            byte[] encryptionKey,
            Hashtable decodeParams,
            InputStream input,
            boolean returnInputIfNullResult) {
        InputStream result = securityHandler.getEncryptionInputStream(
                objectReference, encryptionKey, decodeParams, input);
        // Fall back to the raw input when requested and decryption produced
        // no stream.
        if (returnInputIfNullResult && result == null)
            result = input;
        return result;
    }

    /**
     * Determines whether the supplied password is authorized to view the
     * PDF document. If a password is rejected, the user should be restricted
     * from viewing the document.
     *
     * @param password password to authorize
     * @return true, if the password was authorized successfully; false, otherwise.
     */
    public boolean isAuthorized(String password) {
        return securityHandler.isAuthorized(password);
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/appengine/v1/appengine.proto package com.google.appengine.v1; /** * * * <pre> * Response message for `Instances.ListInstances`. * </pre> * * Protobuf type {@code google.appengine.v1.ListInstancesResponse} */ public final class ListInstancesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.appengine.v1.ListInstancesResponse) ListInstancesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListInstancesResponse.newBuilder() to construct. 
private ListInstancesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListInstancesResponse() { instances_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListInstancesResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListInstancesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { instances_ = new java.util.ArrayList<com.google.appengine.v1.Instance>(); mutable_bitField0_ |= 0x00000001; } instances_.add( input.readMessage(com.google.appengine.v1.Instance.parser(), extensionRegistry)); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); nextPageToken_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { instances_ = java.util.Collections.unmodifiableList(instances_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.ListInstancesResponse.class, com.google.appengine.v1.ListInstancesResponse.Builder.class); } public static final int INSTANCES_FIELD_NUMBER = 1; private java.util.List<com.google.appengine.v1.Instance> instances_; /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public java.util.List<com.google.appengine.v1.Instance> getInstancesList() { return instances_; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.appengine.v1.InstanceOrBuilder> getInstancesOrBuilderList() { return instances_; } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public int getInstancesCount() { return instances_.size(); } /** * * * <pre> * The instances belonging to the requested version. * </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public com.google.appengine.v1.Instance getInstances(int index) { return instances_.get(index); } /** * * * <pre> * The instances belonging to the requested version. 
* </pre> * * <code>repeated .google.appengine.v1.Instance instances = 1;</code> */ @java.lang.Override public com.google.appengine.v1.InstanceOrBuilder getInstancesOrBuilder(int index) { return instances_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object nextPageToken_; /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Continuation token for fetching the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < instances_.size(); i++) { output.writeMessage(1, instances_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } 
unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < instances_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, instances_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.appengine.v1.ListInstancesResponse)) { return super.equals(obj); } com.google.appengine.v1.ListInstancesResponse other = (com.google.appengine.v1.ListInstancesResponse) obj; if (!getInstancesList().equals(other.getInstancesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getInstancesCount() > 0) { hash = (37 * hash) + INSTANCES_FIELD_NUMBER; hash = (53 * hash) + getInstancesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.appengine.v1.ListInstancesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.appengine.v1.ListInstancesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.appengine.v1.ListInstancesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.appengine.v1.ListInstancesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for `Instances.ListInstances`. 
* </pre> * * Protobuf type {@code google.appengine.v1.ListInstancesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.appengine.v1.ListInstancesResponse) com.google.appengine.v1.ListInstancesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.ListInstancesResponse.class, com.google.appengine.v1.ListInstancesResponse.Builder.class); } // Construct using com.google.appengine.v1.ListInstancesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getInstancesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (instancesBuilder_ == null) { instances_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { instancesBuilder_.clear(); } nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_ListInstancesResponse_descriptor; } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse getDefaultInstanceForType() { return com.google.appengine.v1.ListInstancesResponse.getDefaultInstance(); } 
@java.lang.Override public com.google.appengine.v1.ListInstancesResponse build() { com.google.appengine.v1.ListInstancesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.appengine.v1.ListInstancesResponse buildPartial() { com.google.appengine.v1.ListInstancesResponse result = new com.google.appengine.v1.ListInstancesResponse(this); int from_bitField0_ = bitField0_; if (instancesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { instances_ = java.util.Collections.unmodifiableList(instances_); bitField0_ = (bitField0_ & ~0x00000001); } result.instances_ = instances_; } else { result.instances_ = instancesBuilder_.build(); } result.nextPageToken_ = nextPageToken_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.appengine.v1.ListInstancesResponse) { return mergeFrom((com.google.appengine.v1.ListInstancesResponse) other); } else { super.mergeFrom(other); return this; } } public 
Builder mergeFrom(com.google.appengine.v1.ListInstancesResponse other) { if (other == com.google.appengine.v1.ListInstancesResponse.getDefaultInstance()) return this; if (instancesBuilder_ == null) { if (!other.instances_.isEmpty()) { if (instances_.isEmpty()) { instances_ = other.instances_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureInstancesIsMutable(); instances_.addAll(other.instances_); } onChanged(); } } else { if (!other.instances_.isEmpty()) { if (instancesBuilder_.isEmpty()) { instancesBuilder_.dispose(); instancesBuilder_ = null; instances_ = other.instances_; bitField0_ = (bitField0_ & ~0x00000001); instancesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getInstancesFieldBuilder() : null; } else { instancesBuilder_.addAllMessages(other.instances_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.appengine.v1.ListInstancesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.appengine.v1.ListInstancesResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.appengine.v1.Instance> instances_ = java.util.Collections.emptyList(); private void ensureInstancesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { instances_ = new java.util.ArrayList<com.google.appengine.v1.Instance>(instances_); bitField0_ |= 0x00000001; } } private 
// Dual-representation storage for the repeated `instances` field (generated
// protobuf pattern): while instancesBuilder_ is null the Builder operates on
// the plain list instances_; once any nested-builder accessor is used, state
// migrates into instancesBuilder_ and instances_ is nulled out. Every accessor
// below branches on which representation is active.
com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.appengine.v1.Instance,
        com.google.appengine.v1.Instance.Builder,
        com.google.appengine.v1.InstanceOrBuilder>
    instancesBuilder_;
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public java.util.List<com.google.appengine.v1.Instance> getInstancesList() {
  if (instancesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(instances_);
  } else {
    return instancesBuilder_.getMessageList();
  }
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public int getInstancesCount() {
  if (instancesBuilder_ == null) {
    return instances_.size();
  } else {
    return instancesBuilder_.getCount();
  }
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public com.google.appengine.v1.Instance getInstances(int index) {
  if (instancesBuilder_ == null) {
    return instances_.get(index);
  } else {
    return instancesBuilder_.getMessage(index);
  }
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder setInstances(int index, com.google.appengine.v1.Instance value) {
  if (instancesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInstancesIsMutable();
    instances_.set(index, value);
    onChanged();
  } else {
    instancesBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder setInstances(
    int index, com.google.appengine.v1.Instance.Builder builderForValue) {
  if (instancesBuilder_ == null) {
    ensureInstancesIsMutable();
    instances_.set(index, builderForValue.build());
    onChanged();
  } else {
    instancesBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder addInstances(com.google.appengine.v1.Instance value) {
  if (instancesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInstancesIsMutable();
    instances_.add(value);
    onChanged();
  } else {
    instancesBuilder_.addMessage(value);
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder addInstances(int index, com.google.appengine.v1.Instance value) {
  if (instancesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInstancesIsMutable();
    instances_.add(index, value);
    onChanged();
  } else {
    instancesBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder addInstances(com.google.appengine.v1.Instance.Builder builderForValue) {
  if (instancesBuilder_ == null) {
    ensureInstancesIsMutable();
    instances_.add(builderForValue.build());
    onChanged();
  } else {
    instancesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder addInstances(
    int index, com.google.appengine.v1.Instance.Builder builderForValue) {
  if (instancesBuilder_ == null) {
    ensureInstancesIsMutable();
    instances_.add(index, builderForValue.build());
    onChanged();
  } else {
    instancesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder addAllInstances(
    java.lang.Iterable<? extends com.google.appengine.v1.Instance> values) {
  if (instancesBuilder_ == null) {
    ensureInstancesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, instances_);
    onChanged();
  } else {
    instancesBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder clearInstances() {
  if (instancesBuilder_ == null) {
    instances_ = java.util.Collections.emptyList();
    // Clearing bit 0 marks the list as "not owned/mutable" again.
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    instancesBuilder_.clear();
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public Builder removeInstances(int index) {
  if (instancesBuilder_ == null) {
    ensureInstancesIsMutable();
    instances_.remove(index);
    onChanged();
  } else {
    instancesBuilder_.remove(index);
  }
  return this;
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public com.google.appengine.v1.Instance.Builder getInstancesBuilder(int index) {
  return getInstancesFieldBuilder().getBuilder(index);
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public com.google.appengine.v1.InstanceOrBuilder getInstancesOrBuilder(int index) {
  if (instancesBuilder_ == null) {
    return instances_.get(index);
  } else {
    return instancesBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public java.util.List<? extends com.google.appengine.v1.InstanceOrBuilder>
    getInstancesOrBuilderList() {
  if (instancesBuilder_ != null) {
    return instancesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(instances_);
  }
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public com.google.appengine.v1.Instance.Builder addInstancesBuilder() {
  return getInstancesFieldBuilder()
      .addBuilder(com.google.appengine.v1.Instance.getDefaultInstance());
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public com.google.appengine.v1.Instance.Builder addInstancesBuilder(int index) {
  return getInstancesFieldBuilder()
      .addBuilder(index, com.google.appengine.v1.Instance.getDefaultInstance());
}
/**
 * The instances belonging to the requested version.
 *
 * <code>repeated .google.appengine.v1.Instance instances = 1;</code>
 */
public java.util.List<com.google.appengine.v1.Instance.Builder> getInstancesBuilderList() {
  return getInstancesFieldBuilder().getBuilderList();
}
// Lazily switches the Builder from list-backed to builder-backed storage;
// after the first call, instances_ is null and all access goes through
// instancesBuilder_.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.appengine.v1.Instance,
        com.google.appengine.v1.Instance.Builder,
        com.google.appengine.v1.InstanceOrBuilder>
    getInstancesFieldBuilder() {
  if (instancesBuilder_ == null) {
    instancesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.appengine.v1.Instance,
            com.google.appengine.v1.Instance.Builder,
            com.google.appengine.v1.InstanceOrBuilder>(
            instances_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    instances_ = null;
  }
  return instancesBuilder_;
}

// Stored as either String or ByteString; lazily converted on access (see
// getNextPageToken / getNextPageTokenBytes).
private java.lang.Object nextPageToken_ = "";
/**
 * Continuation token for fetching the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded form so subsequent calls are free.
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Continuation token for fetching the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded form so subsequent calls are free.
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Continuation token for fetching the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  onChanged();
  return this;
}
/**
 * Continuation token for fetching the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  onChanged();
  return this;
}
/**
 * Continuation token for fetching the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.appengine.v1.ListInstancesResponse)
}

// @@protoc_insertion_point(class_scope:google.appengine.v1.ListInstancesResponse)
// Singleton default instance shared by all callers of getDefaultInstance().
private static final com.google.appengine.v1.ListInstancesResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.appengine.v1.ListInstancesResponse();
}

public static com.google.appengine.v1.ListInstancesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<ListInstancesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListInstancesResponse>() {
      @java.lang.Override
      public ListInstancesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Parsing delegates to the stream-reading constructor.
        return new ListInstancesResponse(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<ListInstancesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListInstancesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.appengine.v1.ListInstancesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.completion;

import com.intellij.codeInsight.*;
import com.intellij.codeInsight.completion.scope.JavaCompletionProcessor;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementDecorator;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.*;
import com.intellij.psi.filters.ElementExtractorFilter;
import com.intellij.psi.filters.ElementFilter;
import com.intellij.psi.filters.OrFilter;
import com.intellij.psi.filters.getters.ExpectedTypesGetter;
import com.intellij.psi.filters.getters.JavaMembersGetter;
import com.intellij.psi.filters.types.AssignableFromFilter;
import com.intellij.psi.filters.types.AssignableToFilter;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.proximity.ReferenceListWeigher;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.patterns.StandardPatterns.or;

/**
 * Completion contributor for {@link CompletionType#SMART} (type-aware) completion in Java:
 * it registers providers for type casts, constructor calls after {@code new}, method
 * references, expected-type-based expression suggestions, and more.
 *
 * @author peter
 */
public class JavaSmartCompletionContributor extends CompletionContributor {
  // Deduplicates ExpectedTypeInfo entries by their PsiType only, ignoring the
  // other fields of the info (tail type, default type, ...).
  private static final TObjectHashingStrategy<ExpectedTypeInfo> EXPECTED_TYPE_INFO_STRATEGY =
    new TObjectHashingStrategy<ExpectedTypeInfo>() {
      @Override
      public int computeHashCode(ExpectedTypeInfo object) {
        return object.getType().hashCode();
      }

      @Override
      public boolean equals(ExpectedTypeInfo o1, ExpectedTypeInfo o2) {
        return o1.getType().equals(o2.getType());
      }
    };

  // Accepts only classes assignable to java.lang.Throwable (used after "throw new").
  private static final ElementExtractorFilter THROWABLES_FILTER =
    new ElementExtractorFilter(new AssignableFromFilter(CommonClassNames.JAVA_LANG_THROWABLE));

  // Position right after a "new" keyword that is NOT preceded by "throw".
  public static final ElementPattern<PsiElement> AFTER_NEW =
    psiElement().afterLeaf(
      psiElement().withText(PsiKeyword.NEW).andNot(
        psiElement().afterLeaf(
          psiElement().withText(PsiKeyword.THROW))));

  // Position right after "throw new".
  static final ElementPattern<PsiElement> AFTER_THROW_NEW =
    psiElement().afterLeaf(psiElement().withText(PsiKeyword.NEW).afterLeaf(PsiKeyword.THROW));

  // Any expression position except literals and method-reference qualifiers.
  public static final ElementPattern<PsiElement> INSIDE_EXPRESSION = or(
    psiElement().withParent(PsiExpression.class)
      .andNot(psiElement().withParent(PsiLiteralExpression.class))
      .andNot(psiElement().withParent(PsiMethodReferenceExpression.class)),
    psiElement().inside(PsiClassObjectAccessExpression.class),
    psiElement().inside(PsiThisExpression.class),
    psiElement().inside(PsiSuperExpression.class));

  // Reference expression immediately following the ")" of a type cast.
  static final ElementPattern<PsiElement> INSIDE_TYPECAST_EXPRESSION = psiElement().withParent(
    psiElement(PsiReferenceExpression.class).afterLeaf(psiElement().withText(")").withParent(PsiTypeCastExpression.class)));

  /**
   * Picks a class filter for the current position, or returns null when class
   * suggestions should not be filtered here.
   *
   * @param element   completion position
   * @param inRefList whether the position is inside an extends/implements/throws list
   */
  @Nullable
  private static ElementFilter getClassReferenceFilter(final PsiElement element, final boolean inRefList) {
    //throw new foo
    if (AFTER_THROW_NEW.accepts(element)) {
      return THROWABLES_FILTER;
    }

    //new xxx.yyy
    if (psiElement().afterLeaf(psiElement().withText(".")).withSuperParent(2, psiElement(PsiNewExpression.class)).accepts(element)) {
      if (((PsiNewExpression)element.getParent().getParent()).getClassReference() == element.getParent()) {
        // Accept classes assignable to any of the expected types at this position.
        PsiType[] types = ExpectedTypesGetter.getExpectedTypes(element, false);
        return new OrFilter(ContainerUtil.map2Array(types, ElementFilter.class, (Function<PsiType, ElementFilter>)type -> new AssignableFromFilter(type)));
      }
    }

    // extends/implements/throws
    if (inRefList) {
      return new ElementExtractorFilter(new ElementFilter() {
        @Override
        public boolean isAcceptable(Object aClass, @Nullable PsiElement context) {
          return aClass instanceof PsiClass &&
                 ReferenceListWeigher.INSTANCE.getApplicability((PsiClass)aClass, element) !=
                 ReferenceListWeigher.ReferenceListApplicability.inapplicable;
        }

        @Override
        public boolean isClassAcceptable(Class hintClass) {
          return true;
        }
      });
    }

    return null;
  }

  public JavaSmartCompletionContributor() {
    extend(CompletionType.SMART, SmartCastProvider.TYPECAST_TYPE_CANDIDATE, new SmartCastProvider());

    extend(CompletionType.SMART, SameSignatureCallParametersProvider.IN_CALL_ARGUMENT,
           new SameSignatureCallParametersProvider());

    extend(CompletionType.SMART, MethodReturnTypeProvider.IN_METHOD_RETURN_TYPE, new MethodReturnTypeProvider());

    extend(CompletionType.SMART, InstanceofTypeProvider.AFTER_INSTANCEOF, new InstanceofTypeProvider());

    // Class-reference completion: after "new", in reference lists, and after type casts.
    extend(CompletionType.SMART, psiElement(), new CompletionProvider<CompletionParameters>() {
      @Override
      protected void addCompletions(@NotNull final CompletionParameters parameters,
                                    @NotNull final ProcessingContext context,
                                    @NotNull final CompletionResultSet result) {
        if (SmartCastProvider.shouldSuggestCast(parameters)) return;

        final PsiElement element = parameters.getPosition();
        final PsiJavaCodeReferenceElement reference =
          PsiTreeUtil.findElementOfClassAtOffset(element.getContainingFile(), parameters.getOffset(), PsiJavaCodeReferenceElement.class, false);
        if (reference != null) {
          boolean inRefList = ReferenceListWeigher.INSIDE_REFERENCE_LIST.accepts(element);
          ElementFilter filter = getClassReferenceFilter(element, inRefList);
          if (filter != null) {
            final List<ExpectedTypeInfo> infos = Arrays.asList(getExpectedTypes(parameters));
            for (LookupElement item : completeReference(element, reference, filter, true, false, parameters, result.getPrefixMatcher())) {
              Object o = item.getObject();
              // Accept classes; also accept constructors when parameter-name hints are on
              // and this is a constructor-call place.
              if (o instanceof PsiClass ||
                  CodeInsightSettings.getInstance().SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION &&
                  JavaConstructorCallElement.isConstructorCallPlace(element) &&
                  o instanceof PsiMethod && ((PsiMethod)o).isConstructor()) {
                if (!inRefList && o instanceof PsiClass) {
                  // Outside reference lists a chosen class should be inserted as "new X(...)".
                  item = LookupElementDecorator.withInsertHandler(item, ConstructorInsertHandler.SMART_INSTANCE);
                }
                result.addElement(decorate(item, infos));
              }
            }
          }
          else if (INSIDE_TYPECAST_EXPRESSION.accepts(element)) {
            final PsiTypeCastExpression cast = PsiTreeUtil.getContextOfType(element, PsiTypeCastExpression.class, true);
            if (cast != null && cast.getCastType() != null) {
              // After "(Foo)" suggest members assignable TO the cast type.
              filter = new AssignableToFilter(cast.getCastType().getType());
              for (final LookupElement item : completeReference(element, reference, filter, false, true, parameters, result.getPrefixMatcher())) {
                result.addElement(item);
              }
            }
          }
        }
      }
    });

    // Expression completion driven by the expected types at the caret.
    extend(CompletionType.SMART, INSIDE_EXPRESSION, new ExpectedTypeBasedCompletionProvider() {
      @Override
      protected void addCompletions(final CompletionParameters params, final CompletionResultSet result, final Collection<? extends ExpectedTypeInfo> _infos) {
        if (SmartCastProvider.shouldSuggestCast(params)) return;

        Consumer<LookupElement> noTypeCheck = decorateWithoutTypeCheck(result, _infos);

        THashSet<ExpectedTypeInfo> mergedInfos = new THashSet<>(_infos, EXPECTED_TYPE_INFO_STRATEGY);
        List<Runnable> chainedEtc = new ArrayList<>();
        for (final ExpectedTypeInfo info : mergedInfos) {
          // Slow continuations (chained calls etc.) are collected and run after
          // the fast suggestions below.
          Runnable slowContinuation =
            ReferenceExpressionCompletionContributor.fillCompletionVariants(new JavaSmartCompletionParameters(params, info), noTypeCheck);
          ContainerUtil.addIfNotNull(chainedEtc, slowContinuation);
        }
        addExpectedTypeMembers(params, mergedInfos, true, noTypeCheck);

        PsiElement parent = params.getPosition().getParent();
        if (parent instanceof PsiReferenceExpression) {
          CollectConversion.addCollectConversion((PsiReferenceExpression)parent, mergedInfos, noTypeCheck);
        }

        for (final ExpectedTypeInfo info : mergedInfos) {
          BasicExpressionCompletionContributor.fillCompletionVariants(new JavaSmartCompletionParameters(params, info), lookupElement -> {
            // Only add elements whose type is assignable to this expected type.
            final PsiType psiType = JavaCompletionUtil.getLookupElementType(lookupElement);
            if (psiType != null && info.getType().isAssignableFrom(psiType)) {
              result.addElement(decorate(lookupElement, _infos));
            }
          }, result.getPrefixMatcher());
        }

        for (Runnable runnable : chainedEtc) {
          runnable.run();
        }

        // Inheritor search is expensive; only do it on a repeated completion invocation.
        final boolean searchInheritors = params.getInvocationCount() > 1;
        if (searchInheritors) {
          addExpectedTypeMembers(params, mergedInfos, false, noTypeCheck);
        }
      }
    });

    extend(CompletionType.SMART, ExpectedAnnotationsProvider.ANNOTATION_ATTRIBUTE_VALUE, new ExpectedAnnotationsProvider());

    extend(CompletionType.SMART, CatchTypeProvider.CATCH_CLAUSE_TYPE, new CatchTypeProvider());

    extend(CompletionType.SMART, TypeArgumentCompletionProvider.IN_TYPE_ARGS, new TypeArgumentCompletionProvider(true, null));

    extend(CompletionType.SMART, AFTER_NEW, new JavaInheritorsGetter(ConstructorInsertHandler.SMART_INSTANCE));

    extend(CompletionType.SMART, LabelReferenceCompletion.LABEL_REFERENCE, new LabelReferenceCompletion());

    extend(CompletionType.SMART, psiElement(), new FunctionalExpressionCompletionProvider());
    extend(CompletionType.SMART, psiElement().afterLeaf("::"), new MethodReferenceCompletionProvider());
  }

  /** Returns a consumer that decorates and adds elements without any type check. */
  @NotNull
  private static Consumer<LookupElement> decorateWithoutTypeCheck(final CompletionResultSet result, final Collection<? extends ExpectedTypeInfo> infos) {
    return lookupElement -> result.addElement(decorate(lookupElement, infos));
  }

  /**
   * Feeds members of the expected types (and their default types) into the consumer.
   *
   * @param quick when true, only the cheap members are suggested (note the {@code !quick}
   *              inversion when calling {@code addMembers})
   */
  private static void addExpectedTypeMembers(CompletionParameters params,
                                             THashSet<? extends ExpectedTypeInfo> mergedInfos,
                                             boolean quick,
                                             Consumer<? super LookupElement> consumer) {
    PsiElement position = params.getPosition();
    if (!JavaKeywordCompletion.AFTER_DOT.accepts(position)) {
      for (ExpectedTypeInfo info : mergedInfos) {
        new JavaMembersGetter(info.getType(), params).addMembers(!quick, consumer);
        if (!info.getDefaultType().equals(info.getType())) {
          new JavaMembersGetter(info.getDefaultType(), params).addMembers(!quick, consumer);
        }
      }
    }
  }

  @Override
  public void fillCompletionVariants(@NotNull CompletionParameters parameters, @NotNull CompletionResultSet result) {
    // No smart completion inside comments.
    if (parameters.getPosition() instanceof PsiComment) {
      return;
    }

    super.fillCompletionVariants(parameters, JavaCompletionSorting.addJavaSorting(parameters, result));
  }

  /** Wraps a lookup element so it is rendered/inserted with expected-type awareness. */
  public static SmartCompletionDecorator decorate(LookupElement lookupElement, Collection<? extends ExpectedTypeInfo> infos) {
    return new SmartCompletionDecorator(lookupElement, infos);
  }

  public static ExpectedTypeInfo @NotNull [] getExpectedTypes(final CompletionParameters parameters) {
    return getExpectedTypes(parameters.getPosition(), parameters.getCompletionType() == CompletionType.SMART);
  }

  /**
   * Computes the expected types at {@code position}. Inside a {@code throw} statement the
   * result is RuntimeException plus every type in the enclosing method's throws list;
   * otherwise it is delegated to {@link ExpectedTypesProvider}.
   */
  public static ExpectedTypeInfo @NotNull [] getExpectedTypes(PsiElement position, boolean voidable) {
    if (psiElement().withParent(psiElement(PsiReferenceExpression.class).withParent(PsiThrowStatement.class)).accepts(position)) {
      final PsiElementFactory factory = JavaPsiFacade.getElementFactory(position.getProject());
      final PsiClassType classType = factory
        .createTypeByFQClassName(CommonClassNames.JAVA_LANG_RUNTIME_EXCEPTION, position.getResolveScope());
      final List<ExpectedTypeInfo> result = new SmartList<>();
      result.add(new ExpectedTypeInfoImpl(classType, ExpectedTypeInfo.TYPE_OR_SUBTYPE, classType, TailType.SEMICOLON, null, ExpectedTypeInfoImpl.NULL));
      final PsiMethod method = PsiTreeUtil.getContextOfType(position, PsiMethod.class, true);
      if (method != null) {
        for (final PsiClassType type : method.getThrowsList().getReferencedTypes()) {
          result.add(new ExpectedTypeInfoImpl(type, ExpectedTypeInfo.TYPE_OR_SUBTYPE, type, TailType.SEMICOLON, null, ExpectedTypeInfoImpl.NULL));
        }
      }
      return result.toArray(ExpectedTypeInfo.EMPTY_ARRAY);
    }

    PsiExpression expression = PsiTreeUtil.getContextOfType(position, PsiExpression.class, true);
    if (expression == null) return ExpectedTypeInfo.EMPTY_ARRAY;

    return ExpectedTypesProvider.getExpectedTypes(expression, true, voidable, false);
  }

  /**
   * Completes {@code reference} with elements passing {@code filter}, additionally
   * restricted by kind: classes only when {@code acceptClasses}, variables/methods
   * only when {@code acceptMembers}.
   */
  static Set<LookupElement> completeReference(final PsiElement element,
                                              PsiJavaCodeReferenceElement reference,
                                              final ElementFilter filter,
                                              final boolean acceptClasses,
                                              final boolean acceptMembers,
                                              CompletionParameters parameters, final PrefixMatcher matcher) {
    ElementFilter checkClass = new ElementFilter() {
      @Override
      public boolean isAcceptable(Object element, PsiElement context) {
        return filter.isAcceptable(element, context);
      }

      @Override
      public boolean isClassAcceptable(Class hintClass) {
        if (ReflectionUtil.isAssignable(PsiClass.class, hintClass)) {
          return acceptClasses;
        }

        if (ReflectionUtil.isAssignable(PsiVariable.class, hintClass) ||
            ReflectionUtil.isAssignable(PsiMethod.class, hintClass) ||
            ReflectionUtil.isAssignable(CandidateInfo.class, hintClass)) {
          return acceptMembers;
        }
        return false;
      }
    };
    JavaCompletionProcessor.Options options =
      JavaCompletionProcessor.Options.DEFAULT_OPTIONS.withFilterStaticAfterInstance(parameters.getInvocationCount() <= 1);
    return JavaCompletionUtil.processJavaReference(element, reference, checkClass, options, matcher, parameters);
  }

  @Override
  public void beforeCompletion(@NotNull CompletionInitializationContext context) {
    if (context.getCompletionType() != CompletionType.SMART) {
      return;
    }

    if (!context.getEditor().getSelectionModel().hasSelection()) {
      final PsiFile file = context.getFile();
      PsiElement element = file.findElementAt(context.getStartOffset());
      if (element instanceof PsiIdentifier) {
        element = element.getParent();
        // Walk up through reference/call/type chains, extending the replacement
        // range so the whole chain is replaced by the chosen item.
        while (element instanceof PsiJavaCodeReferenceElement || element instanceof PsiCall ||
               element instanceof PsiThisExpression || element instanceof PsiSuperExpression ||
               element instanceof PsiTypeElement ||
               element instanceof PsiClassObjectAccessExpression) {
          int newEnd = element.getTextRange().getEndOffset();
          if (element instanceof PsiMethodCallExpression) {
            // Replace only up to the method name, keeping the argument list.
            newEnd = ((PsiMethodCallExpression)element).getMethodExpression().getTextRange().getEndOffset();
          }
          else if (element instanceof PsiNewExpression) {
            final PsiJavaCodeReferenceElement classReference = ((PsiNewExpression)element).getClassReference();
            if (classReference != null) {
              newEnd = classReference.getTextRange().getEndOffset();
            }
          }
          context.setReplacementOffset(newEnd);
          element = element.getParent();
        }
      }
    }

    PsiElement lastElement = context.getFile().findElementAt(context.getStartOffset() - 1);
    if (lastElement != null && lastElement.getText().equals("(") && lastElement.getParent() instanceof PsiParenthesizedExpression) {
      // don't trim dummy identifier or we won't be able to determine the type of the expression after '('
      // which is needed to insert correct cast
      return;
    }
    context.setDummyIdentifier(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
  }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/iot/v1/resources.proto

package com.google.cloud.iot.v1;

/**
 * The device state, as reported by the device.
 *
 * Protobuf type {@code google.cloud.iot.v1.DeviceState}
 */
public final class DeviceState extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.iot.v1.DeviceState)
    DeviceStateOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeviceState.newBuilder() to construct.
  private DeviceState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DeviceState() {
    binaryData_ = com.google.protobuf.ByteString.EMPTY;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeviceState();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
  // merging repeated occurrences of update_time and preserving unknown fields.
  private DeviceState(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (update_time), wire type 2 (length-delimited)
            {
              com.google.protobuf.Timestamp.Builder subBuilder = null;
              if (updateTime_ != null) {
                subBuilder = updateTime_.toBuilder();
              }
              updateTime_ =
                  input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(updateTime_);
                updateTime_ = subBuilder.buildPartial();
              }
              break;
            }
          case 18: // field 2 (binary_data), wire type 2
            {
              binaryData_ = input.readBytes();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.iot.v1.ResourcesProto
        .internal_static_google_cloud_iot_v1_DeviceState_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.iot.v1.ResourcesProto
        .internal_static_google_cloud_iot_v1_DeviceState_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.iot.v1.DeviceState.class,
            com.google.cloud.iot.v1.DeviceState.Builder.class);
  }

  public static final int UPDATE_TIME_FIELD_NUMBER = 1;
  private com.google.protobuf.Timestamp updateTime_;
  /**
   * [Output only] The time at which this state version was updated in Cloud IoT Core.
   *
   * <code>.google.protobuf.Timestamp update_time = 1;</code>
   *
   * @return Whether the updateTime field is set.
   */
  @java.lang.Override
  public boolean hasUpdateTime() {
    return updateTime_ != null;
  }
  /**
   * [Output only] The time at which this state version was updated in Cloud IoT Core.
   *
   * <code>.google.protobuf.Timestamp update_time = 1;</code>
   *
   * @return The updateTime.
   */
  @java.lang.Override
  public com.google.protobuf.Timestamp getUpdateTime() {
    // Never returns null: substitutes the default Timestamp when unset.
    return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
  }
  /**
   * [Output only] The time at which this state version was updated in Cloud IoT Core.
   *
   * <code>.google.protobuf.Timestamp update_time = 1;</code>
   */
  @java.lang.Override
  public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
    return getUpdateTime();
  }

  public static final int BINARY_DATA_FIELD_NUMBER = 2;
  private com.google.protobuf.ByteString binaryData_;
  /**
   * The device state data.
   *
   * <code>bytes binary_data = 2;</code>
   *
   * @return The binaryData.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBinaryData() {
    return binaryData_;
  }

  // -1 = not computed yet; proto3 messages with no required fields are always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields at their default value are skipped on the wire (proto3 semantics).
    if (updateTime_ != null) {
      output.writeMessage(1, getUpdateTime());
    }
    if (!binaryData_.isEmpty()) {
      output.writeBytes(2, binaryData_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (updateTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateTime());
    }
    if (!binaryData_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(2, binaryData_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.iot.v1.DeviceState)) {
      return super.equals(obj);
    }
    com.google.cloud.iot.v1.DeviceState other = (com.google.cloud.iot.v1.DeviceState) obj;

    if (hasUpdateTime() != other.hasUpdateTime()) return false;
    if (hasUpdateTime()) {
      if (!getUpdateTime().equals(other.getUpdateTime())) return false;
    }
    if (!getBinaryData().equals(other.getBinaryData())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateTime()) {
      hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateTime().hashCode();
    }
    hash = (37 * hash) + BINARY_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getBinaryData().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.iot.v1.DeviceState parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.iot.v1.DeviceState parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.iot.v1.DeviceState parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.iot.v1.DeviceState prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * The device state, as reported by the device.
   *
   * Protobuf type {@code google.cloud.iot.v1.DeviceState}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.iot.v1.DeviceState)
      com.google.cloud.iot.v1.DeviceStateOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.iot.v1.ResourcesProto
          .internal_static_google_cloud_iot_v1_DeviceState_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.iot.v1.ResourcesProto
          .internal_static_google_cloud_iot_v1_DeviceState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.iot.v1.DeviceState.class,
              com.google.cloud.iot.v1.DeviceState.Builder.class);
    }

    // Construct using com.google.cloud.iot.v1.DeviceState.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (updateTimeBuilder_ == null) {
        updateTime_ = null;
      } else {
        updateTime_ = null;
        updateTimeBuilder_ = null;
      }
      binaryData_ = com.google.protobuf.ByteString.EMPTY;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.iot.v1.ResourcesProto
          .internal_static_google_cloud_iot_v1_DeviceState_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.iot.v1.DeviceState getDefaultInstanceForType() {
      return com.google.cloud.iot.v1.DeviceState.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.iot.v1.DeviceState build() {
      com.google.cloud.iot.v1.DeviceState result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.iot.v1.DeviceState buildPartial() {
      com.google.cloud.iot.v1.DeviceState result = new com.google.cloud.iot.v1.DeviceState(this);
      if (updateTimeBuilder_ == null) {
        result.updateTime_ = updateTime_;
      } else {
        result.updateTime_ = updateTimeBuilder_.build();
      }
      result.binaryData_ = binaryData_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.iot.v1.DeviceState) {
        return mergeFrom((com.google.cloud.iot.v1.DeviceState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.iot.v1.DeviceState other) {
      if (other == com.google.cloud.iot.v1.DeviceState.getDefaultInstance()) return this;
      if (other.hasUpdateTime()) {
        mergeUpdateTime(other.getUpdateTime());
      }
      // ByteString default is EMPTY; only non-default values are merged.
      if (other.getBinaryData() != com.google.protobuf.ByteString.EMPTY) {
        setBinaryData(other.getBinaryData());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return
this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.iot.v1.DeviceState parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.iot.v1.DeviceState) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.protobuf.Timestamp updateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> * * @return Whether the updateTime field is set. */ public boolean hasUpdateTime() { return updateTimeBuilder_ != null || updateTime_ != null; } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> * * @return The updateTime. */ public com.google.protobuf.Timestamp getUpdateTime() { if (updateTimeBuilder_ == null) { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } else { return updateTimeBuilder_.getMessage(); } } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateTime_ = value; onChanged(); } else { updateTimeBuilder_.setMessage(value); } return this; } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (updateTimeBuilder_ == null) { updateTime_ = builderForValue.build(); onChanged(); } else { updateTimeBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (updateTime_ != null) { updateTime_ = com.google.protobuf.Timestamp.newBuilder(updateTime_).mergeFrom(value).buildPartial(); } else { updateTime_ = value; } onChanged(); } else { updateTimeBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public Builder clearUpdateTime() { if (updateTimeBuilder_ == null) { updateTime_ = null; onChanged(); } else { updateTime_ = null; updateTimeBuilder_ = null; } return this; } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { onChanged(); return getUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { if (updateTimeBuilder_ != null) { return updateTimeBuilder_.getMessageOrBuilder(); } else { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } } /** * * * <pre> * [Output only] The time at which this state version was updated in Cloud * IoT Core. * </pre> * * <code>.google.protobuf.Timestamp update_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder() { if (updateTimeBuilder_ == null) { updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getUpdateTime(), getParentForChildren(), isClean()); updateTime_ = null; } return updateTimeBuilder_; } private com.google.protobuf.ByteString binaryData_ = com.google.protobuf.ByteString.EMPTY; /** * * * <pre> * The device state data. * </pre> * * <code>bytes binary_data = 2;</code> * * @return The binaryData. */ @java.lang.Override public com.google.protobuf.ByteString getBinaryData() { return binaryData_; } /** * * * <pre> * The device state data. * </pre> * * <code>bytes binary_data = 2;</code> * * @param value The binaryData to set. * @return This builder for chaining. 
*/ public Builder setBinaryData(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } binaryData_ = value; onChanged(); return this; } /** * * * <pre> * The device state data. * </pre> * * <code>bytes binary_data = 2;</code> * * @return This builder for chaining. */ public Builder clearBinaryData() { binaryData_ = getDefaultInstance().getBinaryData(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.iot.v1.DeviceState) } // @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceState) private static final com.google.cloud.iot.v1.DeviceState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.iot.v1.DeviceState(); } public static com.google.cloud.iot.v1.DeviceState getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeviceState> PARSER = new com.google.protobuf.AbstractParser<DeviceState>() { @java.lang.Override public DeviceState parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeviceState(input, extensionRegistry); } }; public static com.google.protobuf.Parser<DeviceState> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeviceState> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.iot.v1.DeviceState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Copyright 2015 Alistair Madden <phantommelon@gmail.com>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.List; import java.util.Iterator; import java.util.Random; /** * A simple model of a Wolf. * Wolves age, move, eat rabbits/foxes, and die. * * @author David J. Barnes and Michael Kolling edited by Alistair Madden * @version 2008.03.30, 2015.02.03 */ public class Wolf extends Animal { // Characteristics shared by all wolves (static fields). // The age at which a wolf can start to breed. private static final int BREEDING_AGE = 10; // The age to which a wolf can live. private static final int MAX_AGE = 40; // The likelihood of a wolf breeding. private static final double BREEDING_PROBABILITY = 0.05; // The maximum number of births. private static final int MAX_LITTER_SIZE = 2; // The food value of a single rabbit. In effect, this is the // number of steps a wolf can go before it has to eat again. private static final int RABBIT_FOOD_VALUE = 7; // The food value of a single fox. In effect, this is the // number of steps a wolf can go before it has to eat again. private static final int FOX_FOOD_VALUE = 7; // A shared random number generator to control breeding. private static final Random rand = Randomizer.getRandom(); // Individual characteristics (instance fields). // The wolf's age. private int age; // The wolf's food level, which is increased by eating rabbits. private int foodLevel; /** * Create a Wolf. 
A wolf can be created as a new born (age zero * and not hungry) or with a random age and food level. * * @param randomAge If true, the wolf will have random age and hunger level. * @param field The field currently occupied. * @param location The location within the field. */ public Wolf(boolean randomAge, Field field, Location location) { super(field, location); if(randomAge) { age = rand.nextInt(MAX_AGE); foodLevel = rand.nextInt(RABBIT_FOOD_VALUE); } else { age = 0; foodLevel = RABBIT_FOOD_VALUE; } } /** * This is what the wolf does most of the time: it hunts for * rabbits. In the process, it might breed, die of hunger, * or die of old age. * @param field The field currently occupied. * @param newWolves A list to add newly born wolves to. */ public void act(List<Animal> newWolves) { incrementAge(); incrementHunger(); if(isAlive()) { giveBirth(newWolves); // Move towards a source of food if found. Location location = getLocation(); Location newLocation = findFood(location); if(newLocation == null) { // No food found - try to move to a free location. newLocation = getField().freeAdjacentLocation(location); } // See if it was possible to move. if(newLocation != null) { setLocation(newLocation); } else { // Overcrowding. setDead(); } } } /** * Increase the age. This could result in the wolf's death. */ private void incrementAge() { age++; if(age > MAX_AGE) { setDead(); } } /** * Make this wolf more hungry. This could result in the wolf's death. */ private void incrementHunger() { foodLevel--; if(foodLevel <= 0) { setDead(); } } /** * Tell the wolf to look for rabbits adjacent to its current location. * Only the first live rabbit is eaten. * @param location Where in the field it is located. * @return Where food was found, or null if it wasn't. 
*/ private Location findFood(Location location) { Field field = getField(); List<Location> adjacent = field.adjacentLocations(getLocation()); Iterator<Location> it = adjacent.iterator(); while(it.hasNext()) { Location where = it.next(); Object animal = field.getObjectAt(where); if(animal instanceof Rabbit) { Rabbit rabbit = (Rabbit) animal; if(rabbit.isAlive()) { rabbit.setDead(); foodLevel = RABBIT_FOOD_VALUE; // Remove the dead rabbit from the field. return where; } } if(animal instanceof Fox) { Fox fox = (Fox) animal; if(fox.isAlive()) { fox.setDead(); foodLevel = FOX_FOOD_VALUE; // Remove the dead rabbit from the field. return where; } } } return null; } /** * Check whether or not this wolf is to give birth at this step. * New births will be made into free adjacent locations. * @param newWolves A list to add newly born wolves to. */ private void giveBirth(List<Animal> newWolves) { // New wolves are born into adjacent locations. // Get a list of adjacent free locations. Field field = getField(); List<Location> free = field.getFreeAdjacentLocations(getLocation()); int births = breed(); for(int b = 0; b < births && free.size() > 0; b++) { Location loc = free.remove(0); Wolf young = new Wolf(false, field, loc); newWolves.add(young); } } /** * Generate a number representing the number of births, * if it can breed. * @return The number of births (may be zero). */ private int breed() { int births = 0; if(canBreed() && rand.nextDouble() <= BREEDING_PROBABILITY) { births = rand.nextInt(MAX_LITTER_SIZE) + 1; } return births; } /** * A wolf can breed if it has reached the breeding age. */ private boolean canBreed() { return age >= BREEDING_AGE; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.LocalTimeOffset.Gap; import org.elasticsearch.common.LocalTimeOffset.Overlap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.OffsetDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.temporal.ChronoField; import java.time.temporal.ChronoUnit; import java.time.temporal.IsoFields; import java.time.temporal.TemporalField; import java.time.temporal.TemporalQueries; import java.time.zone.ZoneOffsetTransition; import java.time.zone.ZoneRules; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Objects; import java.util.concurrent.TimeUnit; /** * A 
strategy for rounding milliseconds since epoch. * <p> * There are two implementations for rounding. * The first one requires a date time unit and rounds to the supplied date time unit (i.e. quarter of year, day of month). * The second one allows you to specify an interval to round to. * <p> * See <a href="https://davecturner.github.io/2019/04/14/timezone-rounding.html">this</a> * blog for some background reading. Its super interesting and the links are * a comedy gold mine. If you like time zones. Or hate them. */ public abstract class Rounding implements Writeable { private static final Logger logger = LogManager.getLogger(Rounding.class); public enum DateTimeUnit { WEEK_OF_WEEKYEAR( (byte) 1, "week", IsoFields.WEEK_OF_WEEK_BASED_YEAR, true, TimeUnit.DAYS.toMillis(7) ) { private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(7); long roundFloor(long utcMillis) { return DateUtils.roundWeekOfWeekYear(utcMillis); } @Override long extraLocalOffsetLookup() { return extraLocalOffsetLookup; } }, YEAR_OF_CENTURY( (byte) 2, "year", ChronoField.YEAR_OF_ERA, false, 12 ) { private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(366); long roundFloor(long utcMillis) { return DateUtils.roundYear(utcMillis); } long extraLocalOffsetLookup() { return extraLocalOffsetLookup; } }, QUARTER_OF_YEAR( (byte) 3, "quarter", IsoFields.QUARTER_OF_YEAR, false, 3 ) { private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(92); long roundFloor(long utcMillis) { return DateUtils.roundQuarterOfYear(utcMillis); } long extraLocalOffsetLookup() { return extraLocalOffsetLookup; } }, MONTH_OF_YEAR( (byte) 4, "month", ChronoField.MONTH_OF_YEAR, false, 1 ) { private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(31); long roundFloor(long utcMillis) { return DateUtils.roundMonthOfYear(utcMillis); } long extraLocalOffsetLookup() { return extraLocalOffsetLookup; } }, DAY_OF_MONTH( (byte) 5, "day", ChronoField.DAY_OF_MONTH, true, 
ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis() ) { long roundFloor(long utcMillis) { return DateUtils.roundFloor(utcMillis, this.ratio); } long extraLocalOffsetLookup() { return ratio; } }, HOUR_OF_DAY( (byte) 6, "hour", ChronoField.HOUR_OF_DAY, true, ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis() ) { long roundFloor(long utcMillis) { return DateUtils.roundFloor(utcMillis, ratio); } long extraLocalOffsetLookup() { return ratio; } }, MINUTES_OF_HOUR( (byte) 7, "minute", ChronoField.MINUTE_OF_HOUR, true, ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis() ) { long roundFloor(long utcMillis) { return DateUtils.roundFloor(utcMillis, ratio); } long extraLocalOffsetLookup() { return ratio; } }, SECOND_OF_MINUTE( (byte) 8, "second", ChronoField.SECOND_OF_MINUTE, true, ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis() ) { long roundFloor(long utcMillis) { return DateUtils.roundFloor(utcMillis, ratio); } long extraLocalOffsetLookup() { return ratio; } }; private final byte id; private final TemporalField field; private final boolean isMillisBased; private final String shortName; /** * ratio to milliseconds if isMillisBased == true or to month otherwise */ protected final long ratio; DateTimeUnit(byte id, String shortName, TemporalField field, boolean isMillisBased, long ratio) { this.id = id; this.shortName = shortName; this.field = field; this.isMillisBased = isMillisBased; this.ratio = ratio; } /** * This rounds down the supplied milliseconds since the epoch down to the next unit. 
In order to retain performance this method * should be as fast as possible and not try to convert dates to java-time objects if possible * * @param utcMillis the milliseconds since the epoch * @return the rounded down milliseconds since the epoch */ abstract long roundFloor(long utcMillis); /** * When looking up {@link LocalTimeOffset} go this many milliseconds * in the past from the minimum millis since epoch that we plan to * look up so that we can see transitions that we might have rounded * down beyond. */ abstract long extraLocalOffsetLookup(); public byte getId() { return id; } public TemporalField getField() { return field; } public static DateTimeUnit resolve(String name) { return DateTimeUnit.valueOf(name.toUpperCase(Locale.ROOT)); } public String shortName() { return shortName; } public static DateTimeUnit resolve(byte id) { switch (id) { case 1: return WEEK_OF_WEEKYEAR; case 2: return YEAR_OF_CENTURY; case 3: return QUARTER_OF_YEAR; case 4: return MONTH_OF_YEAR; case 5: return DAY_OF_MONTH; case 6: return HOUR_OF_DAY; case 7: return MINUTES_OF_HOUR; case 8: return SECOND_OF_MINUTE; default: throw new ElasticsearchException("Unknown date time unit id [" + id + "]"); } } } public abstract void innerWriteTo(StreamOutput out) throws IOException; @Override public void writeTo(StreamOutput out) throws IOException { out.writeByte(id()); innerWriteTo(out); } public abstract byte id(); /** * A strategy for rounding milliseconds since epoch. */ public interface Prepared { /** * Rounds the given value. */ long round(long utcMillis); /** * Given the rounded value (which was potentially generated by * {@link #round(long)}, returns the next rounding value. For * example, with interval based rounding, if the interval is * 3, {@code nextRoundValue(6) = 9}. */ long nextRoundingValue(long utcMillis); /** * Given the rounded value, returns the size between this value and the * next rounded value in specified units if possible. 
*/ double roundingSize(long utcMillis, DateTimeUnit timeUnit); /** * Returns the size of each rounding bucket in timeUnits. */ double roundingSize(DateTimeUnit timeUnit); /** * If this rounding mechanism precalculates rounding points then * this array stores dates such that each date between each entry. * if the rounding mechanism doesn't precalculate points then this * is {@code null}. */ long[] fixedRoundingPoints(); } /** * Prepare to round many times. */ public abstract Prepared prepare(long minUtcMillis, long maxUtcMillis); /** * Prepare to round many dates over an unknown range. Prefer * {@link #prepare(long, long)} if you can find the range because * it'll be much more efficient. */ public abstract Prepared prepareForUnknown(); /** * Prepare rounding using java time classes. Package private for testing. */ abstract Prepared prepareJavaTime(); /** * Rounds the given value. * @deprecated Prefer {@link #prepare} and then {@link Prepared#round(long)} */ @Deprecated public final long round(long utcMillis) { return prepare(utcMillis, utcMillis).round(utcMillis); } /** * Given the rounded value (which was potentially generated by * {@link #round(long)}, returns the next rounding value. For * example, with interval based rounding, if the interval is * 3, {@code nextRoundValue(6) = 9}. * @deprecated Prefer {@link #prepare} and then {@link Prepared#nextRoundingValue(long)} */ @Deprecated public final long nextRoundingValue(long utcMillis) { return prepare(utcMillis, utcMillis).nextRoundingValue(utcMillis); } /** * How "offset" this rounding is from the traditional "start" of the period. * @deprecated We're in the process of abstracting offset *into* Rounding * so keep any usage to migratory shims */ @Deprecated public abstract long offset(); /** * Strip the {@code offset} from these bounds. 
*/ public abstract Rounding withoutOffset(); @Override public abstract boolean equals(Object obj); @Override public abstract int hashCode(); public static Builder builder(DateTimeUnit unit) { return new Builder(unit); } public static Builder builder(TimeValue interval) { return new Builder(interval); } public static class Builder { private final DateTimeUnit unit; private final long interval; private ZoneId timeZone = ZoneOffset.UTC; private long offset = 0; public Builder(DateTimeUnit unit) { this.unit = unit; this.interval = -1; } public Builder(TimeValue interval) { this.unit = null; if (interval.millis() < 1) throw new IllegalArgumentException("Zero or negative time interval not supported"); this.interval = interval.millis(); } public Builder timeZone(ZoneId timeZone) { if (timeZone == null) { throw new IllegalArgumentException("Setting null as timezone is not supported"); } this.timeZone = timeZone; return this; } /** * Sets the offset of this rounding from the normal beginning of the interval. Use this * to start days at 6am or months on the 15th. * @param offset the offset, in milliseconds */ public Builder offset(long offset) { this.offset = offset; return this; } public Rounding build() { Rounding rounding; if (unit != null) { rounding = new TimeUnitRounding(unit, timeZone); } else { rounding = new TimeIntervalRounding(interval, timeZone); } if (offset != 0) { rounding = new OffsetRounding(rounding, offset); } return rounding; } } private abstract class PreparedRounding implements Prepared { /** * Attempt to build a {@link Prepared} implementation that relies on pre-calcuated * "round down" points. If there would be more than {@code max} points then return * the original implementation, otherwise return the new, faster implementation. 
*/ protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) { long[] values = new long[1]; long rounded = round(minUtcMillis); int i = 0; values[i++] = rounded; while ((rounded = nextRoundingValue(rounded)) <= maxUtcMillis) { if (i >= max) { return this; } /* * We expect a time in the last transition (rounded - 1) to round * to the last value we calculated. If it doesn't then we're * probably doing something wrong here.... */ assert values[i - 1] == round(rounded - 1); values = ArrayUtil.grow(values, i + 1); values[i++]= rounded; } return new ArrayRounding(values, i, this); } @Override public long[] fixedRoundingPoints() { return null; } } static class TimeUnitRounding extends Rounding { static final byte ID = 1; private final DateTimeUnit unit; private final ZoneId timeZone; private final boolean unitRoundsToMidnight; TimeUnitRounding(DateTimeUnit unit, ZoneId timeZone) { this.unit = unit; this.timeZone = timeZone; this.unitRoundsToMidnight = this.unit.field.getBaseUnit().getDuration().toMillis() > 3600000L; } TimeUnitRounding(StreamInput in) throws IOException { this(DateTimeUnit.resolve(in.readByte()), in.readZoneId()); } @Override public void innerWriteTo(StreamOutput out) throws IOException { out.writeByte(unit.getId()); out.writeZoneId(timeZone); } @Override public byte id() { return ID; } private LocalDateTime truncateLocalDateTime(LocalDateTime localDateTime) { switch (unit) { case SECOND_OF_MINUTE: return localDateTime.withNano(0); case MINUTES_OF_HOUR: return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), localDateTime.getMinute(), 0, 0); case HOUR_OF_DAY: return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonth(), localDateTime.getDayOfMonth(), localDateTime.getHour(), 0, 0); case DAY_OF_MONTH: LocalDate localDate = localDateTime.query(TemporalQueries.localDate()); return localDate.atStartOfDay(); case WEEK_OF_WEEKYEAR: return 
LocalDateTime.of(localDateTime.toLocalDate(), LocalTime.MIDNIGHT).with(ChronoField.DAY_OF_WEEK, 1); case MONTH_OF_YEAR: return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonthValue(), 1, 0, 0); case QUARTER_OF_YEAR: return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonth().firstMonthOfQuarter(), 1, 0, 0); case YEAR_OF_CENTURY: return LocalDateTime.of(LocalDate.of(localDateTime.getYear(), 1, 1), LocalTime.MIDNIGHT); default: throw new IllegalArgumentException("NOT YET IMPLEMENTED for unit " + unit); } } @Override public Prepared prepare(long minUtcMillis, long maxUtcMillis) { /* * 128 is a power of two that isn't huge. We might be able to do * better if the limit was based on the actual type of prepared * rounding but this'll do for now. */ return prepareOffsetOrJavaTimeRounding(minUtcMillis, maxUtcMillis).maybeUseArray(minUtcMillis, maxUtcMillis, 128); } private TimeUnitPreparedRounding prepareOffsetOrJavaTimeRounding(long minUtcMillis, long maxUtcMillis) { long minLookup = minUtcMillis - unit.extraLocalOffsetLookup(); long maxLookup = maxUtcMillis; long unitMillis = 0; if (false == unitRoundsToMidnight) { /* * Units that round to midnight can round down from two * units worth of millis in the future to find the * nextRoundingValue. 
*/ unitMillis = unit.field.getBaseUnit().getDuration().toMillis(); maxLookup += 2 * unitMillis; } LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(timeZone, minLookup, maxLookup); if (lookup == null) { // Range too long, just use java.time return prepareJavaTime(); } LocalTimeOffset fixedOffset = lookup.fixedInRange(minLookup, maxLookup); if (fixedOffset != null) { // The time zone is effectively fixed if (unitRoundsToMidnight) { return new FixedToMidnightRounding(fixedOffset); } return new FixedNotToMidnightRounding(fixedOffset, unitMillis); } if (unitRoundsToMidnight) { return new ToMidnightRounding(lookup); } return new NotToMidnightRounding(lookup, unitMillis); } @Override public Prepared prepareForUnknown() { LocalTimeOffset offset = LocalTimeOffset.fixedOffset(timeZone); if (offset != null) { if (unitRoundsToMidnight) { return new FixedToMidnightRounding(offset); } return new FixedNotToMidnightRounding(offset, unit.field.getBaseUnit().getDuration().toMillis()); } return prepareJavaTime(); } @Override TimeUnitPreparedRounding prepareJavaTime() { if (unitRoundsToMidnight) { return new JavaTimeToMidnightRounding(); } return new JavaTimeNotToMidnightRounding(unit.field.getBaseUnit().getDuration().toMillis()); } @Override public long offset() { return 0; } @Override public Rounding withoutOffset() { return this; } @Override public int hashCode() { return Objects.hash(unit, timeZone); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } TimeUnitRounding other = (TimeUnitRounding) obj; return Objects.equals(unit, other.unit) && Objects.equals(timeZone, other.timeZone); } @Override public String toString() { return "Rounding[" + unit + " in " + timeZone + "]"; } private abstract class TimeUnitPreparedRounding extends PreparedRounding { @Override public double roundingSize(long utcMillis, DateTimeUnit timeUnit) { if (unit.isMillisBased) { if (timeUnit.isMillisBased) { return 
(double) unit.ratio / timeUnit.ratio; } else { throw new IllegalArgumentException("Cannot use month-based rate unit [" + timeUnit.shortName + "] with non-month based calendar interval histogram [" + unit.shortName + "] only week, day, hour, minute and second are supported for this histogram"); } } else { if (timeUnit.isMillisBased) { return (double) (nextRoundingValue(utcMillis) - utcMillis) / timeUnit.ratio; } else { return (double) unit.ratio / timeUnit.ratio; } } } @Override public double roundingSize(DateTimeUnit timeUnit) { if (unit.isMillisBased) { if (timeUnit.isMillisBased) { return (double) unit.ratio / timeUnit.ratio; } else { throw new IllegalArgumentException("Cannot use month-based rate unit [" + timeUnit.shortName + "] with non-month based calendar interval histogram [" + unit.shortName + "] only week, day, hour, minute and second are supported for this histogram"); } } else { if (timeUnit.isMillisBased) { throw new IllegalArgumentException("Cannot use non month-based rate unit [" + timeUnit.shortName + "] with calendar interval histogram [" + unit.shortName + "] only month, quarter and year are supported for this histogram"); } else { return (double) unit.ratio / timeUnit.ratio; } } } } private class FixedToMidnightRounding extends TimeUnitPreparedRounding { private final LocalTimeOffset offset; FixedToMidnightRounding(LocalTimeOffset offset) { this.offset = offset; } @Override public long round(long utcMillis) { return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis))); } @Override public long nextRoundingValue(long utcMillis) { // TODO this is used in date range's collect so we should optimize it too return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis); } } private class FixedNotToMidnightRounding extends TimeUnitPreparedRounding { private final LocalTimeOffset offset; private final long unitMillis; FixedNotToMidnightRounding(LocalTimeOffset offset, long unitMillis) { this.offset = offset; 
this.unitMillis = unitMillis;
        }

        // Round within the single fixed offset: to local time, floor to the unit, back to UTC.
        @Override
        public long round(long utcMillis) {
            return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis)));
        }

        // The next rounding value is one whole unit past the current one.
        @Override
        public final long nextRoundingValue(long utcMillis) {
            return round(utcMillis + unitMillis);
        }
    }

    /**
     * Rounds down to midnight-anchored units in a zone whose offset may change
     * over the prepared range. Offset transitions (gaps and overlaps) are
     * resolved through the {@code LocalTimeOffset.Strategy} callbacks below.
     */
    private class ToMidnightRounding extends TimeUnitPreparedRounding implements LocalTimeOffset.Strategy {
        private final LocalTimeOffset.Lookup lookup;

        ToMidnightRounding(LocalTimeOffset.Lookup lookup) {
            this.lookup = lookup;
        }

        @Override
        public long round(long utcMillis) {
            LocalTimeOffset offset = lookup.lookup(utcMillis);
            return offset.localToUtc(unit.roundFloor(offset.utcToLocalTime(utcMillis)), this);
        }

        @Override
        public long nextRoundingValue(long utcMillis) {
            // TODO this is actually used in date range's collect so we should optimize it
            return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
        }

        // A local time inside a gap never happened; snap to the instant the gap started.
        @Override
        public long inGap(long localMillis, Gap gap) {
            return gap.startUtcMillis();
        }

        @Override
        public long beforeGap(long localMillis, Gap gap) {
            return gap.previous().localToUtc(localMillis, this);
        }

        // Ambiguous (overlapping) local times resolve via the previous offset.
        @Override
        public long inOverlap(long localMillis, Overlap overlap) {
            return overlap.previous().localToUtc(localMillis, this);
        }

        @Override
        public long beforeOverlap(long localMillis, Overlap overlap) {
            return overlap.previous().localToUtc(localMillis, this);
        }

        @Override
        protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
            if (lookup.anyMoveBackToPreviousDay()) {
                // NOTE(review): the array optimization is refused whenever any transition
                // moves local midnight back to the previous day — presumably because the
                // pre-computed points would not be usable then; confirm before changing.
                return this;
            }
            return super.maybeUseArray(minUtcMillis, maxUtcMillis, max);
        }
    }

    /**
     * Rounds down to non-midnight units in a zone whose offset may change over
     * the prepared range.
     */
    private class NotToMidnightRounding extends AbstractNotToMidnightRounding implements LocalTimeOffset.Strategy {
        private final LocalTimeOffset.Lookup lookup;

        NotToMidnightRounding(LocalTimeOffset.Lookup lookup, long unitMillis) {
            super(unitMillis);
            this.lookup = lookup;
        }

        @Override
        public long round(long utcMillis) {
            LocalTimeOffset offset = lookup.lookup(utcMillis);
            long roundedLocalMillis =
unit.roundFloor(offset.utcToLocalTime(utcMillis)); return offset.localToUtc(roundedLocalMillis, this); } @Override public long inGap(long localMillis, Gap gap) { // Round from just before the start of the gap return gap.previous().localToUtc(unit.roundFloor(gap.firstMissingLocalTime() - 1), this); } @Override public long beforeGap(long localMillis, Gap gap) { return inGap(localMillis, gap); } @Override public long inOverlap(long localMillis, Overlap overlap) { // Convert the overlap at this offset because that'll produce the largest result. return overlap.localToUtcInThisOffset(localMillis); } @Override public long beforeOverlap(long localMillis, Overlap overlap) { if (overlap.firstNonOverlappingLocalTime() - overlap.firstOverlappingLocalTime() >= unitMillis) { return overlap.localToUtcInThisOffset(localMillis); } return overlap.previous().localToUtc(localMillis, this); // This is mostly for Asia/Lord_Howe } } private class JavaTimeToMidnightRounding extends TimeUnitPreparedRounding { @Override public long round(long utcMillis) { LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone); LocalDateTime localMidnight = truncateLocalDateTime(localDateTime); return firstTimeOnDay(localMidnight); } @Override public long nextRoundingValue(long utcMillis) { LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone); LocalDateTime earlierLocalMidnight = truncateLocalDateTime(localDateTime); LocalDateTime localMidnight = nextRelevantMidnight(earlierLocalMidnight); return firstTimeOnDay(localMidnight); } @Override protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) { // We don't have the right information needed to know if this is safe for this time zone so we always use java rounding return this; } private long firstTimeOnDay(LocalDateTime localMidnight) { assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "firstTimeOnDay should only be called at 
midnight"; // Now work out what localMidnight actually means final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(localMidnight); if (currentOffsets.isEmpty() == false) { // There is at least one midnight on this day, so choose the first final ZoneOffset firstOffset = currentOffsets.get(0); final OffsetDateTime offsetMidnight = localMidnight.atOffset(firstOffset); return offsetMidnight.toInstant().toEpochMilli(); } else { // There were no midnights on this day, so we must have entered the day via an offset transition. // Use the time of the transition as it is the earliest time on the right day. ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(localMidnight); return zoneOffsetTransition.getInstant().toEpochMilli(); } } private LocalDateTime nextRelevantMidnight(LocalDateTime localMidnight) { assert localMidnight.toLocalTime().equals(LocalTime.MIDNIGHT) : "nextRelevantMidnight should only be called at midnight"; switch (unit) { case DAY_OF_MONTH: return localMidnight.plus(1, ChronoUnit.DAYS); case WEEK_OF_WEEKYEAR: return localMidnight.plus(7, ChronoUnit.DAYS); case MONTH_OF_YEAR: return localMidnight.plus(1, ChronoUnit.MONTHS); case QUARTER_OF_YEAR: return localMidnight.plus(3, ChronoUnit.MONTHS); case YEAR_OF_CENTURY: return localMidnight.plus(1, ChronoUnit.YEARS); default: throw new IllegalArgumentException("Unknown round-to-midnight unit: " + unit); } } } private class JavaTimeNotToMidnightRounding extends AbstractNotToMidnightRounding { JavaTimeNotToMidnightRounding(long unitMillis) { super(unitMillis); } @Override public long round(long utcMillis) { Instant instant = Instant.ofEpochMilli(utcMillis); final ZoneRules rules = timeZone.getRules(); while (true) { final Instant truncatedTime = truncateAsLocalTime(instant, rules); final ZoneOffsetTransition previousTransition = rules.previousTransition(instant); if (previousTransition == null) { // truncateAsLocalTime cannot have failed if there were no previous 
transitions return truncatedTime.toEpochMilli(); } Instant previousTransitionInstant = previousTransition.getInstant(); if (truncatedTime != null && previousTransitionInstant.compareTo(truncatedTime) < 1) { return truncatedTime.toEpochMilli(); } // There was a transition in between the input time and the truncated time. Return to the transition time and // round that down instead. instant = previousTransitionInstant.minusNanos(1_000_000); } } private Instant truncateAsLocalTime(Instant instant, final ZoneRules rules) { assert unitRoundsToMidnight == false : "truncateAsLocalTime should not be called if unitRoundsToMidnight"; LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, timeZone); final LocalDateTime truncatedLocalDateTime = truncateLocalDateTime(localDateTime); final List<ZoneOffset> currentOffsets = rules.getValidOffsets(truncatedLocalDateTime); if (currentOffsets.isEmpty() == false) { // at least one possibilities - choose the latest one that's still no later than the input time for (int offsetIndex = currentOffsets.size() - 1; offsetIndex >= 0; offsetIndex--) { final Instant result = truncatedLocalDateTime.atOffset(currentOffsets.get(offsetIndex)).toInstant(); if (result.isAfter(instant) == false) { return result; } } assert false : "rounded time not found for " + instant + " with " + this; return null; } else { // The chosen local time didn't happen. This means we were given a time in an hour (or a minute) whose start // is missing due to an offset transition, so the time cannot be truncated. 
return null;
            }
        }
    }

    /**
     * Shared base for rounders of units that do not round down to midnight.
     * Subclasses supply {@code round}; this class derives the next rounding
     * value from it.
     */
    private abstract class AbstractNotToMidnightRounding extends TimeUnitPreparedRounding {
        // Length of one unit in milliseconds.
        protected final long unitMillis;

        AbstractNotToMidnightRounding(long unitMillis) {
            this.unitMillis = unitMillis;
        }

        @Override
        public final long nextRoundingValue(long utcMillis) {
            final long roundedAfterOneIncrement = round(utcMillis + unitMillis);
            if (utcMillis < roundedAfterOneIncrement) {
                return roundedAfterOneIncrement;
            } else {
                // One unit ahead rounded back to (or before) utcMillis —
                // NOTE(review): presumably possible around offset transitions —
                // so probe two units ahead instead.
                return round(utcMillis + 2 * unitMillis);
            }
        }
    }
}

/**
 * Rounds down to multiples of a fixed interval of milliseconds, interpreted
 * in local time in {@code timeZone}.
 */
static class TimeIntervalRounding extends Rounding {
    // Wire identifier; dispatched on by Rounding.read.
    static final byte ID = 2;

    private final long interval;
    private final ZoneId timeZone;

    TimeIntervalRounding(long interval, ZoneId timeZone) {
        if (interval < 1) throw new IllegalArgumentException("Zero or negative time interval not supported");
        this.interval = interval;
        this.timeZone = timeZone;
    }

    // Deserialization counterpart of innerWriteTo.
    TimeIntervalRounding(StreamInput in) throws IOException {
        this(in.readVLong(), in.readZoneId());
    }

    @Override
    public void innerWriteTo(StreamOutput out) throws IOException {
        out.writeVLong(interval);
        out.writeZoneId(timeZone);
    }

    @Override
    public byte id() {
        return ID;
    }

    @Override
    public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
        /*
         * 128 is a power of two that isn't huge. We might be able to do
         * better if the limit was based on the actual type of prepared
         * rounding but this'll do for now.
*/ return prepareOffsetOrJavaTimeRounding(minUtcMillis, maxUtcMillis).maybeUseArray(minUtcMillis, maxUtcMillis, 128); } private TimeIntervalPreparedRounding prepareOffsetOrJavaTimeRounding(long minUtcMillis, long maxUtcMillis) { long minLookup = minUtcMillis - interval; long maxLookup = maxUtcMillis; LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(timeZone, minLookup, maxLookup); if (lookup == null) { return prepareJavaTime(); } LocalTimeOffset fixedOffset = lookup.fixedInRange(minLookup, maxLookup); if (fixedOffset != null) { return new FixedRounding(fixedOffset); } return new VariableRounding(lookup); } @Override public Prepared prepareForUnknown() { LocalTimeOffset offset = LocalTimeOffset.fixedOffset(timeZone); if (offset != null) { return new FixedRounding(offset); } return prepareJavaTime(); } @Override TimeIntervalPreparedRounding prepareJavaTime() { return new JavaTimeRounding(); } @Override public long offset() { return 0; } @Override public Rounding withoutOffset() { return this; } @Override public int hashCode() { return Objects.hash(interval, timeZone); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } TimeIntervalRounding other = (TimeIntervalRounding) obj; return Objects.equals(interval, other.interval) && Objects.equals(timeZone, other.timeZone); } @Override public String toString() { return "Rounding[" + interval + " in " + timeZone + "]"; } private long roundKey(long value, long interval) { if (value < 0) { return (value - interval + 1) / interval; } else { return value / interval; } } private abstract class TimeIntervalPreparedRounding extends PreparedRounding { @Override public double roundingSize(long utcMillis, DateTimeUnit timeUnit) { return roundingSize(timeUnit); } @Override public double roundingSize(DateTimeUnit timeUnit) { if (timeUnit.isMillisBased) { return (double) interval / timeUnit.ratio; } else { throw new IllegalArgumentException("Cannot 
use month-based rate unit [" + timeUnit.shortName + "] with fixed interval based histogram, only week, day, hour, minute and second are supported for " + "this histogram"); } } } /** * Rounds to down inside of a time zone with an "effectively fixed" * time zone. A time zone can be "effectively fixed" if: * <ul> * <li>It is UTC</li> * <li>It is a fixed offset from UTC at all times (UTC-5, America/Phoenix)</li> * <li>It is fixed over the entire range of dates that will be rounded</li> * </ul> */ private class FixedRounding extends TimeIntervalPreparedRounding { private final LocalTimeOffset offset; FixedRounding(LocalTimeOffset offset) { this.offset = offset; } @Override public long round(long utcMillis) { return offset.localToUtcInThisOffset(roundKey(offset.utcToLocalTime(utcMillis), interval) * interval); } @Override public long nextRoundingValue(long utcMillis) { // TODO this is used in date range's collect so we should optimize it too return new JavaTimeRounding().nextRoundingValue(utcMillis); } } /** * Rounds down inside of any time zone, even if it is not * "effectively fixed". See {@link FixedRounding} for a description of * "effectively fixed". 
*/ private class VariableRounding extends TimeIntervalPreparedRounding implements LocalTimeOffset.Strategy { private final LocalTimeOffset.Lookup lookup; VariableRounding(LocalTimeOffset.Lookup lookup) { this.lookup = lookup; } @Override public long round(long utcMillis) { LocalTimeOffset offset = lookup.lookup(utcMillis); return offset.localToUtc(roundKey(offset.utcToLocalTime(utcMillis), interval) * interval, this); } @Override public long nextRoundingValue(long utcMillis) { // TODO this is used in date range's collect so we should optimize it too return new JavaTimeRounding().nextRoundingValue(utcMillis); } @Override public long inGap(long localMillis, Gap gap) { return gap.startUtcMillis(); } @Override public long beforeGap(long localMillis, Gap gap) { return gap.previous().localToUtc(localMillis, this); } @Override public long inOverlap(long localMillis, Overlap overlap) { // Convert the overlap at this offset because that'll produce the largest result. return overlap.localToUtcInThisOffset(localMillis); } @Override public long beforeOverlap(long localMillis, Overlap overlap) { return overlap.previous().localToUtc(roundKey(overlap.firstNonOverlappingLocalTime() - 1, interval) * interval, this); } } /** * Rounds down inside of any time zone using {@link LocalDateTime} * directly. It'll be slower than {@link VariableRounding} and much * slower than {@link FixedRounding}. We use it when we don' have an * "effectively fixed" time zone and we can't get a * {@link LocalTimeOffset.Lookup}. 
We might not be able to get one * because: * <ul> * <li>We don't know how to look up the minimum and maximum dates we * are going to round.</li> * <li>We expect to round over thousands and thousands of years worth * of dates with the same {@link Prepared} instance.</li> * </ul> */ private class JavaTimeRounding extends TimeIntervalPreparedRounding { @Override public long round(long utcMillis) { final Instant utcInstant = Instant.ofEpochMilli(utcMillis); final LocalDateTime rawLocalDateTime = LocalDateTime.ofInstant(utcInstant, timeZone); // a millisecond value with the same local time, in UTC, as `utcMillis` has in `timeZone` final long localMillis = utcMillis + timeZone.getRules().getOffset(utcInstant).getTotalSeconds() * 1000; assert localMillis == rawLocalDateTime.toInstant(ZoneOffset.UTC).toEpochMilli(); final long roundedMillis = roundKey(localMillis, interval) * interval; final LocalDateTime roundedLocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(roundedMillis), ZoneOffset.UTC); // Now work out what roundedLocalDateTime actually means final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(roundedLocalDateTime); if (currentOffsets.isEmpty() == false) { // There is at least one instant with the desired local time. In general the desired result is // the latest rounded time that's no later than the input time, but this could involve rounding across // a timezone transition, which may yield the wrong result final ZoneOffsetTransition previousTransition = timeZone.getRules().previousTransition(utcInstant.plusMillis(1)); for (int offsetIndex = currentOffsets.size() - 1; 0 <= offsetIndex; offsetIndex--) { final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(offsetIndex)); final Instant offsetInstant = offsetTime.toInstant(); if (previousTransition != null && offsetInstant.isBefore(previousTransition.getInstant())) { /* * Rounding down across the transition can yield the * wrong result. 
It's best to return to the transition * time and round that down. */ return round(previousTransition.getInstant().toEpochMilli() - 1); } if (utcInstant.isBefore(offsetTime.toInstant()) == false) { return offsetInstant.toEpochMilli(); } } final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(0)); final Instant offsetInstant = offsetTime.toInstant(); assert false : this + " failed to round " + utcMillis + " down: " + offsetInstant + " is the earliest possible"; return offsetInstant.toEpochMilli(); // TODO or throw something? } else { // The desired time isn't valid because within a gap, so just return the start of the gap ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(roundedLocalDateTime); return zoneOffsetTransition.getInstant().toEpochMilli(); } } @Override public long nextRoundingValue(long utcMillis) { /* * Ok. I'm not proud of this, but it gets the job done. So here is the deal: * its super important that nextRoundingValue be *exactly* the next rounding * value. And I can't come up with a nice way to use the java time API to figure * it out. Thus, we treat "round" like a black box here and run a kind of whacky * binary search, newton's method hybrid. We don't have a "slope" so we can't do * a "real" newton's method, so we just sort of cut the diff in half. As janky * as it looks, it tends to get the job done in under four iterations. Frankly, * `round(round(utcMillis) + interval)` is usually a good guess so we mostly get * it in a single iteration. But daylight savings time and other janky stuff can * make it less likely. 
*/ long prevRound = round(utcMillis); long increment = interval; long from = prevRound; int iterations = 0; while (++iterations < 100) { from += increment; long rounded = round(from); boolean highEnough = rounded > prevRound; if (false == highEnough) { if (increment < 0) { increment = -increment / 2; } continue; } long roundedRoundedDown = round(rounded - 1); boolean tooHigh = roundedRoundedDown > prevRound; if (tooHigh) { if (increment > 0) { increment = -increment / 2; } continue; } assert highEnough && (false == tooHigh); assert roundedRoundedDown == prevRound; if (iterations > 3 && logger.isDebugEnabled()) { logger.debug("Iterated {} time for {} using {}", iterations, utcMillis, TimeIntervalRounding.this.toString()); } return rounded; } /* * After 100 iterations we still couldn't settle on something! Crazy! * The most I've seen in tests is 20 and its usually 1 or 2. If we're * not in a test let's log something and round from our best guess. */ assert false : String.format( Locale.ROOT, "Expected to find the rounding in 100 iterations but didn't for [%d] with [%s]", utcMillis, TimeIntervalRounding.this.toString() ); logger.debug( "Expected to find the rounding in 100 iterations but didn't for {} using {}", utcMillis, TimeIntervalRounding.this.toString() ); return round(from); } } } static class OffsetRounding extends Rounding { static final byte ID = 3; private final Rounding delegate; private final long offset; OffsetRounding(Rounding delegate, long offset) { this.delegate = delegate; this.offset = offset; } OffsetRounding(StreamInput in) throws IOException { // Versions before 7.6.0 will never send this type of rounding. 
delegate = Rounding.read(in); offset = in.readZLong(); } @Override public void innerWriteTo(StreamOutput out) throws IOException { delegate.writeTo(out); out.writeZLong(offset); } @Override public byte id() { return ID; } @Override public Prepared prepare(long minUtcMillis, long maxUtcMillis) { return wrapPreparedRounding(delegate.prepare(minUtcMillis - offset, maxUtcMillis - offset)); } @Override public Prepared prepareForUnknown() { return wrapPreparedRounding(delegate.prepareForUnknown()); } @Override Prepared prepareJavaTime() { return wrapPreparedRounding(delegate.prepareJavaTime()); } private Prepared wrapPreparedRounding(Prepared delegatePrepared) { return new Prepared() { @Override public long round(long utcMillis) { return delegatePrepared.round(utcMillis - offset) + offset; } @Override public long nextRoundingValue(long utcMillis) { return delegatePrepared.nextRoundingValue(utcMillis - offset) + offset; } @Override public double roundingSize(long utcMillis, DateTimeUnit timeUnit) { return delegatePrepared.roundingSize(utcMillis, timeUnit); } @Override public double roundingSize(DateTimeUnit timeUnit) { return delegatePrepared.roundingSize(timeUnit); } @Override public long[] fixedRoundingPoints() { // TODO we can likely translate here return null; } }; } @Override public long offset() { return offset; } @Override public Rounding withoutOffset() { return delegate; } @Override public int hashCode() { return Objects.hash(delegate, offset); } @Override public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { return false; } OffsetRounding other = (OffsetRounding) obj; return delegate.equals(other.delegate) && offset == other.offset; } @Override public String toString() { return delegate + " offset by " + offset; } } public static Rounding read(StreamInput in) throws IOException { byte id = in.readByte(); switch (id) { case TimeUnitRounding.ID: return new TimeUnitRounding(in); case TimeIntervalRounding.ID: return new 
TimeIntervalRounding(in);
            case OffsetRounding.ID:
                return new OffsetRounding(in);
            default:
                throw new ElasticsearchException("unknown rounding id [" + id + "]");
        }
    }

    /**
     * Implementation of {@link Prepared} using pre-calculated "round down" points.
     */
    private static class ArrayRounding implements Prepared {
        // Pre-computed rounding points in ascending order (required by binarySearch);
        // only the first `max` entries are valid.
        private final long[] values;
        private final int max;
        // Fallback used for the operations that were not pre-computed.
        private final Prepared delegate;

        private ArrayRounding(long[] values, int max, Prepared delegate) {
            this.values = values;
            this.max = max;
            this.delegate = delegate;
        }

        @Override
        public long round(long utcMillis) {
            assert values[0] <= utcMillis : utcMillis + " must be after " + values[0];
            int idx = Arrays.binarySearch(values, 0, max, utcMillis);
            assert idx != -1 : "The insertion point is before the array! This should have tripped the assertion above.";
            assert -1 - idx <= values.length : "This insertion point is after the end of the array.";
            if (idx < 0) {
                // No exact match: binarySearch returned (-insertionPoint - 1), so
                // -2 - idx is the index of the largest pre-computed point <= utcMillis.
                idx = -2 - idx;
            }
            return values[idx];
        }

        @Override
        public long nextRoundingValue(long utcMillis) {
            return delegate.nextRoundingValue(utcMillis);
        }

        @Override
        public double roundingSize(long utcMillis, DateTimeUnit timeUnit) {
            return delegate.roundingSize(utcMillis, timeUnit);
        }

        @Override
        public double roundingSize(DateTimeUnit timeUnit) {
            return delegate.roundingSize(timeUnit);
        }

        @Override
        public long[] fixedRoundingPoints() {
            return Arrays.copyOf(values, max);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.docker.springboot;

import java.util.Map;

import javax.annotation.Generated;
import org.apache.camel.component.docker.DockerOperation;
import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * The docker component is used for managing Docker containers.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
// NOTE(review): the raw types previously used here (Class, Map) were
// parameterized (Class<?>, Map<String, Object>). Since this file is generated,
// the same fix should be ported to SpringBootAutoConfigurationMojo.
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.docker")
public class DockerComponentConfiguration {

    /**
     * To use the shared docker configuration
     */
    private DockerConfigurationNestedConfiguration configuration;
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    private Boolean resolvePropertyPlaceholders = true;

    public DockerConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            DockerConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }

    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }

    public static class DockerConfigurationNestedConfiguration {
        public static final Class<?> CAMEL_NESTED_CLASS = org.apache.camel.component.docker.DockerConfiguration.class;
        /**
         * Docker host
         */
        private String host = "localhost";
        /**
         * Docker port
         */
        private Integer port = 2375;
        /**
         * User name to authenticate with
         */
        private String username;
        /**
         * Password to authenticate with
         */
        private String password;
        /**
         * Email address associated with the user
         */
        private String email;
        /**
         * Server address for docker registry.
         */
        private String serverAddress = "https://index.docker.io/v1/";
        /**
         * Request timeout for response (in seconds)
         */
        private Integer requestTimeout;
        /**
         * Use HTTPS communication
         */
        private Boolean secure = false;
        /**
         * Location containing the SSL certificate chain
         */
        private String certPath;
        /**
         * Maximum total connections
         */
        private Integer maxTotalConnections = 100;
        /**
         * Maximum route connections
         */
        private Integer maxPerRouteConnections = 100;
        /**
         * Whether to use logging filter
         */
        private Boolean loggingFilter = false;
        /**
         * Whether to follow redirect filter
         */
        private Boolean followRedirectFilter = false;
        /**
         * Additional configuration parameters as key/value pairs
         */
        private Map<String, Object> parameters;
        /**
         * Which operation to use
         */
        private DockerOperation operation;
        /**
         * Check TLS
         */
        private Boolean tlsVerify = false;
        /**
         * Socket connection mode
         */
        private Boolean socket = true;
        /**
         * The fully qualified class name of the DockerCmdExecFactory
         * implementation to use
         */
        private String cmdExecFactory = "com.github.dockerjava.jaxrs.JerseyDockerCmdExecFactory";

        public String getHost() {
            return host;
        }

        public void setHost(String host) {
            this.host = host;
        }

        public Integer getPort() {
            return port;
        }

        public void setPort(Integer port) {
            this.port = port;
        }

        public String getUsername() {
            return username;
        }

        public void setUsername(String username) {
            this.username = username;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        public String getEmail() {
            return email;
        }

        public void setEmail(String email) {
            this.email = email;
        }

        public String getServerAddress() {
            return serverAddress;
        }

        public void setServerAddress(String serverAddress) {
            this.serverAddress = serverAddress;
        }

        public Integer getRequestTimeout() {
            return requestTimeout;
        }

        public void setRequestTimeout(Integer requestTimeout) {
            this.requestTimeout = requestTimeout;
        }

        public Boolean getSecure() {
            return secure;
        }

        public void setSecure(Boolean secure) {
            this.secure = secure;
        }

        public String getCertPath() {
            return certPath;
        }

        public void setCertPath(String certPath) {
            this.certPath = certPath;
        }

        public Integer getMaxTotalConnections() {
            return maxTotalConnections;
        }

        public void setMaxTotalConnections(Integer maxTotalConnections) {
            this.maxTotalConnections = maxTotalConnections;
        }

        public Integer getMaxPerRouteConnections() {
            return maxPerRouteConnections;
        }

        public void setMaxPerRouteConnections(Integer maxPerRouteConnections) {
            this.maxPerRouteConnections = maxPerRouteConnections;
        }

        public Boolean getLoggingFilter() {
            return loggingFilter;
        }

        public void setLoggingFilter(Boolean loggingFilter) {
            this.loggingFilter = loggingFilter;
        }

        public Boolean getFollowRedirectFilter() {
            return followRedirectFilter;
        }

        public void setFollowRedirectFilter(Boolean followRedirectFilter) {
            this.followRedirectFilter = followRedirectFilter;
        }

        public Map<String, Object> getParameters() {
            return parameters;
        }

        public void setParameters(Map<String, Object> parameters) {
            this.parameters = parameters;
        }

        public DockerOperation getOperation() {
            return operation;
        }

        public void setOperation(DockerOperation operation) {
            this.operation = operation;
        }

        public Boolean getTlsVerify() {
            return tlsVerify;
        }

        public void setTlsVerify(Boolean tlsVerify) {
            this.tlsVerify = tlsVerify;
        }

        public Boolean getSocket() {
            return socket;
        }

        public void setSocket(Boolean socket) {
            this.socket = socket;
        }

        public String getCmdExecFactory() {
            return cmdExecFactory;
        }

        public void setCmdExecFactory(String cmdExecFactory) {
            this.cmdExecFactory = cmdExecFactory;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.partitioned.fixed;

import static java.util.concurrent.TimeUnit.HOURS;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.VM.getHostName;
import static org.apache.geode.test.dunit.VM.getVM;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.assertj.core.api.Java6Assertions.assertThat;
import static org.assertj.core.api.Java6Assertions.assertThatThrownBy;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;

import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

import org.apache.geode.DataSerializable;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.EntryOperation;
import org.apache.geode.cache.FixedPartitionAttributes;
import org.apache.geode.cache.FixedPartitionResolver;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.TransactionDataRebalancedException;
import org.apache.geode.cache.TransactionId;
import org.apache.geode.cache.client.ClientRegionFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.client.PoolFactory;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.ServerOperationException;
import org.apache.geode.cache.client.internal.ClientMetadataService;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.RegionFunctionContext;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.TXManagerImpl;
import org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.SerializableCallableIF;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.rules.CacheRule;
import org.apache.geode.test.dunit.rules.ClientCacheRule;
import org.apache.geode.test.dunit.rules.DistributedDiskDirRule;
import org.apache.geode.test.dunit.rules.DistributedRule;
import org.apache.geode.test.junit.rules.serializable.SerializableTestName;

/**
 * Distributed tests for transactions executed through functions on a fixed-partitioned region.
 * Each test arranges two servers (and sometimes an accessor and/or a client), runs a function
 * inside a transaction, suspends the transaction, moves the primary bucket by bouncing server1,
 * and verifies the expected outcome (typically {@link TransactionDataRebalancedException}).
 * Parameterized over whether the client executes the function by object or by its id.
 */
@RunWith(Parameterized.class)
public class FixedPartitioningWithTransactionDistributedTest implements Serializable {

  private String hostName;
  private String uniqueName;
  private String regionName;
  private VM server1;
  private VM server2;
  private VM accessor;
  private int port1;
  private int port2;
  private transient PoolImpl pool;

  // Target on which the function execution is performed.
  private enum Type {
    ON_REGION, ON_SERVER, ON_MEMBER
  }

  // Single fixed partition name; all keys route to this partition (see MyFixedPartitionResolver).
  private static final String FIXED_PARTITION_NAME = "singleBucket";

  private enum ExecuteFunctionMethod {
    ExecuteFunctionByObject, ExecuteFunctionById
  }

  @Parameters(name = "{0}")
  public static ExecuteFunctionMethod[] data() {
    return ExecuteFunctionMethod.values();
  }

  @Parameter
  public ExecuteFunctionMethod functionExecutionType;

  // True when the parameterized run should execute the function by id from the client.
  private boolean executeFunctionByIdOnClient() {
    return ExecuteFunctionMethod.ExecuteFunctionById == functionExecutionType;
  }

  @Rule
  public DistributedRule distributedRule = new DistributedRule();

  @Rule
  public DistributedDiskDirRule distributedDiskDir = new DistributedDiskDirRule();

  @Rule
  public CacheRule cacheRule = new CacheRule();

  @Rule
  public ClientCacheRule clientCacheRule = new ClientCacheRule();

  @Rule
  public SerializableTestName testName = new SerializableTestName();

  @Before
  public void setup() {
    server1 = getVM(0);
    server2 = getVM(1);
    accessor = getVM(2);
    hostName = getHostName();
    uniqueName = getClass().getSimpleName() + "_" + testName.getMethodName();
    regionName = uniqueName + "_region";
    // Persistent regions are used below; transactions on them must be explicitly allowed.
    Invoke.invokeInEveryVM(() -> {
      TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS = true;
    });
  }

  @After
  public void tearDown() {
    Invoke.invokeInEveryVM(() -> {
      TXManagerImpl.ALLOW_PERSISTENT_TRANSACTIONS = false;
      cacheRule.closeAndNullCache();
    });
  }

  @Test
  public void executeFunctionOnMovedPrimaryBucketFailWithTransactionDataRebalancedException() {
    createData();
    // Bounce server1 so the primary bucket moves while the transaction is suspended.
    server1.invoke(() -> cacheRule.closeAndNullCache());
    TransactionId transactionId = server2.invoke(
        (SerializableCallableIF<TransactionId>) this::doFunctionTransactionAndSuspend);
    server1.invoke(this::restartPrimary);
    server2.invoke(() -> {
      assertThatThrownBy(() -> resumeFunctionTransaction(transactionId))
          .isInstanceOf(TransactionDataRebalancedException.class);
    });
  }

  @Test
  public void accessorExecuteFunctionOnMovedPrimaryBucketFailWithTransactionDataRebalancedException() {
    createData();
    server2.invoke(this::registerFunctions);
    accessor.invoke(() -> createServerRegion(false, 1, 1, true));
    accessor.invoke(this::registerFunctions);
    server1.invoke(() -> cacheRule.closeAndNullCache());
    TransactionId transactionId = accessor.invoke(
        (SerializableCallableIF<TransactionId>) this::doFunctionTransactionAndSuspend);
    server1.invoke(this::restartPrimary);
    accessor.invoke(() -> {
      assertThatThrownBy(() -> resumeFunctionTransaction(transactionId))
          .isInstanceOf(TransactionDataRebalancedException.class);
    });
  }

  @Test
  public void clientExecuteFunctionOnMovedPrimaryBucketFailWithTransactionDataRebalancedException() {
    server1.invoke(() -> createServerRegion(true, 1, 1));
    // Locals intentionally shadow the fields: this test wires the client to the accessor first.
    int port2 = server2.invoke(() -> createServerRegion(false, 1, 1));
    int port1 = accessor.invoke(() -> createServerRegion(false, 1, 1, true));
    server1.invoke((SerializableRunnableIF) this::doPuts);
    server1.invoke(() -> cacheRule.closeAndNullCache());
    server2.invoke(this::registerFunctions);
    accessor.invoke(this::registerFunctions);
    createClientRegion(true, port1, port2);
    Region region = clientCacheRule.getClientCache().getRegion(regionName);
    CacheTransactionManager txManager =
        clientCacheRule.getClientCache().getCacheTransactionManager();
    TransactionId transactionId = doFunctionTransactionAndSuspend(region, txManager);
    accessor.invoke(() -> cacheRule.closeAndNullCache());
    server1.invoke(this::restartPrimary);
    Throwable caughtException =
        catchThrowable(() -> resumeFunctionTransaction(transactionId, region, txManager));
    // From a client the rebalance failure is wrapped in a FunctionException.
    Assertions.assertThat(caughtException).isInstanceOf(FunctionException.class);
    Assertions.assertThat(caughtException.getCause())
        .isInstanceOf(TransactionDataRebalancedException.class);
  }

  @Test
  public void clientCanRollbackFunctionOnRegionWithFilterAndWithSingleHopEnabled() {
    setupServers();
    setupClient();
    Region region = clientCacheRule.getClientCache().getRegion(regionName);
    CacheTransactionManager txManager =
        clientCacheRule.getClientCache().getCacheTransactionManager();
    TransactionId transactionId =
        doFunctionTransactionAndSuspend(region, txManager, new MyTransactionFunction());
    txManager.resume(transactionId);
    txManager.rollback();
    // Rollback must undo the destroy of key 2 performed by MyTransactionFunction.
    server1.invoke(() -> {
      assertThat(cacheRule.getCache().getRegion(regionName).get(2)).isEqualTo(2);
    });
  }

  // Forces a single-hop metadata refresh on the client and waits until it is stable.
  private void forceClientMetadataUpdate(Region region) {
    ClientMetadataService clientMetadataService =
        ((InternalCache) clientCacheRule.getClientCache()).getClientMetadataService();
    clientMetadataService.scheduleGetPRMetaData((InternalRegion) region, true);
    await().atMost(5, HOURS).until(clientMetadataService::isMetadataStable);
  }

  @Test
  public void clientCanRollbackFunctionOnRegionWithoutFilterAndWithSingleHopEnabled() {
    setupServers();
    setupClient();
    Region region = clientCacheRule.getClientCache().getRegion(regionName);
    CacheTransactionManager txManager =
        clientCacheRule.getClientCache().getCacheTransactionManager();
    try {
      TransactionId transactionId = doFunctionTransactionAndSuspend(region, txManager,
          new MyTransactionFunction(), Type.ON_REGION, false);
      txManager.resume(transactionId);
      txManager.rollback();
    } catch (FunctionException functionException) {
      // without filter function can target to any server and may not go to primary.
      assertThat(functionException.getCause()).isInstanceOf(ServerOperationException.class);
      assertThat(functionException.getCause().getCause()).isInstanceOf(FunctionException.class);
      assertThat(functionException.getCause().getCause().getCause())
          .isInstanceOf(TransactionDataRebalancedException.class);
      txManager.rollback();
    }
    server1.invoke(() -> {
      assertThat(cacheRule.getCache().getRegion(regionName).get(2)).isEqualTo(2);
    });
  }

  @Test
  public void clientTransactionFailsIfExecuteFunctionOnMember() {
    setupServers();
    setupClient();
    Region region = clientCacheRule.getClientCache().getRegion(regionName);
    CacheTransactionManager txManager =
        clientCacheRule.getClientCache().getCacheTransactionManager();
    Throwable caughtException = catchThrowable(() -> doFunctionTransactionAndSuspend(region,
        txManager, new MyTransactionFunction(), Type.ON_MEMBER));
    assertThat(caughtException).isInstanceOf(UnsupportedOperationException.class);
    txManager.rollback();
  }

  @Test
  public void clientTransactionFailsIfExecuteFunctionOnServer() {
    setupServers();
    setupClient();
    Region region = clientCacheRule.getClientCache().getRegion(regionName);
    CacheTransactionManager txManager =
        clientCacheRule.getClientCache().getCacheTransactionManager();
    Throwable caughtException = catchThrowable(() -> doFunctionTransactionAndSuspend(region,
        txManager, new MyTransactionFunction(), Type.ON_SERVER));
    assertThat(caughtException).isInstanceOf(FunctionException.class);
    assertThat(caughtException.getCause()).isInstanceOf(UnsupportedOperationException.class);
    txManager.rollback();
  }

  // Starts the primary region on server1 and a secondary on server2; registers the functions.
  private void setupServers() {
    port1 = server1.invoke(() -> createServerRegion(true, 1, 1));
    port2 = server2.invoke(() -> createServerRegion(false, 1, 1));
    server1.invoke(this::registerFunctions);
    server2.invoke(this::registerFunctions);
  }

  // Creates a single-hop-enabled client region and seeds it with data.
  private void setupClient() {
    createClientRegion(true, true, port1, port2);
    Region<Integer, Integer> region = clientCacheRule.getClientCache().getRegion(regionName);
    doPuts(region);
  }

  // Recreates the primary region in this VM and waits until it actually owns bucket 0 again.
  private void restartPrimary() throws Exception {
    createServerRegion(true, 1, 1);
    PartitionedRegion partitionedRegion =
        (PartitionedRegion) cacheRule.getCache().getRegion(regionName);
    assertThat(partitionedRegion.get(1)).isEqualTo(1);
    await().until(() -> partitionedRegion.getBucketPrimary(0)
        .equals(cacheRule.getCache().getInternalDistributedSystem().getDistributedMember()));
  }

  private void createData() {
    server1.invoke(() -> createServerRegion(true, 1, 1));
    server2.invoke(() -> createServerRegion(false, 1, 1));
    server1.invoke((SerializableRunnableIF) this::doPuts);
  }

  private void doPuts() {
    doPuts(cacheRule.getCache().getRegion(regionName));
  }

  private void doPuts(Region<Integer, Integer> region) {
    region.put(1, 1);
    region.put(2, 2);
    region.put(3, 3);
  }

  private int createServerRegion(boolean isPrimary, int redundantCopies, int totalNumOfBuckets)
      throws Exception {
    return createServerRegion(isPrimary, redundantCopies, totalNumOfBuckets, false);
  }

  /**
   * Creates the fixed-partitioned (or accessor) region plus a cache server on an ephemeral port.
   *
   * @return the port the cache server is listening on
   */
  private int createServerRegion(boolean isPrimary, int redundantCopies, int totalNumOfBuckets,
      boolean isAccessor) throws Exception {
    FixedPartitionAttributes fixedPartition = FixedPartitionAttributes
        .createFixedPartition(FIXED_PARTITION_NAME, isPrimary, totalNumOfBuckets);
    PartitionAttributesFactory<Integer, Integer> factory = new PartitionAttributesFactory<>();
    factory.setRedundantCopies(redundantCopies).setTotalNumBuckets(totalNumOfBuckets)
        .setPartitionResolver(new MyFixedPartitionResolver());
    if (isAccessor) {
      // Accessors hold no data and no fixed partition.
      factory.setLocalMaxMemory(0);
    } else {
      factory.addFixedPartitionAttributes(fixedPartition);
    }
    cacheRule.getOrCreateCache().createRegionFactory(
        isAccessor ? RegionShortcut.PARTITION : RegionShortcut.PARTITION_PERSISTENT)
        .setPartitionAttributes(factory.create()).create(regionName);
    CacheServer server = cacheRule.getCache().addCacheServer();
    server.setPort(0);
    server.start();
    return server.getPort();
  }

  private void createClientRegion(boolean connectToFirstPort, int... ports) {
    createClientRegion(connectToFirstPort, false, ports);
  }

  private void createClientRegion(boolean connectToFirstPort, boolean singleHopEnabled,
      int... ports) {
    clientCacheRule.createClientCache();
    // Keep endpoint order deterministic while the pool is created.
    CacheServerTestUtil.disableShufflingOfEndpoints();
    try {
      pool = getPool(singleHopEnabled, ports);
    } finally {
      CacheServerTestUtil.enableShufflingOfEndpoints();
    }
    ClientRegionFactory crf =
        clientCacheRule.getClientCache().createClientRegionFactory(ClientRegionShortcut.LOCAL);
    crf.setPoolName(pool.getName());
    crf.create(regionName);
    if (ports.length > 1 && connectToFirstPort) {
      // first connection to the first port in the list
      pool.acquireConnection(new ServerLocation(hostName, ports[0]));
    }
  }

  private PoolImpl getPool(boolean singleHopEnabled, int... ports) {
    PoolFactory factory = PoolManager.createFactory();
    for (int port : ports) {
      factory.addServer(hostName, port);
    }
    factory.setPRSingleHopEnabled(singleHopEnabled);
    return (PoolImpl) factory.create(uniqueName);
  }

  private void registerFunctions() {
    FunctionService.registerFunction(new MySuspendTransactionFunction());
    FunctionService.registerFunction(new MyResumeTransactionFunction());
    FunctionService.registerFunction(new MyTransactionFunction());
  }

  private TransactionId doFunctionTransactionAndSuspend() {
    Region region = cacheRule.getCache().getRegion(regionName);
    TXManagerImpl manager = cacheRule.getCache().getTxManager();
    return doFunctionTransactionAndSuspend(region, manager);
  }

  private TransactionId doFunctionTransactionAndSuspend(Region region,
      CacheTransactionManager manager) {
    return doFunctionTransactionAndSuspend(region, manager, new MySuspendTransactionFunction());
  }

  private TransactionId doFunctionTransactionAndSuspend(Region region,
      CacheTransactionManager manager, Function function) {
    return doFunctionTransactionAndSuspend(region, manager, function, Type.ON_REGION);
  }

  private TransactionId doFunctionTransactionAndSuspend(Region region,
      CacheTransactionManager manager, Function function, Type type) {
    return doFunctionTransactionAndSuspend(region, manager, function, type, true);
  }

  /**
   * Begins a transaction, executes {@code function} (on region/server/member, optionally with a
   * filter on key 2), waits for the result, and returns the suspended transaction's id.
   */
  private TransactionId doFunctionTransactionAndSuspend(Region region,
      CacheTransactionManager manager, Function function, Type type, boolean withFilter) {
    Execution execution;
    Set<Integer> keySet = new HashSet<>();
    keySet.add(2);
    switch (type) {
      case ON_MEMBER:
        execution = FunctionService.onMembers();
        break;
      case ON_REGION:
        execution = FunctionService.onRegion(region);
        if (withFilter) {
          execution = execution.withFilter(keySet);
        }
        break;
      case ON_SERVER:
        execution = FunctionService.onServers(pool);
        break;
      default:
        throw new RuntimeException("unexpected type");
    }
    boolean executeFunctionByIdOnClient = false;
    if (clientCacheRule.getClientCache() != null) {
      forceClientMetadataUpdate(region);
      // Execute-by-id is only exercised for region/server targets, per the parameterization.
      executeFunctionByIdOnClient = executeFunctionByIdOnClient() && type != Type.ON_MEMBER;
    }
    manager.begin();
    final ResultCollector resultCollector;
    if (executeFunctionByIdOnClient) {
      resultCollector = execution.execute(function.getId());
    } else {
      resultCollector = execution.execute(function);
    }
    resultCollector.getResult();
    return manager.suspend();
  }

  private void resumeFunctionTransaction(TransactionId transactionId) {
    Region region = cacheRule.getCache().getRegion(regionName);
    TXManagerImpl manager = cacheRule.getCache().getTxManager();
    resumeFunctionTransaction(transactionId, region, manager);
  }

  // Resumes the suspended transaction inside a function (filter on key 3), always rolling back.
  private void resumeFunctionTransaction(TransactionId transactionId, Region region,
      CacheTransactionManager manager) {
    Execution execution = FunctionService.onRegion(region);
    manager.resume(transactionId);
    try {
      Set<Integer> keySet = new HashSet<>();
      keySet.add(3);
      ResultCollector resultCollector =
          execution.withFilter(keySet).execute(new MyResumeTransactionFunction());
      resultCollector.getResult();
    } finally {
      manager.rollback();
    }
  }

  /** Routes every key to the single fixed partition. */
  public static class MyFixedPartitionResolver implements FixedPartitionResolver<Integer, Integer> {

    @Override
    public String getPartitionName(final EntryOperation opDetails,
        @Deprecated final Set targetPartitions) {
      return FIXED_PARTITION_NAME;
    }

    @Override
    public Object getRoutingObject(final EntryOperation opDetails) {
      return opDetails.getKey();
    }

    @Override
    public String getName() {
      return getClass().getName();
    }
  }

  /** Touches key 2 inside the transaction so the tx becomes hosted on the target bucket. */
  public static class MySuspendTransactionFunction implements Function, DataSerializable {
    @Override
    public void execute(FunctionContext context) {
      assertThat(context).isInstanceOf(RegionFunctionContext.class);
      PartitionedRegion region =
          (PartitionedRegion) ((RegionFunctionContext) context).getDataSet();
      region.containsValueForKey(2);
      context.getResultSender().lastResult(Boolean.TRUE);
    }

    @Override
    public void toData(DataOutput out) throws IOException {}

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {}
  }

  /** Touches key 3 when the suspended transaction is resumed. */
  public static class MyResumeTransactionFunction implements Function, DataSerializable {
    @Override
    public void execute(FunctionContext context) {
      assertThat(context).isInstanceOf(RegionFunctionContext.class);
      PartitionedRegion region =
          (PartitionedRegion) ((RegionFunctionContext) context).getDataSet();
      region.containsValueForKey(3);
      context.getResultSender().lastResult(Boolean.TRUE);
    }

    @Override
    public void toData(DataOutput out) throws IOException {}

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {}
  }

  /** Destroys key 2 transactionally; rollback tests assert the destroy is undone. */
  public static class MyTransactionFunction implements Function, DataSerializable {
    @Override
    public void execute(FunctionContext context) {
      if (context instanceof RegionFunctionContext) {
        PartitionedRegion region =
            (PartitionedRegion) ((RegionFunctionContext) context).getDataSet();
        region.destroy(2);
        context.getResultSender().lastResult(Boolean.TRUE);
      }
    }

    @Override
    public void toData(DataOutput out) throws IOException {}

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {}
  }
}
/**
 * Copyright 2009 University of Oxford
 *
 * Written by Arno Mittelbach for the Erewhon Project
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * - Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * - Neither the name of the University of Oxford nor the names of its
 * contributors may be used to endorse or promote products derived from this
 * software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package net.sf.gaboto.entities.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.Collection;
import java.util.Iterator;

import net.sf.gaboto.EntityAlreadyExistsException;
import net.sf.gaboto.Gaboto;
import net.sf.gaboto.GabotoFactory;
import net.sf.gaboto.node.GabotoEntity;
import net.sf.gaboto.node.GabotoTimeBasedEntity;
import net.sf.gaboto.test.TimeUtils;
import net.sf.gaboto.time.TimeInstant;
import net.sf.gaboto.time.TimeSpan;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import uk.ac.ox.oucs.oxpoints.gaboto.entities.Building;
import uk.ac.ox.oucs.oxpoints.gaboto.entities.Unit;

import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.vocabulary.DC_11;

/**
 * Tests basic {@link Gaboto} store operations: add/remove of entities, duplicate detection,
 * time-based entity lookup, and equality of empty in-memory stores.
 * Note: changes made through the persistent Gaboto are asserted against the in-memory one,
 * i.e. the tests also exercise propagation between the two stores.
 */
public class TestGaboto {

  @BeforeClass
  public static void setUp() throws Exception {}

  @AfterClass
  public static void tearDown() {}

  // Adding the same entity twice must raise EntityAlreadyExistsException.
  @Test(expected = EntityAlreadyExistsException.class)
  public void testAddDuplicate() throws Exception {
    Gaboto oxp = GabotoFactory.getInMemoryGaboto();
    Unit u = new Unit();
    u.setUri(TimeUtils.generateRandomURI());
    oxp.add(u);
    oxp.add(u);
  }

  // Entities added to (removed from) the persistent store become visible (invisible)
  // through the in-memory store.
  @Test
  public void testAddRemove() throws Exception {
    Gaboto oxp = GabotoFactory.getPersistentGaboto();
    Gaboto oxp_m = GabotoFactory.getInMemoryGaboto();

    Unit u = new Unit();
    u.setUri(TimeUtils.generateRandomURI());

    Building b = new Building();
    b.setUri(TimeUtils.generateRandomURI());
    b.setName("Abcdef");

    // add entities
    oxp.add(u);
    oxp.add(b);

    // test if entities were added
    assertTrue(oxp_m.containsEntity(u));
    assertTrue(oxp_m.containsEntity(b));

    // remove entities
    oxp.remove(u);
    oxp.remove(b);

    // test if entities were removed
    assertTrue(!oxp_m.containsEntity(u));
    assertTrue(!oxp_m.containsEntity(b));
  }

  // A building stored with a time span can be loaded back at an instant inside that span.
  @Test
  public void testLoadEntity() throws Exception {
    Gaboto oxp = GabotoFactory.getPersistentGaboto();
    Gaboto oxp_m = GabotoFactory.getInMemoryGaboto();

    String uri = TimeUtils.generateRandomURI();

    Building b = new Building();
    b.setUri(uri);
    b.setTimeSpan(new TimeSpan(500, 1, 1, 200, 10, 10));
    b.setName("Abcdef");

    oxp.add(b);

    Building b_loaded = (Building) oxp_m.getEntity(uri, new TimeInstant(600, 1, 1));

    assertNotNull("Should have found something", b_loaded);
    assertEquals(b_loaded.getName(), b.getName());
    assertEquals(b_loaded.getTimeSpan(), b.getTimeSpan());
  }

  // Add/remove is also observable at the raw quad level of the named graph set.
  @SuppressWarnings("unchecked")
  @Test
  public void testAddRemove2() throws Exception {
    Gaboto oxp = GabotoFactory.getPersistentGaboto();

    Unit u = new Unit();
    u.setUri(TimeUtils.generateRandomURI());

    // add entity
    oxp.add(u);

    Iterator it = oxp.getNamedGraphSet().findQuads(Node.ANY, Node.createURI(u.getUri()), Node.ANY,
        Node.ANY);
    assertTrue(it.hasNext());

    // remove entity
    oxp.remove(u);
    it = oxp.getNamedGraphSet().findQuads(Node.ANY, Node.createURI(u.getUri()), Node.ANY,
        Node.ANY);
    assertTrue(!it.hasNext());
  }

  // Every entity found via a dc:title URI lookup must be findable again by its title value,
  // both directly and over time. Only the first ~30 URIs are exercised to bound runtime.
  @Test
  public void testGetEntityURIs() throws Exception {
    Gaboto oxp = GabotoFactory.getInMemoryGaboto();

    Collection<String> uris = oxp.getEntityURIsFor(DC_11.title);
    assertTrue(uris.size() > 0);
    int counter = 0;
    for (String u : uris) {
      if (counter++ > 30)
        continue;
      GabotoTimeBasedEntity tb = oxp.getEntityOverTime(u);
      Iterator<GabotoEntity> it = tb.iterator();
      while (it.hasNext()) {
        GabotoEntity entity = it.next();

        Object titleO = entity.getPropertyValue(DC_11.title);
        if (!(titleO instanceof String))
          continue;
        String title = (String) titleO;

        assertTrue(oxp.getEntityURIsFor(DC_11.title, title).contains(u));

        // time based
        Collection<GabotoTimeBasedEntity> tbEntities =
            oxp.loadEntitiesOverTimeWithProperty(DC_11.title, title);
        assertTrue(tbEntities.size() > 0);
      }
    }
  }

  /**
   * Test method for {@link net.sf.gaboto.Gaboto#equals(java.lang.Object)}.
   */
  @Test
  public void testEqualsObject() {
    System.err.println("testGabotoEquals");
    Gaboto g1 = GabotoFactory.getEmptyInMemoryGaboto();
    Model m1 = g1.getContextDescriptionGraph();
    System.err.println("m1:" + m1);
    Gaboto g2 = GabotoFactory.getEmptyInMemoryGaboto();
    System.err.println("m1:" + m1);
    Model m2 = g2.getContextDescriptionGraph();
    System.err.println("m1:" + m1);
    System.err.println("m2:" + m2);
    // Two empty stores must have isomorphic context graphs and compare equal both ways.
    assertTrue(m1.isIsomorphicWith(m2));
    assertEquals("Not equal", g1, g1);
    assertEquals("Not equal", g2, g2);
    assertEquals("Not equal", g1, g2);
    assertEquals("Not equal", g2, g1);
  }
}
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.codeInspection.reference;

import com.intellij.analysis.AnalysisBundle;
import com.intellij.java.analysis.JavaAnalysisBundle;
import com.intellij.openapi.util.Iconable;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiFormatUtilBase;
import com.intellij.ui.CoreAwareIconManager;
import com.intellij.ui.IconManager;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.Stack;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.uast.*;

import javax.swing.*;
import java.util.*;

/**
 * Base implementation for Java elements in the inspection reference graph. Stores access
 * modifier, static/final flags, synthetic-JSP status, and protected-access restrictions as
 * bit flags in {@code myFlags}; tracks outgoing type references and propagates in/out
 * reference marks used by unused-declaration analysis.
 */
public abstract class RefJavaElementImpl extends RefElementImpl implements RefJavaElement {
  private Set<RefClass> myOutTypeReferences; // guarded by this

  // Lowest two bits of myFlags encode the access modifier.
  private static final int ACCESS_MODIFIER_MASK = 0b11;
  private static final int ACCESS_PRIVATE = 0b00;
  private static final int ACCESS_PROTECTED = 0b01;
  private static final int ACCESS_PACKAGE = 0b10;
  private static final int ACCESS_PUBLIC = 0b11;

  private static final int IS_STATIC_MASK = 0b100;
  private static final int IS_FINAL_MASK = 0b1000;

  private static final int IS_SYNTHETIC_JSP_ELEMENT_MASK = 0b100_00000000;
  private static final int FORBID_PROTECTED_ACCESS_MASK = 0b1000_00000000;

  // Child element: inherits access modifier and synthetic-JSP status from its owner.
  protected RefJavaElementImpl(@NotNull String name, @NotNull RefJavaElement owner) {
    super(name, owner);
    String am = owner.getAccessModifier();
    doSetAccessModifier(am);

    final boolean synthOwner = owner.isSyntheticJSP();
    if (synthOwner) {
      setSyntheticJSP(true);
    }
  }

  // Declaration element: derives flags from the UAST declaration's Java PSI.
  protected RefJavaElementImpl(UDeclaration elem, PsiElement psi, RefManager manager) {
    super(getName(elem), psi, manager);
    PsiModifierListOwner javaPsi =
        Objects.requireNonNull(ObjectUtils.tryCast(elem.getJavaPsi(), PsiModifierListOwner.class));
    setAccessModifier(RefJavaUtil.getInstance().getAccessModifier(javaPsi));
    final boolean isSynth =
        javaPsi instanceof PsiMethod && psi instanceof SyntheticElement || psi instanceof PsiSyntheticClass;
    if (isSynth) {
      setSyntheticJSP(true);
    }
    setIsStatic(elem.isStatic());
    setIsFinal(elem.isFinal());
  }

  RefJavaElementImpl(@NotNull UElement declaration, @NotNull PsiElement psi, @NotNull RefManager manager) {
    super(getName(declaration), psi, manager);
  }

  @Override
  @NotNull
  public synchronized Collection<RefClass> getOutTypeReferences() {
    // Lazily initialized; empty set until the first addOutTypeReference call.
    return ObjectUtils.notNull(myOutTypeReferences, Collections.emptySet());
  }

  synchronized void addOutTypeReference(RefClass refClass) {
    if (myOutTypeReferences == null) {
      myOutTypeReferences = new HashSet<>();
    }
    myOutTypeReferences.add(refClass);
  }

  /**
   * Computes a presentable name for a UAST declaration: anonymous classes, JSP holder
   * classes/methods, methods, lambdas, and method references all get special-cased names.
   */
  @NotNull
  private static String getName(@NotNull UElement declaration) {
    PsiElement element = declaration.getJavaPsi();
    if (element instanceof PsiAnonymousClass) {
      PsiAnonymousClass psiAnonymousClass = (PsiAnonymousClass)element;
      PsiClass psiBaseClass = psiAnonymousClass.getBaseClassType().resolve();
      if (psiBaseClass == null) {
        return "anonymous class";
      }
      else {
        return JavaAnalysisBundle.message("inspection.reference.anonymous.name", psiBaseClass.getName());
      }
    }

    if (element instanceof PsiSyntheticClass) {
      final PsiSyntheticClass jspClass = (PsiSyntheticClass)element;
      final PsiFile jspxFile = jspClass.getContainingFile();
      return "<" + jspxFile.getName() + ">";
    }

    if (element instanceof PsiMethod) {
      if (element instanceof SyntheticElement) {
        return JavaAnalysisBundle.message("inspection.reference.jsp.holder.method.anonymous.name");
      }
      return PsiFormatUtil.formatMethod((PsiMethod)element,
                                        PsiSubstitutor.EMPTY,
                                        PsiFormatUtilBase.SHOW_NAME | PsiFormatUtilBase.SHOW_PARAMETERS,
                                        PsiFormatUtilBase.SHOW_TYPE);
    }

    if (declaration instanceof ULambdaExpression || declaration instanceof UCallableReferenceExpression) {
      // Name lambdas/method references after the nearest named enclosing declaration, if any.
      UDeclaration elementDeclaration = UDeclarationKt.getContainingDeclaration(declaration);
      boolean isMethodReference = declaration instanceof UCallableReferenceExpression;
      if (elementDeclaration != null) {
        UAnnotated pDeclaration =
            UastUtils.getParentOfType(elementDeclaration, false, UMethod.class, UClass.class, ULambdaExpression.class, UField.class);
        if (pDeclaration != null && pDeclaration.getSourcePsi() instanceof PsiNamedElement) {
          String name = ((PsiNamedElement)pDeclaration.getSourcePsi()).getName();
          return JavaAnalysisBundle.message(
            isMethodReference ? "inspection.reference.method.reference.name" : "inspection.reference.lambda.name", name);
        }
      }
      return JavaAnalysisBundle.message(
        isMethodReference ? "inspection.reference.default.method.reference.name" : "inspection.reference.default.lambda.name");
    }

    String name = null;
    if (element instanceof PsiNamedElement) {
      name = ((PsiNamedElement)element).getName();
    }
    return name == null ? AnalysisBundle.message("inspection.reference.anonymous") : name;
  }

  @Override
  public boolean isFinal() {
    return checkFlag(IS_FINAL_MASK);
  }

  @Override
  public boolean isStatic() {
    return checkFlag(IS_STATIC_MASK);
  }

  void setIsStatic(boolean isStatic) {
    setFlag(isStatic, IS_STATIC_MASK);
  }

  void setIsFinal(boolean isFinal) {
    setFlag(isFinal, IS_FINAL_MASK);
  }

  @Override
  public boolean isSyntheticJSP() {
    return checkFlag(IS_SYNTHETIC_JSP_ELEMENT_MASK);
  }

  private void setSyntheticJSP(boolean b) {
    setFlag(b, IS_SYNTHETIC_JSP_ELEMENT_MASK);
  }

  // Decodes the two access bits of myFlags into a PsiModifier constant.
  @NotNull
  @Override
  public synchronized String getAccessModifier() {
    long access_id = myFlags & ACCESS_MODIFIER_MASK;
    if (access_id == ACCESS_PRIVATE) return PsiModifier.PRIVATE;
    if (access_id == ACCESS_PUBLIC) return PsiModifier.PUBLIC;
    if (access_id == ACCESS_PACKAGE) return PsiModifier.PACKAGE_LOCAL;
    return PsiModifier.PROTECTED;
  }

  public void setAccessModifier(String am) {
    doSetAccessModifier(am);
  }

  // Encodes the given PsiModifier constant into the two access bits of myFlags.
  private synchronized void doSetAccessModifier(@NotNull String am) {
    final int access_id;

    if (PsiModifier.PRIVATE.equals(am)) {
      access_id = ACCESS_PRIVATE;
    }
    else if (PsiModifier.PUBLIC.equals(am)) {
      access_id = ACCESS_PUBLIC;
    }
    else if (PsiModifier.PACKAGE_LOCAL.equals(am)) {
      access_id = ACCESS_PACKAGE;
    }
    else {
      access_id = ACCESS_PROTECTED;
    }

    myFlags = myFlags & ~ACCESS_MODIFIER_MASK | access_id;
  }

  /** True when this element is (transitively) referenced only from itself. */
  public boolean isSuspiciousRecursive() {
    return isCalledOnlyFrom(this, new Stack<>());
  }

  // DFS over incoming references; callStack prevents revisiting and detects the cycle back
  // to refElement.
  private boolean isCalledOnlyFrom(RefJavaElement refElement, Stack<RefJavaElement> callStack) {
    if (callStack.contains(this)) return refElement == this;
    if (getInReferences().isEmpty()) return false;

    if (refElement instanceof RefMethod) {
      RefMethod refMethod = (RefMethod) refElement;
      for (RefMethod refSuper : refMethod.getSuperMethods()) {
        if (!refSuper.getInReferences().isEmpty()) return false;
      }
      if (refMethod.isConstructor()) {
        boolean unreachable = true;
        for (RefElement refOut : refMethod.getOutReferences()) {
          unreachable &= !refOut.isReachable();
        }
        if (unreachable) return true;
      }
    }

    callStack.push(this);
    for (RefElement refCaller : getInReferences()) {
      if (!((RefElementImpl)refCaller).isSuspicious() ||
          !(refCaller instanceof RefJavaElementImpl) ||
          !((RefJavaElementImpl)refCaller).isCalledOnlyFrom(refElement, callStack)) {
        callStack.pop();
        return false;
      }
    }

    callStack.pop();
    return true;
  }

  /**
   * Records a reference from this element to {@code refWhat} (or to the raw PSI when no ref
   * element exists), updating read/write marks for parameters and firing graph events.
   */
  void addReference(RefElement refWhat,
                    PsiElement psiWhat,
                    UElement from,
                    boolean forWriting,
                    boolean forReading,
                    UExpression expression) {
    PsiElement psiFrom = from.getPsi();
    if (refWhat != null) {
      if (refWhat instanceof RefParameter) {
        if (forWriting) {
          ((RefParameter)refWhat).parameterReferenced(true);
        }
        if (forReading) {
          ((RefParameter)refWhat).parameterReferenced(false);
        }
      }
      addOutReference(refWhat);

      if (refWhat instanceof RefJavaFileImpl) {
        ((RefJavaFileImpl)refWhat).addInReference(this);
        getRefManager().fireNodeMarkedReferenced(psiWhat, psiFrom);
      }
      else if (refWhat instanceof RefJavaElementImpl) {
        ((RefJavaElementImpl)refWhat).markReferenced(this, forWriting, forReading, expression);
      }
    }
    else {
      if (psiWhat instanceof PsiMethod) {
        markEnumUsedIfValuesMethod((PsiMethod)psiWhat, expression);
      }
      getRefManager().fireNodeMarkedReferenced(psiWhat, psiFrom);
    }
  }

  protected void markReferenced(@NotNull RefElementImpl refFrom,
                                boolean forWriting,
                                boolean forReading,
                                @Nullable UExpression expressionFrom) {
    addInReference(refFrom);
    setForbidProtectedAccess(refFrom, expressionFrom);
    getRefManager().fireNodeMarkedReferenced(this, refFrom, false, forReading, forWriting,
                                             expressionFrom == null ? null : expressionFrom.getSourcePsi());
  }

  // A qualified reference or constructor call from a different package forbids weakening
  // this element's visibility to protected.
  void setForbidProtectedAccess(RefElementImpl refFrom, @Nullable UExpression expressionFrom) {
    if (!checkFlag(FORBID_PROTECTED_ACCESS_MASK) &&
        (expressionFrom instanceof UQualifiedReferenceExpression ||
         expressionFrom instanceof UCallExpression &&
         ((UCallExpression)expressionFrom).getKind() == UastCallKind.CONSTRUCTOR_CALL)) {
      waitForInitialized();
      refFrom.waitForInitialized();
      if (RefJavaUtil.getPackage(refFrom) != RefJavaUtil.getPackage(this)) {
        setFlag(true, FORBID_PROTECTED_ACCESS_MASK);
      }
    }
  }

  public boolean isProtectedAccessForbidden() {
    return checkFlag(FORBID_PROTECTED_ACCESS_MASK);
  }

  RefJavaManager getRefJavaManager() {
    return getRefManager().getExtension(RefJavaManager.MANAGER);
  }

  @Override
  public void referenceRemoved() {
    super.referenceRemoved();
    if (isEntry()) {
      getRefJavaManager().getEntryPointsManager().removeEntryPoint(this);
    }
  }

  @Override
  public Icon getIcon(final boolean expanded) {
    // Synthetic JSP elements are shown with the icon of their containing file.
    if (isSyntheticJSP()) {
      final PsiElement element = getPsiElement();
      if (element != null && element.isValid()) {
        IconManager iconManager = IconManager.getInstance();
        if (iconManager instanceof CoreAwareIconManager) {
          return ((CoreAwareIconManager)iconManager).getIcon(element.getContainingFile().getVirtualFile(),
                                                             Iconable.ICON_FLAG_VISIBILITY | Iconable.ICON_FLAG_READ_STATUS,
                                                             element.getProject());
        }
      }
    }
    return super.getIcon(expanded);
  }

  // A call to SomeEnum.values() marks every enum constant of that enum as read.
  private void markEnumUsedIfValuesMethod(PsiMethod psiWhat, UExpression expression) {
    //TODO support kotlin enums
    final PsiClass containingClass = psiWhat.getContainingClass();
    if (containingClass != null && containingClass.isEnum() && "values".equals(psiWhat.getName())) {
      for (PsiField enumConstant : containingClass.getFields()) {
        if (enumConstant instanceof PsiEnumConstant) {
          final RefJavaElementImpl enumConstantReference =
              (RefJavaElementImpl)getRefManager().getReference(enumConstant);
          if (enumConstantReference != null) {
            addOutReference(enumConstantReference);
            enumConstantReference.markReferenced(this, false, true, expression);
          }
        }
      }
    }
  }
}
/*<license>
Copyright 2005 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.vernacular.persistence_III.junit.hibernate3;

import static org.junit.Assert.fail;
import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;

import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.criterion.Order;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.vernacular.exception_II.ExternalError;
import org.ppwcode.vernacular.persistence_III.PersistentBean;
import org.ppwcode.vernacular.persistence_III.dao.hibernate3.Hibernate3PagingList;

/**
 * A simple helper class for hibernate actions within jUnit tests.
 * <p>
 * A single {@link SessionFactory} is built once per test class from
 * {@link #JUNIT_CONFIG_FILE_LOCATION} and torn down afterwards. Subclasses drive a session
 * and transaction explicitly via {@link #openSession()}, {@link #beginTransaction()}, etc.
 * All helper methods translate {@link HibernateException}s into jUnit failures.
 *
 * @author David Van Keer
 * @author Jan Dockx
 * @author Tom Mahieu
 * @author Peopleware n.v.
 */
@Copyright("2004 - $Date$, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision$", date = "$Date$")
public abstract class AbstractHibernate3Test {

  private static final Log _LOG = LogFactory.getLog(AbstractHibernate3Test.class);

  private static SessionFactory $sessionFactory;

  // NOTE(review): the file name says "hibernate2" although this is the hibernate3 helper —
  // verify against the test resources before renaming.
  private static final String JUNIT_CONFIG_FILE_LOCATION = "/hibernate2_junit.cfg.xml";

  /**
   * Builds the shared {@link SessionFactory} from {@link #JUNIT_CONFIG_FILE_LOCATION}.
   * <p>
   * Fixed: JUnit 4 requires {@code @BeforeClass} methods to be {@code public static}; as an
   * instance method the runner rejected the whole test class, so the fixture never ran.
   *
   * @throws HibernateException when the configuration cannot be read or the factory built
   */
  @BeforeClass
  public static void initSessionFactory() throws HibernateException {
    _LOG.debug("reading Hibernate config from " + JUNIT_CONFIG_FILE_LOCATION);
    Configuration configuration = new Configuration();
    configuration.configure(JUNIT_CONFIG_FILE_LOCATION);
    $sessionFactory = configuration.buildSessionFactory();
    _LOG.debug("Hibernate config read ok.");
  }

  /**
   * Closes and discards the shared {@link SessionFactory}.
   * <p>
   * Fixed: made {@code static} (JUnit 4 {@code @AfterClass} contract) and the factory is now
   * {@link SessionFactory#close() closed} instead of merely dereferenced, releasing pooled
   * connections and caches.
   */
  @AfterClass
  public static void deinitSessionFactory() {
    _LOG.debug("discarding hibernate session factory");
    if ($sessionFactory != null) {
      $sessionFactory.close();
      $sessionFactory = null;
    }
    _LOG.debug("hibernate session factory discarded");
  }

  /** Opens a fresh hibernate session; fails the test when that is impossible. */
  public void openSession() {
    try {
      $session = $sessionFactory.openSession();
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Couldn't open a new hibernate session.");
    }
  }

  /** Closes the current session and clears the reference; fails the test on error. */
  public void closeSession() {
    try {
      $session.close();
      $session = null;
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to close the hibernate session.");
    }
  }

  /** Starts a transaction on the current session; fails the test on error. */
  public void beginTransaction() {
    try {
      $tx = $session.beginTransaction();
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Couldn't start a hibernate transaction.");
    }
  }

  /** Commits and clears the current transaction; fails the test on error. */
  public void commitTransaction() {
    try {
      $tx.commit();
      $tx = null;
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to commit the hibernate transaction.");
    }
  }

  /** Rolls back and clears the current transaction; fails the test on error. */
  public void rollbackTransaction() {
    try {
      $tx.rollback();
      $tx = null;
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to cancel the hibernate transaction.");
    }
  }

  /**
   * Saves {@code object} in the current session.
   *
   * @param object the object to persist
   * @return the generated id when {@code object} is a {@link PersistentBean}; {@code null}
   *         otherwise (or when saving failed, after failing the test)
   */
  public Object create(final Object object) {
    try {
      $session.save(object);
      if (object instanceof PersistentBean) {
        return ((PersistentBean<?, ?>)object).getId();
      }
      else {
        return null;
      }
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to create the object in the database.");
    }
    return null;
  }

  /** Updates {@code object} in the current session; fails the test on error. */
  public void update(final Object object) {
    try {
      $session.update(object);
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to update the object in the database.");
    }
  }

  /** Deletes {@code object} in the current session; fails the test on error. */
  public void delete(final Object object) {
    try {
      $session.delete(object);
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to delete the object to the database.");
    }
  }

  /**
   * Retrieves the object of type {@code clazz} with id {@code id}.
   * <p>
   * NOTE(review): {@code Session.load} may return an uninitialized proxy and only raise
   * {@link ObjectNotFoundException} on first access; callers relying on the {@code null}
   * return for missing rows should confirm, or this could switch to {@code Session.get}.
   *
   * @return the loaded object, or {@code null} when it does not exist
   */
  public Object retrieve(final Class<?> clazz, final Serializable id) {
    Object result = null;
    try {
      result = $session.load(clazz, id);
    }
    catch (ObjectNotFoundException onfExc) {
      return null;
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to retrieve the object from the database.");
    }
    return result;
  }

  /** Retrieves all instances of {@code persistentObjectType} as a set. */
  public <_PersistentObject_> Set<_PersistentObject_> retrieve(final Class<_PersistentObject_> persistentObjectType) {
    Criteria crit = $session.createCriteria(persistentObjectType);
    @SuppressWarnings("unchecked")
    Set<_PersistentObject_> retrieve = (Set<_PersistentObject_>)retrieve(crit);
    return retrieve;
  }

  public final static int DEFAULT_PAGE_SIZE = 100;

  /** Page size used by {@link #retrievePages(Class)}; override to change. */
  public int getPageSize() {
    return DEFAULT_PAGE_SIZE;
  }

  /**
   * Retrieves all instances of {@code persistentObjectType} as a paging list ordered by id.
   *
   * @return the paging list, or {@code null} after failing the test on error
   */
  public <_Id_ extends Serializable, _PersistentBean_ extends PersistentBean<_Id_, ?>>
  Hibernate3PagingList<_Id_, _PersistentBean_> retrievePages(final Class<_PersistentBean_> persistentObjectType) {
    try {
      Query cq = $session.createQuery("select count(*) from " + persistentObjectType.getName());
      Criteria crit = $session.createCriteria(persistentObjectType);
      crit.addOrder(Order.asc("id"));
      return new Hibernate3PagingList<_Id_, _PersistentBean_>(crit, cq, getPageSize());
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to retrieve objects from database");
      return null;
    }
    catch (ExternalError peErr) {
      peErr.printStackTrace();
      fail("Failed to retrieve objects from database");
      return null;
    }
  }

  /** Executes {@code criteria} and collects the (de-duplicated) results in a set. */
  public Set<?> retrieve(final Criteria criteria) {
    Set<Object> results = new HashSet<Object>();
    try {
      List<?> list = criteria.list();
      results.addAll(list);
    }
    catch (HibernateException hExc) {
      hExc.printStackTrace();
      fail("Failed to retrieve objects from database");
    }
    return results;
  }

  /**
   * Executes the given HQL query and returns the result list.
   * <p>
   * Fixed: the old {@code assert false : ...} is a no-op unless the JVM runs with {@code -ea},
   * so a {@link HibernateException} was silently swallowed and {@code null} returned; it now
   * fails the test like every other helper in this class (and the message typo is corrected).
   *
   * @return the query results, or {@code null} after failing the test on error
   */
  public List<?> retrieve(String HqlQueryString) {
    List<?> roles = null;
    try {
      Query q = getSession().createQuery(HqlQueryString);
      roles = q.list();
    }
    catch (HibernateException e) {
      e.printStackTrace();
      fail("HibernateException should not happen: " + e);
    }
    return roles;
  }

  /** The current hibernate session (may be {@code null} outside open/close). */
  public Session getSession() {
    return $session;
  }

  private Session $session;

  /** The current transaction (may be {@code null} outside begin/commit/rollback). */
  public Transaction getTransaction() {
    return $tx;
  }

  private Transaction $tx;

}
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.plugins.ide.eclipse.model;

import com.google.common.base.Preconditions;
import groovy.lang.Closure;
import org.gradle.api.Action;
import org.gradle.api.Incubating;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.internal.file.FileTreeInternal;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.internal.tasks.compile.CompilationSourceDirs;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.SetProperty;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.compile.JavaCompile;
import org.gradle.internal.jvm.JavaModuleDetector;
import org.gradle.internal.xml.XmlTransformer;
import org.gradle.plugins.ide.api.XmlFileContentMerger;
import org.gradle.plugins.ide.eclipse.model.internal.ClasspathFactory;
import org.gradle.plugins.ide.eclipse.model.internal.FileReferenceFactory;
import org.gradle.plugins.ide.internal.IdeArtifactRegistry;
import org.gradle.plugins.ide.internal.resolver.DefaultGradleApiSourcesResolver;
import org.gradle.util.internal.ConfigureUtil;

import javax.inject.Inject;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * The build path settings for the generated Eclipse project. Used by the
 * {@link org.gradle.plugins.ide.eclipse.GenerateEclipseClasspath} task to generate an Eclipse .classpath file.
 * <p>
 * The following example demonstrates the various configuration options.
 * Keep in mind that all properties have sensible defaults; only configure them explicitly
 * if the defaults don't match your needs.
 *
 * <pre class='autoTested'>
 * plugins {
 *     id 'java'
 *     id 'eclipse'
 * }
 *
 * configurations {
 *   provided
 *   someBoringConfig
 * }
 *
 * eclipse {
 *   //if you want parts of paths in resulting file to be replaced by variables (files):
 *   pathVariables 'GRADLE_HOME': file('/best/software/gradle'), 'TOMCAT_HOME': file('../tomcat')
 *
 *   classpath {
 *     //you can tweak the classpath of the Eclipse project by adding extra configurations:
 *     plusConfigurations += [ configurations.provided ]
 *
 *     //you can also remove configurations from the classpath:
 *     minusConfigurations += [ configurations.someBoringConfig ]
 *
 *     //if you want to append extra containers:
 *     containers 'someFriendlyContainer', 'andYetAnotherContainer'
 *
 *     //customizing the classes output directory:
 *     defaultOutputDir = file('build-eclipse')
 *
 *     //default settings for downloading sources and Javadoc:
 *     downloadSources = true
 *     downloadJavadoc = false
 *
 *     //if you want to expose test classes to dependent projects
 *     containsTestFixtures = true
 *
 *     //customizing which Eclipse source directories should be marked as test
 *     testSourceSets = [sourceSets.test]
 *
 *     //customizing which dependencies should be marked as test on the project's classpath
 *     testConfigurations = [configurations.testCompileClasspath, configurations.testRuntimeClasspath]
 *   }
 * }
 * </pre>
 *
 * For tackling edge cases, users can perform advanced configuration on the resulting XML file.
 * It is also possible to affect the way that the Eclipse plugin merges the existing configuration
 * via beforeMerged and whenMerged closures.
 * <p>
 * The beforeMerged and whenMerged closures receive a {@link Classpath} object.
 * <p>
 * Examples of advanced configuration:
 *
 * <pre class='autoTested'>
 * plugins {
 *     id 'java'
 *     id 'eclipse'
 * }
 *
 * eclipse {
 *   classpath {
 *     file {
 *       //if you want to mess with the resulting XML in whatever way you fancy
 *       withXml {
 *         def node = it.asNode()
 *         node.appendNode('xml', 'is what I love')
 *       }
 *
 *       //closure executed after .classpath content is loaded from existing file
 *       //but before gradle build information is merged
 *       beforeMerged { classpath -&gt;
 *         //you can tinker with the {@link Classpath} here
 *       }
 *
 *       //closure executed after .classpath content is loaded from existing file
 *       //and after gradle build information is merged
 *       whenMerged { classpath -&gt;
 *         //you can tinker with the {@link Classpath} here
 *       }
 *     }
 *   }
 * }
 * </pre>
 */
public class EclipseClasspath {
    private Iterable<SourceSet> sourceSets;

    private Collection<Configuration> plusConfigurations = new ArrayList<Configuration>();
    private Collection<Configuration> minusConfigurations = new ArrayList<Configuration>();

    private Set<String> containers = new LinkedHashSet<String>();

    private File defaultOutputDir;

    private boolean downloadSources = true;
    private boolean downloadJavadoc;

    private XmlFileContentMerger file = new XmlFileContentMerger(new XmlTransformer());

    private Map<String, File> pathVariables = new HashMap<String, File>();

    private boolean projectDependenciesOnly;

    private List<File> classFolders;

    private final org.gradle.api.Project project;

    private final Property<Boolean> containsTestFixtures;

    private final SetProperty<SourceSet> testSourceSets;

    private final SetProperty<Configuration> testConfigurations;

    /** Created by Gradle's object factory for the project this classpath model belongs to. */
    @Inject
    public EclipseClasspath(org.gradle.api.Project project) {
        this.project = project;
        this.containsTestFixtures = project.getObjects().property(Boolean.class).convention(false);
        this.testSourceSets = project.getObjects().setProperty(SourceSet.class);
        this.testConfigurations = project.getObjects().setProperty(Configuration.class);
    }

    /**
     * The source sets to be added.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public Iterable<SourceSet> getSourceSets() {
        return sourceSets;
    }

    public void setSourceSets(Iterable<SourceSet> sourceSets) {
        this.sourceSets = sourceSets;
    }

    /**
     * The configurations whose files are to be added as classpath entries.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public Collection<Configuration> getPlusConfigurations() {
        return plusConfigurations;
    }

    public void setPlusConfigurations(Collection<Configuration> plusConfigurations) {
        this.plusConfigurations = plusConfigurations;
    }

    /**
     * The configurations whose files are to be excluded from the classpath entries.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public Collection<Configuration> getMinusConfigurations() {
        return minusConfigurations;
    }

    public void setMinusConfigurations(Collection<Configuration> minusConfigurations) {
        this.minusConfigurations = minusConfigurations;
    }

    /**
     * The classpath containers to be added.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public Set<String> getContainers() {
        return containers;
    }

    public void setContainers(Set<String> containers) {
        this.containers = containers;
    }

    /**
     * The default output directory where Eclipse puts compiled classes.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public File getDefaultOutputDir() {
        return defaultOutputDir;
    }

    public void setDefaultOutputDir(File defaultOutputDir) {
        this.defaultOutputDir = defaultOutputDir;
    }

    /**
     * Whether to download and associate source Jars with the dependency Jars. Defaults to true.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public boolean isDownloadSources() {
        return downloadSources;
    }

    public void setDownloadSources(boolean downloadSources) {
        this.downloadSources = downloadSources;
    }

    /**
     * Whether to download and associate Javadoc Jars with the dependency Jars. Defaults to false.
     * <p>
     * See {@link EclipseClasspath} for an example.
     */
    public boolean isDownloadJavadoc() {
        return downloadJavadoc;
    }

    public void setDownloadJavadoc(boolean downloadJavadoc) {
        this.downloadJavadoc = downloadJavadoc;
    }

    /**
     * See {@link #file(Action)}.
     */
    public XmlFileContentMerger getFile() {
        return file;
    }

    public void setFile(XmlFileContentMerger file) {
        this.file = file;
    }

    /** Path variable name to file mappings used when writing entries to the .classpath file. */
    public Map<String, File> getPathVariables() {
        return pathVariables;
    }

    public void setPathVariables(Map<String, File> pathVariables) {
        this.pathVariables = pathVariables;
    }

    /** Whether only project dependencies (no external/file dependencies) go on the classpath. */
    public boolean isProjectDependenciesOnly() {
        return projectDependenciesOnly;
    }

    public void setProjectDependenciesOnly(boolean projectDependenciesOnly) {
        this.projectDependenciesOnly = projectDependenciesOnly;
    }

    /** Additional folders containing compiled classes to reference from the classpath. */
    public List<File> getClassFolders() {
        return classFolders;
    }

    public void setClassFolders(List<File> classFolders) {
        this.classFolders = classFolders;
    }

    /** The Gradle project this classpath model belongs to. */
    public org.gradle.api.Project getProject() {
        return project;
    }

    /**
     * Further classpath containers to be added.
     * <p>
     * See {@link EclipseClasspath} for an example.
     *
     * @param containers the classpath containers to be added
     */
    public void containers(String... containers) {
        Preconditions.checkNotNull(containers);
        this.containers.addAll(Arrays.asList(containers));
    }

    /**
     * Enables advanced configuration like tinkering with the output XML or affecting the way
     * that the contents of an existing .classpath file is merged with Gradle build information.
     * The object passed to the whenMerged{} and beforeMerged{} closures is of type {@link Classpath}.
     * <p>
     * See {@link EclipseProject} for an example.
     */
    public void file(Closure closure) {
        ConfigureUtil.configure(closure, file);
    }

    /**
     * Enables advanced configuration like tinkering with the output XML or affecting the way
     * that the contents of an existing .classpath file is merged with Gradle build information.
     * The object passed to the whenMerged{} and beforeMerged{} closures is of type {@link Classpath}.
     * <p>
     * See {@link EclipseProject} for an example.
     *
     * @since 3.5
     */
    public void file(Action<? super XmlFileContentMerger> action) {
        action.execute(file);
    }

    /**
     * Calculates, resolves and returns dependency entries of this classpath.
     */
    public List<ClasspathEntry> resolveDependencies() {
        ProjectInternal projectInternal = (ProjectInternal) this.project;
        IdeArtifactRegistry ideArtifactRegistry = projectInternal.getServices().get(IdeArtifactRegistry.class);
        // Dependencies go on the module path only when compileJava infers one AND the source
        // roots actually contain module sources.
        boolean inferModulePath = false;
        Task javaCompileTask = project.getTasks().findByName(JavaPlugin.COMPILE_JAVA_TASK_NAME);
        if (javaCompileTask instanceof JavaCompile) {
            JavaCompile javaCompile = (JavaCompile) javaCompileTask;
            inferModulePath = javaCompile.getModularity().getInferModulePath().get();
            if (inferModulePath) {
                List<File> sourceRoots = CompilationSourceDirs.inferSourceRoots((FileTreeInternal) javaCompile.getSource());
                inferModulePath = JavaModuleDetector.isModuleSource(true, sourceRoots);
            }
        }
        ClasspathFactory classpathFactory = new ClasspathFactory(this, ideArtifactRegistry, new DefaultGradleApiSourcesResolver(projectInternal.newDetachedResolver()), inferModulePath);
        return classpathFactory.createEntries();
    }

    /**
     * Merges the resolved Gradle classpath entries into {@code xmlClasspath}, invoking the
     * user's beforeMerged hook first and the whenMerged hook last.
     */
    @SuppressWarnings("unchecked")
    public void mergeXmlClasspath(Classpath xmlClasspath) {
        file.getBeforeMerged().execute(xmlClasspath);
        List<ClasspathEntry> entries = resolveDependencies();
        xmlClasspath.configure(entries);
        file.getWhenMerged().execute(xmlClasspath);
    }

    /** A {@link FileReferenceFactory} pre-configured with this classpath's path variables. */
    public FileReferenceFactory getFileReferenceFactory() {
        FileReferenceFactory referenceFactory = new FileReferenceFactory();
        for (Map.Entry<String, File> entry : pathVariables.entrySet()) {
            referenceFactory.addPathVariable(entry.getKey(), entry.getValue());
        }
        return referenceFactory;
    }

    /**
     * Returns {@code true} if the classpath contains test fixture classes that should be visible
     * through incoming project dependencies.
     *
     * @since 6.8
     */
    @Incubating
    public Property<Boolean> getContainsTestFixtures() {
        return containsTestFixtures;
    }

    /**
     * Returns the test source sets.
     * <p>
     * The source directories in the returned source sets are marked with the 'test' classpath attribute on the Eclipse classpath.
     * <p>
     * The default value contains the following elements:
     * <ul>
     *     <li>All source sets with names containing the 'test' substring (case ignored)</li>
     *     <li>All source sets defined via the jvm-test-suite DSL</li>
     * </ul>
     *
     * @since 7.5
     */
    @Incubating
    public SetProperty<SourceSet> getTestSourceSets() {
        return testSourceSets;
    }

    /**
     * Returns the test configurations.
     * <p>
     * All resolved dependencies that appear only in the returned dependency configurations are marked with the 'test' classpath attribute on the Eclipse classpath.
     * <p>
     * The default value contains the following elements:
     * <ul>
     *     <li>The compile and runtime configurations of the {@link #testSourceSets}, including the jvm-test-suite source sets</li>
     *     <li>Other configurations with names containing the 'test' substring (case ignored)</li>
     * </ul>
     * <p>
     * Note, that this property should contain resolvable configurations only.
     *
     * @since 7.5
     */
    @Incubating
    public SetProperty<Configuration> getTestConfigurations() {
        return testConfigurations;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.runtime.tasks;

import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.concurrent.NeverCompleteFuture;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * A {@link ProcessingTimeService} which assigns as current processing time the result of calling
 * {@link System#currentTimeMillis()} and registers timers using a {@link ScheduledThreadPoolExecutor}.
 */
@Internal
public class SystemProcessingTimeService extends ProcessingTimeService {

    private static final Logger LOG = LoggerFactory.getLogger(SystemProcessingTimeService.class);

    // Lifecycle states; transitions are one-way ALIVE -> QUIESCED -> SHUTDOWN (see quiesce()
    // and shutdownService()) and are performed with compareAndSet on `status`.
    private static final int STATUS_ALIVE = 0;
    private static final int STATUS_QUIESCED = 1;
    private static final int STATUS_SHUTDOWN = 2;

    // ------------------------------------------------------------------------

    /** The executor service that schedules and calls the triggers of this task. */
    private final ScheduledThreadPoolExecutor timerService;

    /** Context through which the actual timer callbacks are invoked when a timer fires. */
    private final ScheduledCallbackExecutionContext callbackExecutionContext;

    /** Current lifecycle state; one of the STATUS_* constants above. */
    private final AtomicInteger status;

    @VisibleForTesting
    SystemProcessingTimeService(ScheduledCallbackExecutionContext callbackExecutionContext) {
        this(callbackExecutionContext, null);
    }

    /**
     * @param callbackExecutionContext context used to invoke fired timer callbacks (non-null)
     * @param threadFactory optional factory for the single timer thread; pool default if null
     */
    SystemProcessingTimeService(ScheduledCallbackExecutionContext callbackExecutionContext, ThreadFactory threadFactory) {

        this.callbackExecutionContext = checkNotNull(callbackExecutionContext);
        this.status = new AtomicInteger(STATUS_ALIVE);

        if (threadFactory == null) {
            this.timerService = new ScheduledThreadPoolExecutor(1);
        } else {
            this.timerService = new ScheduledThreadPoolExecutor(1, threadFactory);
        }

        // tasks should be removed if the future is canceled
        this.timerService.setRemoveOnCancelPolicy(true);

        // make sure shutdown removes all pending tasks
        this.timerService.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
        this.timerService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    }

    @Override
    public long getCurrentProcessingTime() {
        return System.currentTimeMillis();
    }

    /**
     * Registers a task to be executed no sooner than time {@code timestamp}, but without strong
     * guarantees of order.
     *
     * @param timestamp Time when the task is to be enabled (in processing time)
     * @param callback The task to be executed
     * @return The future that represents the scheduled task. This always returns some future,
     *     even if the timer was shut down
     */
    @Override
    public ScheduledFuture<?> registerTimer(long timestamp, ProcessingTimeCallback callback) {

        // delay the firing of the timer by 1 ms to align the semantics with watermark. A watermark
        // T says we won't see elements in the future with a timestamp smaller or equal to T.
        // With processing time, we therefore need to delay firing the timer by one ms.
        long delay = Math.max(timestamp - getCurrentProcessingTime(), 0) + 1;

        // we directly try to register the timer and only react to the status on exception
        // that way we save unnecessary volatile accesses for each timer
        try {
            return timerService.schedule(wrapOnTimerCallback(callback, timestamp), delay, TimeUnit.MILLISECONDS);
        } catch (RejectedExecutionException e) {
            final int status = this.status.get();
            if (status == STATUS_QUIESCED) {
                // quiesced: pretend scheduling succeeded with a future that never completes
                return new NeverCompleteFuture(delay);
            } else if (status == STATUS_SHUTDOWN) {
                throw new IllegalStateException("Timer service is shut down");
            } else {
                // something else happened, so propagate the exception
                throw e;
            }
        }
    }

    @Override
    public ScheduledFuture<?> scheduleAtFixedRate(ProcessingTimeCallback callback, long initialDelay, long period) {
        long nextTimestamp = getCurrentProcessingTime() + initialDelay;

        // we directly try to register the timer and only react to the status on exception
        // that way we save unnecessary volatile accesses for each timer
        try {
            return timerService.scheduleAtFixedRate(
                wrapOnTimerCallback(callback, nextTimestamp, period),
                initialDelay,
                period,
                TimeUnit.MILLISECONDS);
        } catch (RejectedExecutionException e) {
            final int status = this.status.get();
            if (status == STATUS_QUIESCED) {
                return new NeverCompleteFuture(initialDelay);
            } else if (status == STATUS_SHUTDOWN) {
                throw new IllegalStateException("Timer service is shut down");
            } else {
                // something else happened, so propagate the exception
                throw e;
            }
        }
    }

    /**
     * @return {@code true} if the status of the service
     * is {@link #STATUS_ALIVE}, {@code false} otherwise.
     */
    @VisibleForTesting
    boolean isAlive() {
        return status.get() == STATUS_ALIVE;
    }

    @Override
    public boolean isTerminated() {
        return status.get() == STATUS_SHUTDOWN;
    }

    @Override
    public void quiesce() throws InterruptedException {
        // Only the ALIVE -> QUIESCED transition triggers the (graceful) executor shutdown;
        // a second call or a call after shutdownService() is a no-op.
        if (status.compareAndSet(STATUS_ALIVE, STATUS_QUIESCED)) {
            timerService.shutdown();
        }
    }

    @Override
    public void awaitPendingAfterQuiesce() throws InterruptedException {
        if (!timerService.isTerminated()) {
            Preconditions.checkState(timerService.isTerminating() || timerService.isShutdown());

            // await forever (almost)
            timerService.awaitTermination(365L, TimeUnit.DAYS);
        }
    }

    @Override
    public void shutdownService() {
        // Valid from either ALIVE or QUIESCED; shutdownNow() cancels pending timers.
        if (status.compareAndSet(STATUS_ALIVE, STATUS_SHUTDOWN)
            || status.compareAndSet(STATUS_QUIESCED, STATUS_SHUTDOWN)) {
            timerService.shutdownNow();
        }
    }

    @Override
    public boolean shutdownAndAwaitPending(long time, TimeUnit timeUnit) throws InterruptedException {
        shutdownService();
        return timerService.awaitTermination(time, timeUnit);
    }

    @Override
    public boolean shutdownServiceUninterruptible(long timeoutMs) {

        final Deadline deadline = Deadline.fromNow(Duration.ofMillis(timeoutMs));

        boolean shutdownComplete = false;
        boolean receivedInterrupt = false;

        do {
            try {
                // wait for a reasonable time for all pending timer threads to finish
                shutdownComplete = shutdownAndAwaitPending(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
            } catch (InterruptedException iex) {
                receivedInterrupt = true;
                LOG.trace("Intercepted attempt to interrupt timer service shutdown.", iex);
            }
        } while (deadline.hasTimeLeft() && !shutdownComplete);

        if (receivedInterrupt) {
            // restore the interrupt flag swallowed in the loop above
            Thread.currentThread().interrupt();
        }

        return shutdownComplete;
    }

    // safety net to destroy the thread pool
    // NOTE(review): finalize() is deprecated since Java 9; kept here byte-identical as a
    // last-resort cleanup — consider java.lang.ref.Cleaner if this is ever modernized.
    @Override
    protected void finalize() throws Throwable {
        super.finalize();
        timerService.shutdownNow();
    }

    @VisibleForTesting
    int getNumTasksScheduled() {
        BlockingQueue<?> queue = timerService.getQueue();
        if (queue == null) {
            return 0;
        } else {
            return queue.size();
        }
    }

    // ------------------------------------------------------------------------

    /**
     * A context to which {@link ProcessingTimeCallback} would be passed to be invoked when a timer is up.
     */
    interface ScheduledCallbackExecutionContext {

        void invoke(ProcessingTimeCallback callback, long timestamp);
    }

    /** Wraps a one-shot callback (period 0) for the given target timestamp. */
    private Runnable wrapOnTimerCallback(ProcessingTimeCallback callback, long timestamp) {
        return new ScheduledTask(status, callbackExecutionContext, callback, timestamp, 0);
    }

    /** Wraps a periodic callback; the reported timestamp advances by {@code period} per firing. */
    private Runnable wrapOnTimerCallback(ProcessingTimeCallback callback, long nextTimestamp, long period) {
        return new ScheduledTask(status, callbackExecutionContext, callback, nextTimestamp, period);
    }

    /**
     * Runnable scheduled on the executor; skips the callback entirely once the service
     * has left the ALIVE state, and tracks the logical timestamp for periodic timers.
     */
    private static final class ScheduledTask implements Runnable {
        private final AtomicInteger serviceStatus;
        private final ScheduledCallbackExecutionContext callbackExecutionContext;
        private final ProcessingTimeCallback callback;

        /** Logical processing-time timestamp reported on the next firing. */
        private long nextTimestamp;
        private final long period;

        ScheduledTask(
            AtomicInteger serviceStatus,
            ScheduledCallbackExecutionContext callbackExecutionContext,
            ProcessingTimeCallback callback,
            long timestamp,
            long period) {
            this.serviceStatus = serviceStatus;
            this.callbackExecutionContext = callbackExecutionContext;
            this.callback = callback;
            this.nextTimestamp = timestamp;
            this.period = period;
        }

        @Override
        public void run() {
            if (serviceStatus.get() != STATUS_ALIVE) {
                return;
            }
            callbackExecutionContext.invoke(callback, nextTimestamp);
            nextTimestamp += period;
        }
    }
}
// ----------------------------------------------------------------------------
// Copyright 2007-2014, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
//  2007/01/25  Martin D. Flynn
//     -Initial release
//  2007/04/01  Martin D. Flynn
//     -Added "Distance Units" field
//  2007/06/03  Martin D. Flynn
//     -Added I18N support
//  2007/06/13  Martin D. Flynn
//     -Added support for browsers with disabled cookies
//  2007/07/27  Martin D. Flynn
//     -Added 'getNavigationTab(...)'
//  2007/09/16  Martin D. Flynn
//     -Fixed GeocoderMode field to display the proper value from the table
//  2007/11/28  Martin D. Flynn
//     -Added 'Notify EMail' address field
//     -Invalid entries are now indicated on the page (previously they were
//      quietly ignored).
//  2008/10/16  Martin D. Flynn
//     -Update with new ACL usage
//  2008/12/01  Martin D. Flynn
//     -Added temperature units
//  2009/01/01  Martin D. Flynn
//     -Added 'Plural' field for Device/Group titles.
//  2010/04/11  Martin D. Flynn
//     -Added "Enable Border Crossing"
//     -Added "Pressure Units" selection
//  2011/03/08  Martin D. Flynn
//     -Moved GeocoderMode and isBorderCrossing to SysAdminAccounts admin.
//  2011/07/01  Martin D. Flynn
//     -Updated call to getDeviceTitles/getDeviceGroupTitles to not return the
//      standard default titles.
// ----------------------------------------------------------------------------
package org.opengts.war.track.page;

import java.util.Locale;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;

import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;

import org.opengts.war.tools.*;
import org.opengts.war.track.Calendar;
import org.opengts.war.track.*;

/**
 * "Account Admin" web page: displays the current Account's profile (contact
 * info, unit preferences, entity titles, etc.) and, for users with write
 * access, processes the "Change" form submission that updates the Account
 * record.
 */
public class AccountInfo
    extends WebPageAdaptor
    implements Constants
{

    // ------------------------------------------------------------------------
    // Parameters

    // commands
    public static final String COMMAND_INFO_UPDATE   = "update";

    // submit types
    public static final String PARM_SUBMIT_CHANGE    = "a_subchg";

    // button types
    public static final String PARM_BUTTON_CANCEL    = "a_btncan";
    public static final String PARM_BUTTON_BACK      = "a_btnbak";

    // parameters (HTML form field names)
    public static final String PARM_ACCT_ID          = "a_id";
    public static final String PARM_ACCT_DESC        = "a_desc";
    public static final String PARM_CONTACT_NAME     = "a_contact";
    public static final String PARM_CONTACT_PHONE    = "a_phone";
    public static final String PARM_CONTACT_EMAIL    = "a_email";
    public static final String PARM_NOTIFY_EMAIL     = "a_notify";
    public static final String PARM_TIMEZONE         = "a_tmz";
    public static final String PARM_SPEED_UNITS      = "a_spdun";
    public static final String PARM_DIST_UNITS       = "a_dstun";
    public static final String PARM_VOLM_UNITS       = "a_volun";
    public static final String PARM_ECON_UNITS       = "a_ecoun";
    public static final String PARM_PRESS_UNITS      = "a_presun";
    public static final String PARM_TEMP_UNITS       = "a_tempun";
    public static final String PARM_LATLON_FORMAT    = "a_latlon";
    public static final String PARM_DEVICE_TITLE     = "a_devtitle";
    public static final String PARM_DEVICES_TITLE    = "a_devstitle";
    public static final String PARM_GROUP_TITLE      = "a_grptitle";
    public static final String PARM_GROUPS_TITLE     = "a_grpstitle";
    public static final String PARM_ADDRESS_TITLE    = "a_adrtitle";
    public static final String PARM_ADDRESSES_TITLE  = "a_adrstitle";
    public static final String PARM_ACCT_EXPIRE      = "a_expire";
    public static final String PARM_MAX_PINGS        = "a_maxPing";
    public static final String PARM_TOT_PINGS        = "a_totPing";
    public static final String PARM_DEFAULT_USER     = "a_dftuser";

    // ------------------------------------------------------------------------
    // WebPage interface

    /** Registers this page under PAGE_ACCOUNT_INFO; login is required. */
    public AccountInfo()
    {
        this.setBaseURI(RequestProperties.TRACK_BASE_URI());
        this.setPageName(PAGE_ACCOUNT_INFO);
        this.setPageNavigation(new String[] { PAGE_LOGIN, PAGE_MENU_TOP });
        this.setLoginRequired(true);
    }

    // ------------------------------------------------------------------------

    /** This page lives under the "Admin" menu. */
    public String getMenuName(RequestProperties reqState)
    {
        return MenuBar.MENU_ADMIN;
    }

    /** Localized one-line menu description. */
    public String getMenuDescription(RequestProperties reqState, String parentMenuName)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(AccountInfo.class);
        return super._getMenuDescription(reqState,i18n.getString("AccountInfo.editMenuDesc","View/Edit Account Information"));
    }

    /** Localized menu tooltip/help text. */
    public String getMenuHelp(RequestProperties reqState, String parentMenuName)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(AccountInfo.class);
        return super._getMenuHelp(reqState,i18n.getString("AccountInfo.editMenuHelp","View and Edit the current Account information"));
    }

    // ------------------------------------------------------------------------

    /** Localized breadcrumb/navigation description. */
    public String getNavigationDescription(RequestProperties reqState)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(AccountInfo.class);
        return super._getNavigationDescription(reqState,i18n.getString("AccountInfo.navDesc","Account"));
    }

    /** Localized navigation-tab label. */
    public String getNavigationTab(RequestProperties reqState)
    {
        PrivateLabel privLabel = reqState.getPrivateLabel();
        I18N i18n = privLabel.getI18N(AccountInfo.class);
        return i18n.getString("AccountInfo.navTab","Account Admin");
    }

    // ------------------------------------------------------------------------

    /**
     * Renders the Account-info page and, when the request is an "update"
     * command with a matching "Change" submit, first applies the posted field
     * values to the current Account record.
     *
     * @param reqState  per-request state (session, account, user, locale)
     * @param pageMsg   message carried over from a previous request (may be shown to the user)
     * @throws IOException if writing the page frame fails
     */
    public void writePage(
        final RequestProperties reqState,
        String pageMsg)
        throws IOException
    {
        final PrivateLabel privLabel = reqState.getPrivateLabel();
        final I18N    i18n     = privLabel.getI18N(AccountInfo.class);
        final Locale  locale   = reqState.getLocale();
        final Account currAcct = reqState.getCurrentAccount();
        final User    currUser = reqState.getCurrentUser();
        final String  pageName = this.getPageName();
        String m = pageMsg;
        boolean error = false;

        /* ACL allow edit/view */
        boolean allowEdit = privLabel.hasWriteAccess(currUser, this.getAclName());
        boolean allowView = allowEdit || privLabel.hasReadAccess(currUser, this.getAclName());

        /* command */
        String  accountCmd    = reqState.getCommandName();
        boolean updateAccount = accountCmd.equals(COMMAND_INFO_UPDATE);

        /* change the account info? */
        if (updateAccount) {
            HttpServletRequest request = reqState.getHttpServletRequest();
            String submit = AttributeTools.getRequestString(request, PARM_SUBMIT_CHANGE, "");
            // only process if the submit button value matches the localized "Change" label
            if (SubmitMatch(submit,i18n.getString("AccountInfo.change","Change"))) {
                String acctName     = AttributeTools.getRequestString(request, PARM_ACCT_DESC,"");
                String contactName  = AttributeTools.getRequestString(request, PARM_CONTACT_NAME,"");
                String contactPhone = AttributeTools.getRequestString(request, PARM_CONTACT_PHONE,"");
                String contactEmail = AttributeTools.getRequestString(request, PARM_CONTACT_EMAIL,"");
                String notifyEmail  = AttributeTools.getRequestString(request, PARM_NOTIFY_EMAIL,"");
                String timeZone     = AttributeTools.getRequestString(request, PARM_TIMEZONE,"");
                String speedUnits   = AttributeTools.getRequestString(request, PARM_SPEED_UNITS,"");
                String distUnits    = AttributeTools.getRequestString(request, PARM_DIST_UNITS,"");
                String volUnits     = AttributeTools.getRequestString(request, PARM_VOLM_UNITS,"");
                String econUnits    = AttributeTools.getRequestString(request, PARM_ECON_UNITS,"");
                String pressUnits   = AttributeTools.getRequestString(request, PARM_PRESS_UNITS,"");
                String tempUnits    = AttributeTools.getRequestString(request, PARM_TEMP_UNITS,"");
                String latLonFormat = AttributeTools.getRequestString(request, PARM_LATLON_FORMAT,"");
                String deviceTitle  = AttributeTools.getRequestString(request, PARM_DEVICE_TITLE,"");
                String devicesTitle = AttributeTools.getRequestString(request, PARM_DEVICES_TITLE,"");
                String groupTitle   = AttributeTools.getRequestString(request, PARM_GROUP_TITLE,"");
                String groupsTitle  = AttributeTools.getRequestString(request, PARM_GROUPS_TITLE,"");
                String addrTitle    = AttributeTools.getRequestString(request, PARM_ADDRESS_TITLE,"");
                String addrsTitle   = AttributeTools.getRequestString(request, PARM_ADDRESSES_TITLE,"");
                String defaultUser  = AttributeTools.getRequestString(request, PARM_DEFAULT_USER,"");
                try {
                    boolean saveOK = true;
                    // description (falls back to the account-ID when blank)
                    if (!StringTools.isBlank(acctName)) {
                        currAcct.setDescription(acctName);
                    } else {
                        currAcct.setDescription(currAcct.getAccountID());
                    }
                    // contact name
                    currAcct.setContactName(contactName);
                    // contact phone
                    currAcct.setContactPhone(contactPhone);
                    // contact email (blank allowed; otherwise must validate)
                    if (StringTools.isBlank(contactEmail) || EMail.validateAddress(contactEmail)) {
                        currAcct.setContactEmail(contactEmail);
                    } else {
                        Print.logWarn("Contact EMail address is invalid: " + contactEmail);
                        m = i18n.getString("AccountInfo.pleaseEnterContactEMail","Please enter a valid contact email address"); // UserErrMsg
                        error = true;
                        saveOK = false;
                    }
                    // notify email (blank, or a validated email/SMS address list)
                    if (StringTools.isBlank(notifyEmail)) {
                        if (!currAcct.getNotifyEmail().equals(notifyEmail)) {
                            currAcct.setNotifyEmail(notifyEmail);
                        }
                    } else
                    if (EMail.validateAddresses(notifyEmail,true/*acceptSMS*/)) {
                        if (!currAcct.getNotifyEmail().equals(notifyEmail)) {
                            currAcct.setNotifyEmail(notifyEmail);
                        }
                    } else {
                        m = i18n.getString("AccountInfo.pleaseEnterNotifyEMail","Please enter a valid notify email/sms address"); // UserErrMsg
                        error = true;
                        saveOK = false;
                    }
                    // timezone
                    currAcct.setTimeZone(timeZone);
                    // speed units
                    currAcct.setSpeedUnits(speedUnits, locale);
                    // distance units
                    currAcct.setDistanceUnits(distUnits, locale);
                    // volume units
                    currAcct.setVolumeUnits(volUnits, locale);
                    // economy units
                    currAcct.setEconomyUnits(econUnits, locale);
                    // pressure units
                    currAcct.setPressureUnits(pressUnits, locale);
                    // temperature units
                    currAcct.setTemperatureUnits(tempUnits, locale);
                    // latitude/longitude format
                    currAcct.setLatLonFormat(latLonFormat, locale);
                    // reverse-geocoder mode (moved to SysAdminAccounts admin)
                    //currAcct.setGeocoderMode(revGeoMode, locale);
                    // 'Device' title
                    String devSingTitle = deviceTitle;
                    String devPlurTitle = devicesTitle;
                    currAcct.setDeviceTitle(devSingTitle, devPlurTitle);
                    // 'DeviceGroup' title
                    String grpSingTitle = groupTitle;
                    String grpPlurTitle = groupsTitle;
                    currAcct.setDeviceGroupTitle(grpSingTitle, grpPlurTitle);
                    // 'Address' title
                    String adrSingTitle = addrTitle;
                    String adrPlurTitle = addrsTitle;
                    currAcct.setAddressTitle(adrSingTitle, adrPlurTitle);
                    // default user
                    currAcct.setDefaultUser(defaultUser);
                    // save (only if every validated field was acceptable)
                    if (saveOK) {
                        /* exclude fields that only the SysAdmin/AccountManager should change */
                        currAcct.addExcludedUpdateFields(
                            Account.FLD_isActive,
                            Account.FLD_isAccountManager,
                            Account.FLD_managerID,
                            Account.FLD_privateLabelName,
                            Account.FLD_isBorderCrossing,
                            Account.FLD_geocoderMode
                            );
                        currAcct.save();
                        AttributeTools.setSessionAttribute(request, Calendar.PARM_TIMEZONE[0], timeZone);
                        //Track.writeMessageResponse(reqState, i18n.getString("AccountInfo.updatedAcct","Account information updated"));
                        m = i18n.getString("AccountInfo.updatedAcct","Account information updated"); // UserErrMsg
                    }
                } catch (Throwable t) {
                    Print.logException("Updating Account", t);
                    m = i18n.getString("AccountInfo.errorUpdate","Internal error updating Account"); // UserErrMsg
                    error = true;
                    // FIX: removed premature 'return;' here — it aborted writePage()
                    // before anything was rendered, so the user received an empty
                    // response instead of the page carrying this error message.
                    // Falling through renders the page with JS_alert(error,m) below,
                    // consistent with every other error path in this method.
                }
            }
        }

        /* Style */
        HTMLOutput HTML_CSS = new HTMLOutput() {
            public void write(PrintWriter out) throws IOException {
                String cssDir = AccountInfo.this.getCssDirectory();
                //WebPageAdaptor.writeCssLink(out, reqState, "AccountInfo.css", cssDir);
            }
        };

        /* javascript */
        HTMLOutput HTML_JS = new HTMLOutput() {
            public void write(PrintWriter out) throws IOException {
                MenuBar.writeJavaScript(out, pageName, reqState);
            }
        };

        /* Content */
        final boolean  _allowEdit = allowEdit;
        final ComboMap _tzList    = privLabel.getTimeZoneComboMap();
        final ComboMap _suList    = privLabel.getEnumComboMap(Account.SpeedUnits.class      );
        final ComboMap _duList    = privLabel.getEnumComboMap(Account.DistanceUnits.class   );
        final ComboMap _vuList    = privLabel.getEnumComboMap(Account.VolumeUnits.class     );
        final ComboMap _ecList    = privLabel.getEnumComboMap(Account.EconomyUnits.class    );
        final ComboMap _puList    = privLabel.getEnumComboMap(Account.PressureUnits.class   );
        final ComboMap _tuList    = privLabel.getEnumComboMap(Account.TemperatureUnits.class);
        final ComboMap _llList    = privLabel.getEnumComboMap(Account.LatLonFormat.class    );
        //final ComboMap _rgList  = privLabel.getEnumComboMap(Account.GeocoderMode.class    );
        final ComboMap _ynList    = ComboMap.getYesNoMap(locale);
        HTMLOutput HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
            public void write(PrintWriter out) throws IOException {
                //Print.logStackTrace("here");
                //String menuURL = EncodeMakeURL(reqState,RequestProperties.TRACK_BASE_URI(),PAGE_MENU_TOP);
                String menuURL    = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
                //String chgURL = EncodeMakeURL(reqState,RequestProperties.TRACK_BASE_URI(),pageName,COMMAND_INFO_UPDATE);
                String chgURL     = privLabel.getWebPageURL(reqState, pageName, COMMAND_INFO_UPDATE);
                String frameTitle = _allowEdit?
                    i18n.getString("AccountInfo.editAccount","Edit Account Information") :
                    i18n.getString("AccountInfo.viewAccount","View Account Information");

                // frame content (current values displayed in the form)
                ComboOption speedUnits       = privLabel.getEnumComboOption(Account.getSpeedUnits(currAcct)      );
                ComboOption distanceUnits    = privLabel.getEnumComboOption(Account.getDistanceUnits(currAcct)   );
                ComboOption volumeUnits      = privLabel.getEnumComboOption(Account.getVolumeUnits(currAcct)     );
                ComboOption economyUnits     = privLabel.getEnumComboOption(Account.getEconomyUnits(currAcct)    );
                ComboOption pressureUnits    = privLabel.getEnumComboOption(Account.getPressureUnits(currAcct)   );
                ComboOption temperatureUnits = privLabel.getEnumComboOption(Account.getTemperatureUnits(currAcct));
                ComboOption latLonFormat     = privLabel.getEnumComboOption(Account.getLatLonFormat(currAcct)    );
                //ComboOption geocoderMode   = privLabel.getEnumComboOption(Account.getGeocoderMode(currAcct)    );
                String devTitles[] = currAcct.getDeviceTitles(locale, new String[]{"",""});
                String grpTitles[] = currAcct.getDeviceGroupTitles(locale, new String[]{"",""});
                String adrTitles[] = currAcct.getAddressTitles(locale, new String[]{"",""});

                out.println("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>");
                out.println("<hr/>");
                out.println("<form name='AccountInfo' method='post' action='"+chgURL+"' target='_self'>");
                // FIX: 'callpadding' was a typo for the HTML 'cellpadding' attribute
                out.println("<table class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE+"' cellspacing='0' cellpadding='0' border='0'>");
                out.println(FormRow_TextField(PARM_ACCT_ID      , false     , i18n.getString("AccountInfo.accountID","Account ID:")                  , currAcct.getAccountID()    , 32, 32)); // read-only
                out.println(FormRow_TextField(PARM_ACCT_DESC    , _allowEdit, i18n.getString("AccountInfo.accountName","Account Description:")       , currAcct.getDescription()  , 40, 40));
                out.println(FormRow_TextField(PARM_CONTACT_NAME , _allowEdit, i18n.getString("AccountInfo.contactName","Contact Name:")              , currAcct.getContactName()  , 40, 40));
                out.println(FormRow_TextField(PARM_CONTACT_PHONE, _allowEdit, i18n.getString("AccountInfo.contactPhone","Contact Phone:")            , currAcct.getContactPhone() , 20, 20));
                out.println(FormRow_TextField(PARM_CONTACT_EMAIL, _allowEdit, i18n.getString("AccountInfo.contactEMail","Contact Email:")            , currAcct.getContactEmail() , 60, 100));
                out.println(FormRow_TextField(PARM_NOTIFY_EMAIL , _allowEdit, i18n.getString("AccountInfo.notifyEMail","Notify Email:")              , currAcct.getNotifyEmail()  , 95, 125));
                out.println(FormRow_ComboBox (PARM_TIMEZONE     , _allowEdit, i18n.getString("AccountInfo.timeZone","Time Zone:")                    , currAcct.getTimeZone()     , _tzList, null, 20));
                out.println(FormRow_ComboBox (PARM_SPEED_UNITS  , _allowEdit, i18n.getString("AccountInfo.speedUnits","Speed Units:")                , speedUnits                 , _suList, null, 10));
                out.println(FormRow_ComboBox (PARM_DIST_UNITS   , _allowEdit, i18n.getString("AccountInfo.distanceUnits","Distance Units:")          , distanceUnits              , _duList, null, 10));
                out.println(FormRow_ComboBox (PARM_VOLM_UNITS   , _allowEdit, i18n.getString("AccountInfo.volumeUnits","Volume Units:")              , volumeUnits                , _vuList, null, 10));
                out.println(FormRow_ComboBox (PARM_ECON_UNITS   , _allowEdit, i18n.getString("AccountInfo.economyUnits","Economy Units:")            , economyUnits               , _ecList, null, 10));
                out.println(FormRow_ComboBox (PARM_PRESS_UNITS  , _allowEdit, i18n.getString("AccountInfo.pressureUnits","Pressure Units:")          , pressureUnits              , _puList, null, 10));
                out.println(FormRow_ComboBox (PARM_TEMP_UNITS   , _allowEdit, i18n.getString("AccountInfo.temperatureUnits","Temperature Units:")    , temperatureUnits           , _tuList, null, 5));
                out.println(FormRow_ComboBox (PARM_LATLON_FORMAT, _allowEdit, i18n.getString("AccountInfo.latLonFormat","Latitude/Longitude Format:"), latLonFormat               , _llList, null, 15));

                /* "Device" title (singular + plural in one row) */
                out.print  ("<tr>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_HEADER+"'>"+i18n.getString("AccountInfo.deviceTitle","'Device' Title:")+"</td>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_DATA+"'>");
                out.print  (Form_TextField(PARM_DEVICE_TITLE , _allowEdit, devTitles[0], 20, 40));
                out.print  ("<span style='margin-left: 10px;margin-right:5px;'>"+i18n.getString("AccountInfo.plural","Plural:")+"</span>");
                out.print  (Form_TextField(PARM_DEVICES_TITLE, _allowEdit, devTitles[1], 20, 40));
                out.print  ("</td>");
                out.println("</tr>");

                /* "Fleet" title (singular + plural in one row) */
                out.print  ("<tr>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_HEADER+"'>"+i18n.getString("AccountInfo.groupTitle","'DeviceGroup' Title:")+"</td>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_DATA+"'>");
                out.print  (Form_TextField(PARM_GROUP_TITLE , _allowEdit, grpTitles[0], 20, 40));
                out.print  ("<span style='margin-left: 10px;margin-right:5px;'>"+i18n.getString("AccountInfo.plural","Plural:")+"</span>");
                out.print  (Form_TextField(PARM_GROUPS_TITLE, _allowEdit, grpTitles[1], 20, 40));
                out.print  ("</td>");
                out.println("</tr>");

                /* "Address" title (singular + plural in one row) */
                out.print  ("<tr>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_HEADER+"'>"+i18n.getString("AccountInfo.addressTitle","'Address' Title:")+"</td>");
                out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_DATA+"'>");
                out.print  (Form_TextField(PARM_ADDRESS_TITLE , _allowEdit, adrTitles[0], 20, 40));
                out.print  ("<span style='margin-left: 10px;margin-right:5px;'>"+i18n.getString("AccountInfo.plural","Plural:")+"</span>");
                out.print  (Form_TextField(PARM_ADDRESSES_TITLE, _allowEdit, adrTitles[1], 20, 40));
                out.print  ("</td>");
                out.println("</tr>");

                /* default user */
                out.println(FormRow_TextField(PARM_DEFAULT_USER , _allowEdit, i18n.getString("AccountInfo.defaultUser","Default Login UserID:") , currAcct.getDefaultUser() , 20, 32));

                /* expiration (read-only; shown only when an expiration is set) */
                long expireTime = currAcct.getExpirationTime();
                if (expireTime > 0L) {
                    String expireTimeStr = reqState.formatDateTime(expireTime);
                    if (StringTools.isBlank(expireTimeStr)) { expireTimeStr = "n/a"; }
                    out.println(FormRow_TextField(PARM_ACCT_EXPIRE , false , i18n.getString("AccountInfo.expiration","Expiration:") , expireTimeStr , 30, 30)); // read-only
                }

                /* max pings / total pings (read-only; shown only when a limit is set) */
                int maxPingCnt = currAcct.getMaxPingCount();
                if (maxPingCnt > 0) {
                    int totPingCnt = currAcct.getTotalPingCount();
                    int remaining  = (maxPingCnt > totPingCnt)? (maxPingCnt - totPingCnt) : 0;
                    out.print  ("<tr>");
                    out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_HEADER+"'>"+i18n.getString("AccountInfo.maxCommandCount","Max Allowed Commands")+":</td>");
                    out.print  ("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_DATA+"'>");
                    out.print  (Form_TextField(PARM_MAX_PINGS, false, String.valueOf(maxPingCnt), 5, 5));
                    out.print  ("<span style='margin-left: 10px;margin-right:5px;'>"+i18n.getString("AccountInfo.remainingCommands","Remaining Commands")+":</span>");
                    out.print  (Form_TextField(PARM_TOT_PINGS, false, String.valueOf(remaining) , 5, 5));
                    out.print  ("</td>");
                    out.println("</tr>");
                }

                out.println("</table>");

                /* end of form */
                out.write("<hr style='margin-bottom:5px;'>\n");
                out.write("<span style='padding-left:10px'>&nbsp;</span>\n");
                if (_allowEdit) {
                    out.write("<input type='submit' name='"+PARM_SUBMIT_CHANGE+"' value='"+i18n.getString("AccountInfo.change","Change")+"'>\n");
                    out.write("<span style='padding-left:10px'>&nbsp;</span>\n");
                    out.write("<input type='button' name='"+PARM_BUTTON_CANCEL+"' value='"+i18n.getString("AccountInfo.cancel","Cancel")+"' onclick=\"javascript:openURL('"+menuURL+"','_self');\">\n"); // target='_top'
                } else {
                    out.write("<input type='button' name='"+PARM_BUTTON_BACK+"' value='"+i18n.getString("AccountInfo.back","Back")+"' onclick=\"javascript:openURL('"+menuURL+"','_self');\">\n"); // target='_top'
                }
                out.write("</form>\n");
            }
        };

        /* write frame (on error, pop a JS alert carrying the user message) */
        String onload = error? JS_alert(true,m) : null;
        CommonServlet.writePageFrame(
            reqState,
            onload,null,                // onLoad/onUnload
            HTML_CSS,                   // Style sheets
            HTML_JS,                    // JavaScript
            null,                       // Navigation
            HTML_CONTENT);              // Content

    }

    // ------------------------------------------------------------------------

}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ******************************************************************************/
package org.apache.olingo.odata2.fit.basic;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.olingo.odata2.api.ODataService;
import org.apache.olingo.odata2.api.batch.BatchHandler;
import org.apache.olingo.odata2.api.batch.BatchRequestPart;
import org.apache.olingo.odata2.api.batch.BatchResponsePart;
import org.apache.olingo.odata2.api.commons.HttpStatusCodes;
import org.apache.olingo.odata2.api.edm.Edm;
import org.apache.olingo.odata2.api.edm.provider.EdmProvider;
import org.apache.olingo.odata2.api.ep.EntityProvider;
import org.apache.olingo.odata2.api.ep.EntityProviderBatchProperties;
import org.apache.olingo.odata2.api.exception.ODataException;
import org.apache.olingo.odata2.api.processor.ODataRequest;
import org.apache.olingo.odata2.api.processor.ODataResponse;
import org.apache.olingo.odata2.api.processor.ODataSingleProcessor;
import org.apache.olingo.odata2.api.uri.info.GetSimplePropertyUriInfo;
import org.apache.olingo.odata2.api.uri.info.PutMergePatchUriInfo;
import org.apache.olingo.odata2.core.PathInfoImpl;
import org.apache.olingo.odata2.core.ep.util.CircleStreamBuffer;
import org.apache.olingo.odata2.core.ep.util.FormatJson;
import org.apache.olingo.odata2.core.ep.util.JsonStreamWriter;
import org.apache.olingo.odata2.core.processor.ODataSingleProcessorService;
import org.apache.olingo.odata2.testutil.helper.StringHelper;
import org.apache.olingo.odata2.testutil.mock.MockFacade;
import org.apache.olingo.odata2.testutil.server.ServletType;
import org.junit.Test;

/**
 * End-to-end test of OData V2 $batch handling: POSTs a multipart/mixed batch
 * (one GET retrieve request plus one changeset containing a PUT) against an
 * embedded service and checks the multipart response.
 */
public class BasicBatchTest extends AbstractBasicTest {

  public BasicBatchTest(final ServletType servletType) {
    super(servletType);
  }

  // Batch payloads use CRLF line terminators per the multipart/MIME rules.
  private static final String CRLF = "\r\n";
  // Matches a legal MIME boundary token (bare or quoted form).
  private static final String REG_EX_BOUNDARY =
      "(([a-zA-Z0-9_\\-\\.'\\+]{1,70})|\"([a-zA-Z0-9_\\-\\.'\\+\\s\\(\\),/:=\\?]"
          + "{1,69}[a-zA-Z0-9_\\-\\.'\\+\\(\\),/:=\\?])\")";
  // Expected Content-Type of a batch response: multipart/mixed with a boundary.
  private static final String REG_EX = "multipart/mixed;\\s*boundary=" + REG_EX_BOUNDARY + "\\s*";

  // Batch body: part 1 is a GET of Employees('1')/EmployeeName; part 2 is a
  // changeset wrapping a PUT to the same property. Content-Id appears both at
  // MIME-header and at request-header level so the response echo can be checked.
  private static final String REQUEST_PAYLOAD =
      "--batch_98c1-8b13-36bb" + CRLF
          + "Content-Type: application/http" + CRLF
          + "Content-Transfer-Encoding: binary" + CRLF
          + "Content-Id: mimeHeaderContentId1" + CRLF
          + CRLF
          + "GET Employees('1')/EmployeeName HTTP/1.1" + CRLF
          + "Host: localhost:19000" + CRLF
          + "Accept: application/atomsvc+xml;q=0.8, application/json;odata=verbose;q=0.5, */*;q=0.1" + CRLF
          + "Accept-Language: en" + CRLF
          + "MaxDataServiceVersion: 2.0" + CRLF
          + "Content-Id: requestHeaderContentId1" + CRLF
          + CRLF
          + CRLF
          + "--batch_98c1-8b13-36bb" + CRLF
          + "Content-Type: multipart/mixed; boundary=changeset_f980-1cb6-94dd" + CRLF
          + CRLF
          + "--changeset_f980-1cb6-94dd" + CRLF
          + "Content-Type: application/http" + CRLF
          + "Content-Transfer-Encoding: binary" + CRLF
          + "Content-Id: mimeHeaderContentId2" + CRLF
          + CRLF
          + "PUT Employees('1')/EmployeeName HTTP/1.1" + CRLF
          + "Host: localhost:19000" + CRLF
          + "Content-Type: application/json;odata=verbose" + CRLF
          + "MaxDataServiceVersion: 2.0" + CRLF
          + "Content-Id: requestHeaderContentId2" + CRLF
          + CRLF
          + "{\"EmployeeName\":\"Walter Winter MODIFIED\"}" + CRLF
          + CRLF
          + "--changeset_f980-1cb6-94dd--" + CRLF
          + CRLF
          + "--batch_98c1-8b13-36bb--";

  /** Happy path: 202 Accepted, multipart/mixed response echoing all Content-Ids. */
  @Test
  public void testBatch() throws Exception {
    final HttpPost post = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    post.setHeader("Content-Type", "multipart/mixed;boundary=batch_98c1-8b13-36bb");
    HttpEntity entity = new StringEntity(REQUEST_PAYLOAD);
    post.setEntity(entity);

    HttpResponse response = getHttpClient().execute(post);

    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    assertEquals("HTTP/1.1", response.getProtocolVersion().toString());
    assertTrue(response.containsHeader("Content-Length"));
    assertTrue(response.containsHeader("Content-Type"));
    assertTrue(response.containsHeader("DataServiceVersion"));
    assertTrue(response.getEntity().getContentType().getValue().matches(REG_EX));
    assertNotNull(response.getEntity().getContent());

    String body = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertTrue(body.contains("Content-Id: mimeHeaderContentId1"));
    assertTrue(body.contains("Content-Id: requestHeaderContentId1"));
    assertTrue(body.contains("Content-Id: mimeHeaderContentId2"));
    assertTrue(body.contains("Content-Id: requestHeaderContentId2"));
  }

  /** Same as {@link #testBatch()} but with the '$' of "$batch" percent-encoded ("%24batch"). */
  @Test
  public void testBatchUriEncoded() throws Exception {
    final HttpPost post = new HttpPost(URI.create(getEndpoint().toString() + "%24batch"));
    post.setHeader("Content-Type", "multipart/mixed;boundary=batch_98c1-8b13-36bb");
    HttpEntity entity = new StringEntity(REQUEST_PAYLOAD);
    post.setEntity(entity);

    HttpResponse response = getHttpClient().execute(post);

    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    assertEquals("HTTP/1.1", response.getProtocolVersion().toString());
    assertTrue(response.containsHeader("Content-Length"));
    assertTrue(response.containsHeader("Content-Type"));
    assertTrue(response.containsHeader("DataServiceVersion"));
    assertTrue(response.getEntity().getContentType().getValue().matches(REG_EX));
    assertNotNull(response.getEntity().getContent());

    String body = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertTrue(body.contains("Content-Id: mimeHeaderContentId1"));
    assertTrue(body.contains("Content-Id: requestHeaderContentId1"));
    assertTrue(body.contains("Content-Id: mimeHeaderContentId2"));
    assertTrue(body.contains("Content-Id: requestHeaderContentId2"));
  }

  /**
   * Strips the Content-Type header from the PUT part: the batch itself is still
   * accepted (202), but the embedded PUT must fail with 415 inside the response.
   */
  @Test
  public void testBatchInvalidContentTypeForPut() throws Exception {
    final HttpPost post = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    post.setHeader("Content-Type", "multipart/mixed;boundary=batch_98c1-8b13-36bb");
    String replacedEntity = REQUEST_PAYLOAD.replace("Content-Type: application/json;odata=verbose" + CRLF, "");
    HttpEntity entity = new StringEntity(replacedEntity);
    post.setEntity(entity);

    HttpResponse response = getHttpClient().execute(post);

    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    assertEquals("HTTP/1.1", response.getProtocolVersion().toString());
    assertTrue(response.containsHeader("Content-Length"));
    assertTrue(response.containsHeader("Content-Type"));
    assertTrue(response.containsHeader("DataServiceVersion"));
    assertTrue(response.getEntity().getContentType().getValue().matches(REG_EX));
    assertNotNull(response.getEntity().getContent());

    String body = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertTrue(body.contains("Content-Id: mimeHeaderContentId1"));
    assertTrue(body.contains("Content-Id: requestHeaderContentId1"));
    assertTrue(body.contains("HTTP/1.1 415 Unsupported Media Type"));
  }

  /**
   * Minimal processor backing the test service: parses the batch, dispatches
   * parts/changesets through the handler, and serves the EmployeeName property.
   */
  static class TestSingleProc extends ODataSingleProcessor {

    /** Parses and dispatches the batch; asserts we are NOT yet in batch mode here. */
    @Override
    public ODataResponse executeBatch(final BatchHandler handler, final String requestContentType,
        final InputStream content) {
      assertFalse(getContext().isInBatchMode());
      ODataResponse batchResponse;
      List<BatchResponsePart> batchResponseParts = new ArrayList<BatchResponsePart>();
      PathInfoImpl pathInfo = new PathInfoImpl();
      try {
        pathInfo.setServiceRoot(new URI("http://localhost:19000/odata"));
        EntityProviderBatchProperties batchProperties = EntityProviderBatchProperties.init().pathInfo(pathInfo).build();
        List<BatchRequestPart> batchParts = EntityProvider.parseBatchRequest(requestContentType, content,
            batchProperties);
        for (BatchRequestPart batchPart : batchParts) {
          batchResponseParts.add(handler.handleBatchPart(batchPart));
        }
        batchResponse = EntityProvider.writeBatchResponse(batchResponseParts);
      } catch (URISyntaxException e) {
        throw new RuntimeException(e);
      } catch (ODataException e) {
        throw new RuntimeException(e);
      }
      return batchResponse;
    }

    /**
     * Executes a changeset; on the first failed request (status >= 400) the
     * changeset is "rolled back" by returning only the error response with
     * changeSet(false).
     */
    @Override
    public BatchResponsePart executeChangeSet(final BatchHandler handler, final List<ODataRequest> requests)
        throws ODataException {
      assertTrue(getContext().isInBatchMode());
      List<ODataResponse> responses = new ArrayList<ODataResponse>();
      for (ODataRequest request : requests) {
        ODataResponse response = handler.handleRequest(request);
        if (response.getStatus().getStatusCode() >= HttpStatusCodes.BAD_REQUEST.getStatusCode()) {
          // Rollback
          List<ODataResponse> errorResponses = new ArrayList<ODataResponse>(1);
          errorResponses.add(response);
          return BatchResponsePart.responses(errorResponses).changeSet(false).build();
        }
        responses.add(response);
      }
      return BatchResponsePart.responses(responses).changeSet(true).build();
    }

    /** Serves the GET part: a fixed JSON body {"d":{"EmployeeName":"Walter Winter"}}. */
    @Override
    public ODataResponse readEntitySimpleProperty(final GetSimplePropertyUriInfo uriInfo, final String contentType)
        throws ODataException {
      assertTrue(getContext().isInBatchMode());
      CircleStreamBuffer buffer = new CircleStreamBuffer();
      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(buffer.getOutputStream()));
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      try {
        jsonStreamWriter.beginObject()
            .name(FormatJson.D)
            .beginObject()
            .namedStringValue("EmployeeName", "Walter Winter")
            .endObject()
            .endObject();
        writer.flush();
        buffer.closeWrite();
      } catch (IOException e) {
        buffer.close();
        throw new RuntimeException(e);
      }
      ODataResponse oDataResponse =
          ODataResponse.entity(buffer.getInputStream()).status(HttpStatusCodes.OK).contentHeader("application/json")
              .build();
      return oDataResponse;
    }

    /** Serves the PUT part: accepts the update and answers 204 No Content. */
    @Override
    public ODataResponse updateEntitySimpleProperty(final PutMergePatchUriInfo uriInfo, final InputStream content,
        final String requestContentType, final String contentType) throws ODataException {
      assertTrue(getContext().isInBatchMode());
      ODataResponse oDataResponse = ODataResponse.status(HttpStatusCodes.NO_CONTENT).build();
      return oDataResponse;
    }
  }

  @Override
  protected ODataSingleProcessor createProcessor() throws ODataException {
    return new TestSingleProc();
  }

  /** Builds the test service, pinning the EDM to the shared mock metadata. */
  @Override
  protected ODataService createService() throws ODataException {
    final EdmProvider provider = createEdmProvider();
    final ODataSingleProcessor processor = createProcessor();
    return new ODataSingleProcessorService(provider, processor) {
      Edm edm = MockFacade.getMockEdm();

      @Override
      public Edm getEntityDataModel() throws ODataException {
        return edm;
      }
    };
  }
}
package com.sim.core.agents.tank;

import com.sim.core.Sensors.Sharp;
import com.sim.core.Sensors.SharpManager;
import com.sim.core.interfaces.Agent;
import com.sim.core.interfaces.OnlyReadableTrack;
import com.sim.core.interfaces.ResetFunction;
import com.sim.core.math.Vector2f;
import com.sim.core.math.genetics.Chromosome;

import java.awt.*;

/**
 * A simulated tank agent: a line segment of {@link #length} pixels centred on {@link #pos},
 * oriented along the unit vector {@link #dir}. Each {@link #tick()} it feeds its sensors and
 * controller, then moves or turns according to {@link #tankState}, burning down a travel
 * budget ({@link #leftOfPath}); once the budget drops below zero the tank is inert.
 * <p>
 * Created by kirill-good on 9.3.15.
 */
public class Tank implements Agent {
    /** Shared id counter; ids are handed out at construction time. NOTE(review): not thread-safe. */
    protected static int carId = 0;
    /** Unique id of this tank instance. */
    protected int id = carId++;
    /** Centre of the tank, in track coordinates. */
    protected Vector2f pos = new Vector2f(0, 0);
    /** Heading; kept normalized by {@link #setDir} and the turn cases of {@link #tick()}. */
    protected Vector2f dir = new Vector2f(1, 0);
    /** Distance travelled per tick when driving. */
    protected double maxSpeed = 5;
    /** Rotation per tick when turning, in radians (3 degrees). */
    protected double turnAngle = Math.PI / 60;
    /** Length of the tank segment, in pixels. */
    protected int length = 50;
    /** Width of the tank; currently unused by collision/painting. */
    protected int width = 10;
    /** Controller that decides the next {@link TankState}; must be set before {@link #tick()}. */
    protected TankControl tankControl;
    /** Distance sensors attached to this tank. */
    protected SharpManager sharpManager = new SharpManager();
    /** Track the tank drives on; set via {@link #setTrack}. */
    protected OnlyReadableTrack track = null;
    /** Remaining travel budget; the tank stops acting once this goes negative. */
    protected double leftOfPath = 100;
    /** Reset hook; the default is a no-op. */
    protected ResetFunction resetFunction = new ResetFunction() {
        @Override
        public void reset(Agent agent) {
        }
    };
    protected Color color = Color.black;
    protected TankState tankState = TankState.STOP;

    /**
     * Advances the simulation one step: updates sensors and the controller, then applies
     * the controller's chosen state (drive, reverse, or turn).
     */
    @Override
    public void tick() {
        // Out of travel budget: the tank no longer senses, thinks, or moves.
        if (leftOfPath < 0) {
            return;
        }
        sharpManager.tick(track, pos, dir, length);
        tankControl.tick();
        switch (tankState) {
            case STOP:
                break;
            case FORWARD:
                translate(+maxSpeed);
                break;
            case BACKWARD:
                // BUG FIX: previously used the same "+" arithmetic as FORWARD, so a
                // reversing tank actually drove forward. Reverse moves against the heading.
                translate(-maxSpeed);
                break;
            case TURNLEFT:
                dir.turn(+turnAngle);
                dir.normalization();
                break;
            case TURNRIGHT:
                dir.turn(-turnAngle);
                dir.normalization();
                break;
            default:
                throw new RuntimeException("bad tankState");
        }
    }

    /**
     * Moves the tank {@code distance} units along its heading (negative = reverse) and
     * deducts one tick's worth of travel budget regardless of direction.
     */
    private void translate(double distance) {
        double x = pos.getX() + dir.getX() * distance;
        double y = pos.getY() + dir.getY() * distance;
        pos.setXY(x, y);
        leftOfPath -= maxSpeed;
    }

    @Override
    public String toString() {
        // BUG FIX: closing delimiter was " ]" while the opening was "{".
        return "{" + this.id + "; " + tankControl.toString() + " }";
    }

    @Override
    public void setTrack(OnlyReadableTrack onlyReadableTrack) {
        this.track = onlyReadableTrack;
    }

    /**
     * Checks whether either endpoint of the tank segment lies on a track pixel.
     * Note: uses integer division ({@code length/2}) unlike {@link #paint}, which
     * uses {@code length*0.5} — kept as-is to preserve existing collision behavior.
     */
    @Override
    public boolean collision() {
        int x1 = (int) (this.pos.getX() + this.dir.getX() * length / 2);
        int y1 = (int) (this.pos.getY() + this.dir.getY() * length / 2);
        int x2 = (int) (this.pos.getX() - this.dir.getX() * length / 2);
        int y2 = (int) (this.pos.getY() - this.dir.getY() * length / 2);
        return track.getPix(x1, y1) || track.getPix(x2, y2);
    }

    /**
     * Draws the tank as a line with its id at the midpoint.
     *
     * @param g  graphics context to draw on.
     * @param dx horizontal viewport offset.
     * @param dy vertical viewport offset.
     */
    @Override
    public void paint(Graphics g, int dx, int dy) {
        g.setColor(color);
        int x1 = dx + (int) (this.pos.getX() + this.dir.getX() * length * 0.5);
        int y1 = dy + (int) (this.pos.getY() + this.dir.getY() * length * 0.5);
        int x2 = dx + (int) (this.pos.getX() - this.dir.getX() * length * 0.5);
        int y2 = dy + (int) (this.pos.getY() - this.dir.getY() * length * 0.5);
        g.drawString(String.valueOf(id), dx + (x1 + x2) / 2, dy + (y1 + y2) / 2);
        g.drawLine(x1, y1, x2, y2);
    }

    @Override
    public void setPos(double x, double y) {
        this.pos.setXY(x, y);
    }

    /** Sets the heading and re-normalizes it so movement speed stays consistent. */
    @Override
    public void setDir(double x, double y) {
        this.dir.setXY(x, y);
        this.dir.normalization();
    }

    @Override
    public SharpManager getSharpManager() {
        return sharpManager;
    }

    @Override
    public void addSharp(Sharp sharp) {
        sharpManager.addSharp(sharp);
    }

    @Override
    public double getX() {
        return this.pos.getX();
    }

    @Override
    public double getY() {
        return this.pos.getY();
    }

    /** Delegates to the configured {@link ResetFunction}. */
    @Override
    public void reset() {
        this.resetFunction.reset(this);
    }

    @Override
    public void setResetFunction(ResetFunction resetFunction) {
        this.resetFunction = resetFunction;
    }

    @Override
    public double getLeftOfPath() {
        return leftOfPath;
    }

    @Override
    public void setLeftOfPath(double leftOfPath) {
        this.leftOfPath = leftOfPath;
    }

    @Override
    public void setChromosome(Chromosome chromosome) {
        tankControl.setChromosome(chromosome);
    }

    @Override
    public Chromosome getChromosome() {
        return tankControl.getChromosome();
    }

    @Override
    public int getNumOfGens() {
        return tankControl.getNumOfGens();
    }

    public double getMaxSpeed() {
        return maxSpeed;
    }

    public void setMaxSpeed(double maxSpeed) {
        this.maxSpeed = maxSpeed;
    }

    public double getTurnAngle() {
        return turnAngle;
    }

    public void setTurnAngle(double turnAngle) {
        this.turnAngle = turnAngle;
    }

    public int getLength() {
        return length;
    }

    public void setLength(int length) {
        this.length = length;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public TankControl getTankControl() {
        return tankControl;
    }

    /** Attaches the controller and wires its back-reference to this tank. */
    public void setTankControl(TankControl tankControl) {
        this.tankControl = tankControl;
        tankControl.setTank(this);
    }

    public ResetFunction getResetFunction() {
        return resetFunction;
    }

    public Color getColor() {
        return color;
    }

    public void setColor(Color color) {
        this.color = color;
    }

    public TankState getTankState() {
        return tankState;
    }

    public void setTankState(TankState tankState) {
        this.tankState = tankState;
    }

    public OnlyReadableTrack getTrack() {
        return track;
    }

    public int getId() {
        return id;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.cache.store.cassandra.common;

import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.Row;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.ignite.cache.store.cassandra.persistence.PojoFieldAccessor;
import org.apache.ignite.cache.store.cassandra.serializer.Serializer;

/**
 * Helper class providing bunch of methods to discover fields of POJO objects and
 * map builtin Java types to appropriate Cassandra types.
 */
public class PropertyMappingHelper {
    /** Bytes array Class type ({@code byte[].class}), used as a map key and in instanceof-style dispatch. */
    private static final Class BYTES_ARRAY_CLASS = (new byte[] {}).getClass();

    /**
     * Mapping from Java to Cassandra types. Both boxed and primitive variants of a
     * numeric/boolean type map to the same Cassandra column type.
     */
    private static final Map<Class, DataType.Name> JAVA_TO_CASSANDRA_MAPPING = new HashMap<Class, DataType.Name>() {{
        put(String.class, DataType.Name.TEXT);
        put(Integer.class, DataType.Name.INT);
        put(int.class, DataType.Name.INT);
        put(Short.class, DataType.Name.SMALLINT);
        put(short.class, DataType.Name.SMALLINT);
        put(Long.class, DataType.Name.BIGINT);
        put(long.class, DataType.Name.BIGINT);
        put(Double.class, DataType.Name.DOUBLE);
        put(double.class, DataType.Name.DOUBLE);
        put(Boolean.class, DataType.Name.BOOLEAN);
        put(boolean.class, DataType.Name.BOOLEAN);
        put(Float.class, DataType.Name.FLOAT);
        put(float.class, DataType.Name.FLOAT);
        put(ByteBuffer.class, DataType.Name.BLOB);
        put(BYTES_ARRAY_CLASS, DataType.Name.BLOB);
        put(BigDecimal.class, DataType.Name.DECIMAL);
        put(InetAddress.class, DataType.Name.INET);
        put(Date.class, DataType.Name.TIMESTAMP);
        put(UUID.class, DataType.Name.UUID);
        put(BigInteger.class, DataType.Name.VARINT);
    }};

    /**
     * Maps Java type to the corresponding Cassandra type.
     *
     * @param clazz java class.
     *
     * @return Cassandra type, or {@code null} if the class has no builtin mapping.
     */
    public static DataType.Name getCassandraType(Class clazz) {
        return JAVA_TO_CASSANDRA_MAPPING.get(clazz);
    }

    /**
     * Returns property accessor by class property name. Prefers a JavaBean property
     * descriptor (getter/setter pair); falls back to direct field access when no
     * matching descriptor exists.
     *
     * @param clazz class from which to get property accessor.
     * @param prop name of the property.
     *
     * @return property accessor.
     * @throws IllegalArgumentException if the class has neither a bean property nor a declared field with that name.
     */
    public static PojoFieldAccessor getPojoFieldAccessor(Class clazz, String prop) {
        PropertyDescriptor[] descriptors = PropertyUtils.getPropertyDescriptors(clazz);

        if (descriptors != null) {
            for (PropertyDescriptor descriptor : descriptors) {
                if (descriptor.getName().equals(prop)) {
                    Field field = null;

                    // The backing field is optional extra information for the accessor;
                    // its absence (e.g. a synthetic bean property) is deliberately ignored.
                    try {
                        field = clazz.getDeclaredField(prop);
                    }
                    catch (Throwable ignore) {
                    }

                    return new PojoFieldAccessor(descriptor, field);
                }
            }
        }

        try {
            return new PojoFieldAccessor(clazz.getDeclaredField(prop));
        }
        catch (Throwable e) {
            throw new IllegalArgumentException("POJO class " + clazz.getName() + " doesn't have '" + prop + "' property");
        }
    }

    /**
     * Returns value of specific column in the row returned by CQL statement.
     * <p>
     * Dispatch rules: boxed types return {@code null} for a NULL column; primitive
     * types throw {@link IllegalArgumentException} on NULL (there is no primitive
     * representation of NULL); any class without a builtin mapping is read as a BLOB
     * and deserialized with the supplied {@code serializer}.
     *
     * @param row row returned by CQL statement.
     * @param col column name.
     * @param clazz java class to which column value should be casted.
     * @param serializer serializer to use if column stores BLOB otherwise could be null.
     *
     * @return row column value.
     * @throws IllegalStateException if the type is unmapped and no serializer was provided.
     */
    public static Object getCassandraColumnValue(Row row, String col, Class clazz, Serializer serializer) {
        if (String.class.equals(clazz))
            return row.getString(col);

        if (Integer.class.equals(clazz))
            return row.isNull(col) ? null : row.getInt(col);

        if (int.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "int value used in domain object model");
            }

            return row.getInt(col);
        }

        if (Short.class.equals(clazz))
            return row.isNull(col) ? null : row.getShort(col);

        if (short.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "short value used in domain object model");
            }

            return row.getShort(col);
        }

        if (Long.class.equals(clazz))
            return row.isNull(col) ? null : row.getLong(col);

        if (long.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "long value used in domain object model");
            }

            return row.getLong(col);
        }

        if (Double.class.equals(clazz))
            return row.isNull(col) ? null : row.getDouble(col);

        if (double.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "double value used in domain object model");
            }

            return row.getDouble(col);
        }

        if (Boolean.class.equals(clazz))
            return row.isNull(col) ? null : row.getBool(col);

        if (boolean.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "boolean value used in domain object model");
            }

            return row.getBool(col);
        }

        if (Float.class.equals(clazz))
            return row.isNull(col) ? null : row.getFloat(col);

        if (float.class.equals(clazz)) {
            if (row.isNull(col)) {
                throw new IllegalArgumentException("Can't cast null value from Cassandra table column '" + col + "' to " +
                    "float value used in domain object model");
            }

            return row.getFloat(col);
        }

        if (ByteBuffer.class.equals(clazz))
            return row.getBytes(col);

        if (PropertyMappingHelper.BYTES_ARRAY_CLASS.equals(clazz)) {
            ByteBuffer buf = row.getBytes(col);
            // NOTE(review): buf.array() exposes the buffer's backing array directly;
            // assumes the driver returns a buffer whose array exactly spans the value — confirm.
            return buf == null ? null : buf.array();
        }

        if (BigDecimal.class.equals(clazz))
            return row.getDecimal(col);

        if (InetAddress.class.equals(clazz))
            return row.getInet(col);

        if (Date.class.equals(clazz))
            return row.getTimestamp(col);

        if (UUID.class.equals(clazz))
            return row.getUUID(col);

        if (BigInteger.class.equals(clazz))
            return row.getVarint(col);

        if (serializer == null) {
            throw new IllegalStateException("Can't deserialize value from '" + col + "' Cassandra column, " +
                "cause there is no BLOB serializer specified");
        }

        ByteBuffer buf = row.getBytes(col);

        return buf == null ? null : serializer.deserialize(buf);
    }
}
/*
 * Copyright 2007 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.event;

import org.drools.RuleBase;
import org.drools.rule.Package;
import org.drools.rule.Rule;
import org.kie.definition.process.Process;

import java.util.Iterator;

/**
 * Dispatches rule-base lifecycle events (package/rule/function/process additions
 * and removals, lock/unlock transitions) to the registered
 * {@link RuleBaseEventListener}s.
 * <p>
 * Every {@code fireX} method follows the same pattern: if no listener is
 * registered, nothing is done and no event object is allocated; otherwise a
 * single event instance is created and delivered to every listener in turn.
 */
public class RuleBaseEventSupport extends AbstractEventSupport<RuleBaseEventListener> {

    /** Rule base reported as the source of most events; transient so it is not serialized. */
    private transient RuleBase ruleBase;

    public RuleBaseEventSupport() {
    }

    public RuleBaseEventSupport(final RuleBase ruleBase) {
        this.ruleBase = ruleBase;
    }

    public void setRuleBase(RuleBase ruleBase) {
        this.ruleBase = ruleBase;
    }

    public void fireBeforePackageAdded(final Package newPkg) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        // Allocate the event once and share it among all listeners.
        final BeforePackageAddedEvent event = new BeforePackageAddedEvent(this.ruleBase, newPkg);
        while (listeners.hasNext()) {
            listeners.next().beforePackageAdded(event);
        }
    }

    public void fireAfterPackageAdded(final Package newPkg) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterPackageAddedEvent event = new AfterPackageAddedEvent(this.ruleBase, newPkg);
        while (listeners.hasNext()) {
            listeners.next().afterPackageAdded(event);
        }
    }

    public void fireBeforePackageRemoved(final Package pkg) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforePackageRemovedEvent event = new BeforePackageRemovedEvent(this.ruleBase, pkg);
        while (listeners.hasNext()) {
            listeners.next().beforePackageRemoved(event);
        }
    }

    public void fireAfterPackageRemoved(final Package pkg) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterPackageRemovedEvent event = new AfterPackageRemovedEvent(this.ruleBase, pkg);
        while (listeners.hasNext()) {
            listeners.next().afterPackageRemoved(event);
        }
    }

    public void fireBeforeRuleBaseLocked() {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeRuleBaseLockedEvent event = new BeforeRuleBaseLockedEvent(this.ruleBase);
        while (listeners.hasNext()) {
            listeners.next().beforeRuleBaseLocked(event);
        }
    }

    public void fireAfterRuleBaseLocked() {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterRuleBaseLockedEvent event = new AfterRuleBaseLockedEvent(this.ruleBase);
        while (listeners.hasNext()) {
            listeners.next().afterRuleBaseLocked(event);
        }
    }

    public void fireBeforeRuleBaseUnlocked() {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeRuleBaseUnlockedEvent event = new BeforeRuleBaseUnlockedEvent(this.ruleBase);
        while (listeners.hasNext()) {
            listeners.next().beforeRuleBaseUnlocked(event);
        }
    }

    public void fireAfterRuleBaseUnlocked() {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterRuleBaseUnlockedEvent event = new AfterRuleBaseUnlockedEvent(this.ruleBase);
        while (listeners.hasNext()) {
            listeners.next().afterRuleBaseUnlocked(event);
        }
    }

    public void fireBeforeRuleAdded(final Package newPkg, final Rule rule) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeRuleAddedEvent event = new BeforeRuleAddedEvent(this.ruleBase, newPkg, rule);
        while (listeners.hasNext()) {
            listeners.next().beforeRuleAdded(event);
        }
    }

    public void fireAfterRuleAdded(final Package newPkg, final Rule rule) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterRuleAddedEvent event = new AfterRuleAddedEvent(this.ruleBase, newPkg, rule);
        while (listeners.hasNext()) {
            listeners.next().afterRuleAdded(event);
        }
    }

    public void fireBeforeRuleRemoved(final Package pkg, final Rule rule) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeRuleRemovedEvent event = new BeforeRuleRemovedEvent(this.ruleBase, pkg, rule);
        while (listeners.hasNext()) {
            listeners.next().beforeRuleRemoved(event);
        }
    }

    public void fireAfterRuleRemoved(final Package pkg, final Rule rule) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterRuleRemovedEvent event = new AfterRuleRemovedEvent(this.ruleBase, pkg, rule);
        while (listeners.hasNext()) {
            listeners.next().afterRuleRemoved(event);
        }
    }

    public void fireBeforeFunctionRemoved(final Package pkg, final String function) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeFunctionRemovedEvent event = new BeforeFunctionRemovedEvent(this.ruleBase, pkg, function);
        while (listeners.hasNext()) {
            listeners.next().beforeFunctionRemoved(event);
        }
    }

    public void fireAfterFunctionRemoved(final Package pkg, final String function) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterFunctionRemovedEvent event = new AfterFunctionRemovedEvent(this.ruleBase, pkg, function);
        while (listeners.hasNext()) {
            listeners.next().afterFunctionRemoved(event);
        }
    }

    // Process events carry only the process itself, not the rule base.

    public void fireBeforeProcessAdded(final Process process) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeProcessAddedEvent event = new BeforeProcessAddedEvent(process);
        while (listeners.hasNext()) {
            listeners.next().beforeProcessAdded(event);
        }
    }

    public void fireAfterProcessAdded(final Process process) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterProcessAddedEvent event = new AfterProcessAddedEvent(process);
        while (listeners.hasNext()) {
            listeners.next().afterProcessAdded(event);
        }
    }

    public void fireBeforeProcessRemoved(final Process process) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final BeforeProcessRemovedEvent event = new BeforeProcessRemovedEvent(process);
        while (listeners.hasNext()) {
            listeners.next().beforeProcessRemoved(event);
        }
    }

    public void fireAfterProcessRemoved(final Process process) {
        final Iterator<RuleBaseEventListener> listeners = getEventListenersIterator();
        if (!listeners.hasNext()) {
            return;
        }
        final AfterProcessRemovedEvent event = new AfterProcessRemovedEvent(process);
        while (listeners.hasNext()) {
            listeners.next().afterProcessRemoved(event);
        }
    }
}
/**
 * <copyright> </copyright>
 *
 * $Id$
 */
package edu.teco.dnd.graphiti.model.impl;

import java.util.Collection;
import java.util.Map.Entry;
import java.util.UUID;

import org.apache.bcel.classfile.JavaClass;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentWithInverseEList;
import org.eclipse.emf.ecore.util.InternalEList;

import edu.teco.dnd.blocks.FunctionBlockClass;
import edu.teco.dnd.graphiti.model.FunctionBlockModel;
import edu.teco.dnd.graphiti.model.InputModel;
import edu.teco.dnd.graphiti.model.ModelPackage;
import edu.teco.dnd.graphiti.model.OptionModel;
import edu.teco.dnd.graphiti.model.OutputModel;

/**
 * <!-- begin-user-doc --> An implementation of the model object '<em><b>Function Block Model</b></em>'.
 * Largely EMF-generated: fields, accessors, and the reflective eGet/eSet/eUnset/eIsSet
 * switches follow the standard EMF pattern and fire change notifications. <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getType <em>Type</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getInputs <em>Inputs</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getOutputs <em>Outputs</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getOptions <em>Options</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getID <em>ID</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getPosition <em>Position</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getBlockName <em>Block Name</em>}</li>
 * <li>{@link edu.teco.dnd.graphiti.model.impl.FunctionBlockModelImpl#getBlockClass <em>Block Class</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class FunctionBlockModelImpl extends EObjectImpl implements FunctionBlockModel {
	/**
	 * The default value of the '{@link #getType() <em>Type</em>}' attribute.
	 *
	 * @see #getType()
	 * @generated
	 * @ordered
	 */
	protected static final String TYPE_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
	 *
	 * @see #getType()
	 * @generated
	 * @ordered
	 */
	protected String type = TYPE_EDEFAULT;

	/**
	 * The cached value of the '{@link #getInputs() <em>Inputs</em>}' containment reference list.
	 * Lazily created in {@link #getInputs()}.
	 *
	 * @see #getInputs()
	 * @generated
	 * @ordered
	 */
	protected EList<InputModel> inputs;

	/**
	 * The cached value of the '{@link #getOutputs() <em>Outputs</em>}' containment reference list.
	 * Lazily created in {@link #getOutputs()}.
	 *
	 * @see #getOutputs()
	 * @generated
	 * @ordered
	 */
	protected EList<OutputModel> outputs;

	/**
	 * The cached value of the '{@link #getOptions() <em>Options</em>}' containment reference list.
	 * Lazily created in {@link #getOptions()}.
	 *
	 * @see #getOptions()
	 * @generated
	 * @ordered
	 */
	protected EList<OptionModel> options;

	/**
	 * The default value of the '{@link #getID() <em>ID</em>}' attribute.
	 *
	 * @see #getID()
	 * @generated
	 * @ordered
	 */
	protected static final UUID ID_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getID() <em>ID</em>}' attribute.
	 *
	 * @see #getID()
	 * @generated
	 * @ordered
	 */
	protected UUID iD = ID_EDEFAULT;

	/**
	 * The default value of the '{@link #getPosition() <em>Position</em>}' attribute.
	 *
	 * @see #getPosition()
	 * @generated
	 * @ordered
	 */
	protected static final String POSITION_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getPosition() <em>Position</em>}' attribute.
	 *
	 * @see #getPosition()
	 * @generated
	 * @ordered
	 */
	protected String position = POSITION_EDEFAULT;

	/**
	 * The default value of the '{@link #getBlockName() <em>Block Name</em>}' attribute.
	 *
	 * @see #getBlockName()
	 * @generated
	 * @ordered
	 */
	protected static final String BLOCK_NAME_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getBlockName() <em>Block Name</em>}' attribute.
	 *
	 * @see #getBlockName()
	 * @generated
	 * @ordered
	 */
	protected String blockName = BLOCK_NAME_EDEFAULT;

	/**
	 * The default value of the '{@link #getBlockClass() <em>Block Class</em>}' attribute.
	 *
	 * @see #getBlockClass()
	 * @generated
	 * @ordered
	 */
	protected static final String BLOCK_CLASS_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getBlockClass() <em>Block Class</em>}' attribute.
	 *
	 * @see #getBlockClass()
	 * @generated
	 * @ordered
	 */
	protected String blockClass = BLOCK_CLASS_EDEFAULT;

	/**
	 * Default constructor used by the EMF factory.
	 *
	 * @generated
	 */
	protected FunctionBlockModelImpl() {
		super();
	}

	/**
	 * Hand-written convenience constructor: populates this model from a
	 * {@link FunctionBlockClass} description. Assigns a fresh random UUID, copies
	 * name/class/type, and creates an Input-/Output-/OptionModel per declared
	 * input, output, and option.
	 * <p>
	 * NOTE(review): the created child models are only linked via
	 * {@code setFunctionBlock(this)}; presumably the EMF inverse reference inserts
	 * them into this block's containment lists — confirm against the generated
	 * InputModel/OutputModel/OptionModel implementations.
	 * Option values are always typed as {@code java.lang.String}.
	 */
	protected FunctionBlockModelImpl(FunctionBlockClass cls) throws ClassNotFoundException {
		super();
		setID(UUID.randomUUID());
		if (cls != null) {
			setBlockName(cls.getClassName());
			setBlockClass(cls.getClassName());
			setType(cls.getBlockType());
			for (final Entry<String, JavaClass> input : cls.getInputs().entrySet()) {
				final InputModel inputModel = ModelFactoryImpl.eINSTANCE.createInputModel();
				inputModel.setFunctionBlock(this);
				inputModel.setName(input.getKey());
				inputModel.setType(input.getValue().getClassName());
			}
			for (final Entry<String, JavaClass> output : cls.getOutputs().entrySet()) {
				final OutputModel outputModel = ModelFactoryImpl.eINSTANCE.createOutputModel();
				outputModel.setFunctionBlock(this);
				outputModel.setName(output.getKey());
				outputModel.setType(output.getValue().getClassName());
			}
			for (final Entry<String, String> option : cls.getOptions().entrySet()) {
				final OptionModel optionModel = ModelFactoryImpl.eINSTANCE.createOptionModel();
				optionModel.setFunctionBlock(this);
				optionModel.setName(option.getKey());
				optionModel.setValue(option.getValue());
				optionModel.setType("java.lang.String"); //$NON-NLS-1$
			}
		}
	}

	/**
	 * Returns the static EMF metaclass of this model object.
	 *
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return ModelPackage.Literals.FUNCTION_BLOCK_MODEL;
	}

	/**
	 * @generated
	 */
	public String getType() {
		return type;
	}

	/**
	 * Sets the type and notifies adapters of the change.
	 *
	 * @generated
	 */
	public void setType(String newType) {
		String oldType = type;
		type = newType;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.FUNCTION_BLOCK_MODEL__TYPE, oldType,
					type));
	}

	/**
	 * Returns the inputs list, creating the containment list (with inverse
	 * reference maintenance) on first access.
	 *
	 * @generated
	 */
	public EList<InputModel> getInputs() {
		if (inputs == null) {
			inputs = new EObjectContainmentWithInverseEList<InputModel>(InputModel.class, this,
					ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS, ModelPackage.INPUT_MODEL__FUNCTION_BLOCK);
		}
		return inputs;
	}

	/**
	 * Returns the outputs list, creating it lazily.
	 *
	 * @generated
	 */
	public EList<OutputModel> getOutputs() {
		if (outputs == null) {
			outputs = new EObjectContainmentWithInverseEList<OutputModel>(OutputModel.class, this,
					ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS, ModelPackage.OUTPUT_MODEL__FUNCTION_BLOCK);
		}
		return outputs;
	}

	/**
	 * Returns the options list, creating it lazily.
	 *
	 * @generated
	 */
	public EList<OptionModel> getOptions() {
		if (options == null) {
			options = new EObjectContainmentWithInverseEList<OptionModel>(OptionModel.class, this,
					ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS, ModelPackage.OPTION_MODEL__FUNCTION_BLOCK);
		}
		return options;
	}

	/**
	 * @generated
	 */
	public UUID getID() {
		return iD;
	}

	/**
	 * Sets the ID and notifies adapters of the change.
	 *
	 * @generated
	 */
	public void setID(UUID newID) {
		UUID oldID = iD;
		iD = newID;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.FUNCTION_BLOCK_MODEL__ID, oldID, iD));
	}

	/**
	 * @generated
	 */
	public String getPosition() {
		return position;
	}

	/**
	 * Sets the position and notifies adapters of the change.
	 *
	 * @generated
	 */
	public void setPosition(String newPosition) {
		String oldPosition = position;
		position = newPosition;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.FUNCTION_BLOCK_MODEL__POSITION,
					oldPosition, position));
	}

	/**
	 * @generated
	 */
	public String getBlockName() {
		return blockName;
	}

	/**
	 * Sets the block name and notifies adapters of the change.
	 *
	 * @generated
	 */
	public void setBlockName(String newBlockName) {
		String oldBlockName = blockName;
		blockName = newBlockName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_NAME,
					oldBlockName, blockName));
	}

	/**
	 * @generated
	 */
	public String getBlockClass() {
		return blockClass;
	}

	/**
	 * Sets the block class and notifies adapters of the change.
	 *
	 * @generated
	 */
	public void setBlockClass(String newBlockClass) {
		String oldBlockClass = blockClass;
		blockClass = newBlockClass;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_CLASS,
					oldBlockClass, blockClass));
	}

	/**
	 * Reflectively adds an object to one of the containment lists when the inverse
	 * (child-to-parent) reference is set.
	 *
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			return ((InternalEList<InternalEObject>) (InternalEList<?>) getInputs()).basicAdd(otherEnd, msgs);
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			return ((InternalEList<InternalEObject>) (InternalEList<?>) getOutputs()).basicAdd(otherEnd, msgs);
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			return ((InternalEList<InternalEObject>) (InternalEList<?>) getOptions()).basicAdd(otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * Reflectively removes an object from one of the containment lists.
	 *
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			return ((InternalEList<?>) getInputs()).basicRemove(otherEnd, msgs);
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			return ((InternalEList<?>) getOutputs()).basicRemove(otherEnd, msgs);
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			return ((InternalEList<?>) getOptions()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * Reflective feature getter used by the EMF runtime.
	 *
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__TYPE:
			return getType();
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			return getInputs();
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			return getOutputs();
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			return getOptions();
		case ModelPackage.FUNCTION_BLOCK_MODEL__ID:
			return getID();
		case ModelPackage.FUNCTION_BLOCK_MODEL__POSITION:
			return getPosition();
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_NAME:
			return getBlockName();
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_CLASS:
			return getBlockClass();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective feature setter used by the EMF runtime. List features are
	 * replaced wholesale (clear, then addAll).
	 *
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__TYPE:
			setType((String) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			getInputs().clear();
			getInputs().addAll((Collection<? extends InputModel>) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			getOutputs().clear();
			getOutputs().addAll((Collection<? extends OutputModel>) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			getOptions().clear();
			getOptions().addAll((Collection<? extends OptionModel>) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__ID:
			setID((UUID) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__POSITION:
			setPosition((String) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_NAME:
			setBlockName((String) newValue);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_CLASS:
			setBlockClass((String) newValue);
			return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Reflectively resets a feature to its default (attributes) or clears it (lists).
	 *
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__TYPE:
			setType(TYPE_EDEFAULT);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			getInputs().clear();
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			getOutputs().clear();
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			getOptions().clear();
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__ID:
			setID(ID_EDEFAULT);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__POSITION:
			setPosition(POSITION_EDEFAULT);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_NAME:
			setBlockName(BLOCK_NAME_EDEFAULT);
			return;
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_CLASS:
			setBlockClass(BLOCK_CLASS_EDEFAULT);
			return;
		}
		super.eUnset(featureID);
	}

	/**
	 * Reflectively reports whether a feature differs from its default value.
	 *
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
		case ModelPackage.FUNCTION_BLOCK_MODEL__TYPE:
			return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
		case ModelPackage.FUNCTION_BLOCK_MODEL__INPUTS:
			return inputs != null && !inputs.isEmpty();
		case ModelPackage.FUNCTION_BLOCK_MODEL__OUTPUTS:
			return outputs != null && !outputs.isEmpty();
		case ModelPackage.FUNCTION_BLOCK_MODEL__OPTIONS:
			return options != null && !options.isEmpty();
		case ModelPackage.FUNCTION_BLOCK_MODEL__ID:
			return ID_EDEFAULT == null ? iD != null : !ID_EDEFAULT.equals(iD);
		case ModelPackage.FUNCTION_BLOCK_MODEL__POSITION:
			return POSITION_EDEFAULT == null ? position != null : !POSITION_EDEFAULT.equals(position);
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_NAME:
			return BLOCK_NAME_EDEFAULT == null ? blockName != null : !BLOCK_NAME_EDEFAULT.equals(blockName);
		case ModelPackage.FUNCTION_BLOCK_MODEL__BLOCK_CLASS:
			return BLOCK_CLASS_EDEFAULT == null ? blockClass != null : !BLOCK_CLASS_EDEFAULT.equals(blockClass);
		}
		return super.eIsSet(featureID);
	}

	/**
	 * Debug representation listing the scalar attributes.
	 *
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy())
			return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (type: ");
		result.append(type);
		result.append(", iD: ");
		result.append(iD);
		result.append(", position: ");
		result.append(position);
		result.append(", blockName: ");
		result.append(blockName);
		result.append(", blockClass: ");
		result.append(blockClass);
		result.append(')');
		return result.toString();
	}

	/**
	 * Hand-written: returns the simple (unqualified) type name, i.e. the part of
	 * {@link #getType()} after the last '.'. Returns {@code type} unchanged
	 * (possibly null or empty) when there is nothing to strip.
	 */
	@Override
	public String getTypeName() {
		if (type != null && !type.isEmpty()) {
			return type.substring(type.lastIndexOf('.') + 1);
		}
		return type;
	}

	/** Hand-written: a block with outputs but no inputs is considered a sensor. */
	@Override
	public boolean isSensor() {
		return getInputs().isEmpty() && !getOutputs().isEmpty();
	}

	/** Hand-written: a block with inputs but no outputs is considered an actor. */
	@Override
	public boolean isActor() {
		return !getInputs().isEmpty() && getOutputs().isEmpty();
	}

} // FunctionBlockModelImpl
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNameModule; import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; import org.elasticsearch.index.settings.IndexSettingsModule; import 
org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.MatcherAssert;
import org.junit.Test;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;

import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.Matchers.*;

/**
 * Tests for {@link AnalysisModule}: verifies that analyzers, tokenizers, token filters and char
 * filters configured via JSON/YAML settings are wired up correctly, that pre-built token filter
 * factories resolve to the expected Lucene classes, and that analyzers are versioned according to
 * the index-creation version.
 */
public class AnalysisModuleTests extends ElasticsearchTestCase {

    /** Child injector of the last {@link #getAnalysisService(Settings)} call; used by tests to pull out sibling services. */
    private Injector injector;

    /**
     * Builds a fresh {@link AnalysisService} from the given index settings, mimicking the normal
     * module wiring (parent injector for node-level services, child injector for the index).
     *
     * @param settings index settings; must include {@code IndexMetaData.SETTING_VERSION_CREATED}
     * @return the analysis service for a test index named "test"
     */
    public AnalysisService getAnalysisService(Settings settings) {
        Index index = new Index("test");
        Injector parentInjector = new ModulesBuilder()
                .add(new SettingsModule(settings),
                     new EnvironmentModule(new Environment(settings)),
                     new IndicesAnalysisModule())
                .createInjector();
        injector = new ModulesBuilder()
                .add(new IndexSettingsModule(index, settings),
                     new IndexNameModule(index),
                     new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class)))
                .createChildInjector(parentInjector);
        return injector.getInstance(AnalysisService.class);
    }

    /** Loads settings from a classpath resource and stamps them with the current created-version. */
    private static Settings loadFromClasspath(String path) {
        return settingsBuilder()
                .loadFromClasspath(path)
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .build();
    }

    @Test
    public void testSimpleConfigurationJson() {
        Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.json");
        testSimpleConfiguration(settings);
    }

    @Test
    public void testSimpleConfigurationYaml() {
        Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml");
        testSimpleConfiguration(settings);
    }

    @Test
    public void testDefaultFactoryTokenFilters() throws IOException {
        assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class);
        assertTokenFilter("persian_normalization", PersianNormalizationFilter.class);
        assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class);
    }

    @Test
    public void testVersionedAnalyzers() throws Exception {
        Settings settings2 = settingsBuilder()
                .loadFromClasspath("org/elasticsearch/index/analysis/test1.yml")
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
                .build();
        AnalysisService analysisService2 = getAnalysisService(settings2);

        // The node-level IndicesAnalysisService always uses the current version...
        IndicesAnalysisService indicesAnalysisService2 = injector.getInstance(IndicesAnalysisService.class);
        assertThat(indicesAnalysisService2.analyzer("default"), is(instanceOf(NamedAnalyzer.class)));
        NamedAnalyzer defaultNamedAnalyzer = (NamedAnalyzer) indicesAnalysisService2.analyzer("default");
        assertThat(defaultNamedAnalyzer.analyzer(), is(instanceOf(StandardAnalyzer.class)));
        assertEquals(Version.CURRENT.luceneVersion, defaultNamedAnalyzer.analyzer().getVersion());

        // ...while the index-level analysis service honors the index-creation version.
        assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
        assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
        assertEquals(Version.V_0_90_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
    }

    /**
     * Asserts that the pre-built token filter registered under {@code name} produces a
     * {@link TokenStream} of the expected Lucene class.
     *
     * @param name  registered token filter name
     * @param clazz expected concrete TokenStream subclass (was a raw {@code Class}; parameterized
     *              to avoid raw-type usage)
     */
    private void assertTokenFilter(String name, Class<?> clazz) throws IOException {
        AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(
                ImmutableSettings.settingsBuilder()
                        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                        .build());
        TokenFilterFactory tokenFilter = analysisService.tokenFilter(name);
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("foo bar"));
        TokenStream stream = tokenFilter.create(tokenizer);
        assertThat(stream, instanceOf(clazz));
    }

    /** Shared assertions for the JSON and YAML variants of the test1 configuration. */
    private void testSimpleConfiguration(Settings settings) {
        AnalysisService analysisService = getAnalysisService(settings);

        Analyzer analyzer = analysisService.analyzer("custom1").analyzer();
        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
        CustomAnalyzer custom1 = (CustomAnalyzer) analyzer;
        assertThat(custom1.tokenizerFactory(), instanceOf(StandardTokenizerFactory.class));
        assertThat(custom1.tokenFilters().length, equalTo(2));

        StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0];
        assertThat(stop1.stopWords().size(), equalTo(1));
        //assertThat((Iterable<char[]>) stop1.stopWords(), hasItem("test-stop".toCharArray()));

        analyzer = analysisService.analyzer("custom2").analyzer();
        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
        CustomAnalyzer custom2 = (CustomAnalyzer) analyzer;

//        HtmlStripCharFilterFactory html = (HtmlStripCharFilterFactory) custom2.charFilters()[0];
//        assertThat(html.readAheadLimit(), equalTo(HTMLStripCharFilter.DEFAULT_READ_AHEAD));
//
//        html = (HtmlStripCharFilterFactory) custom2.charFilters()[1];
//        assertThat(html.readAheadLimit(), equalTo(1024));

        // verify characters mapping
        analyzer = analysisService.analyzer("custom5").analyzer();
        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
        CustomAnalyzer custom5 = (CustomAnalyzer) analyzer;
        assertThat(custom5.charFilters()[0], instanceOf(MappingCharFilterFactory.class));

        // verify aliases
        analyzer = analysisService.analyzer("alias1").analyzer();
        assertThat(analyzer, instanceOf(StandardAnalyzer.class));

        // check custom pattern replace filter
        analyzer = analysisService.analyzer("custom3").analyzer();
        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
        CustomAnalyzer custom3 = (CustomAnalyzer) analyzer;
        PatternReplaceCharFilterFactory patternReplaceCharFilterFactory =
                (PatternReplaceCharFilterFactory) custom3.charFilters()[0];
        assertThat(patternReplaceCharFilterFactory.getPattern().pattern(), equalTo("sample(.*)"));
        assertThat(patternReplaceCharFilterFactory.getReplacement(), equalTo("replacedSample $1"));

        // check custom class name (my)
        analyzer = analysisService.analyzer("custom4").analyzer();
        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
        CustomAnalyzer custom4 = (CustomAnalyzer) analyzer;
        assertThat(custom4.tokenFilters()[0], instanceOf(MyFilterTokenFilterFactory.class));

//        // verify Czech stemmer
//        analyzer = analysisService.analyzer("czechAnalyzerWithStemmer").analyzer();
//        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
//        CustomAnalyzer czechstemmeranalyzer = (CustomAnalyzer) analyzer;
//        assertThat(czechstemmeranalyzer.tokenizerFactory(), instanceOf(StandardTokenizerFactory.class));
//        assertThat(czechstemmeranalyzer.tokenFilters().length, equalTo(4));
//        assertThat(czechstemmeranalyzer.tokenFilters()[3], instanceOf(CzechStemTokenFilterFactory.class));
//
//        // check dictionary decompounder
//        analyzer = analysisService.analyzer("decompoundingAnalyzer").analyzer();
//        assertThat(analyzer, instanceOf(CustomAnalyzer.class));
//        CustomAnalyzer dictionaryDecompounderAnalyze = (CustomAnalyzer) analyzer;
//        assertThat(dictionaryDecompounderAnalyze.tokenizerFactory(), instanceOf(StandardTokenizerFactory.class));
//        assertThat(dictionaryDecompounderAnalyze.tokenFilters().length, equalTo(1));
//        assertThat(dictionaryDecompounderAnalyze.tokenFilters()[0], instanceOf(DictionaryCompoundWordTokenFilterFactory.class));

        Set<?> wordList = Analysis.getWordSet(null, settings, "index.analysis.filter.dict_dec.word_list");
        MatcherAssert.assertThat(wordList.size(), equalTo(6));
//        MatcherAssert.assertThat(wordList, hasItems("donau", "dampf", "schiff", "spargel", "creme", "suppe"));
    }

    @Test
    public void testWordListPath() throws Exception {
        Environment env = new Environment(ImmutableSettings.Builder.EMPTY_SETTINGS);
        String[] words = new String[]{"donau", "dampf", "schiff", "spargel", "creme", "suppe"};

        Path wordListFile = generateWordList(words);
        Settings settings = settingsBuilder()
                .loadFromSource("index: \n  word_list_path: " + wordListFile.toAbsolutePath())
                .build();

        Set<?> wordList = Analysis.getWordSet(env, settings, "index.word_list");
        MatcherAssert.assertThat(wordList.size(), equalTo(6));
//        MatcherAssert.assertThat(wordList, hasItems(words));
        Files.delete(wordListFile);
    }

    /** Writes one word per line to a temp file and returns its path; caller deletes it. */
    private Path generateWordList(String[] words) throws Exception {
        Path wordListFile = newTempDirPath().resolve("wordlist.txt");
        try (BufferedWriter writer = Files.newBufferedWriter(wordListFile, StandardCharsets.UTF_8)) {
            for (String word : words) {
                writer.write(word);
                writer.write('\n');
            }
        }
        return wordListFile;
    }

}
/* * * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.documentation.spi.service.contexts; import com.fasterxml.classmate.TypeResolver; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.Ordering; import org.springframework.web.bind.annotation.RequestMethod; import springfox.documentation.PathProvider; import springfox.documentation.RequestHandler; import springfox.documentation.schema.AlternateTypeRule; import springfox.documentation.service.ApiDescription; import springfox.documentation.service.ApiInfo; import springfox.documentation.service.ApiListingReference; import springfox.documentation.service.Operation; import springfox.documentation.service.Parameter; import springfox.documentation.service.ResponseMessage; import springfox.documentation.service.SecurityScheme; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spi.schema.GenericTypeNamingStrategy; import springfox.documentation.spi.service.ResourceGroupingStrategy; import java.util.List; import java.util.Map; import java.util.Set; import static com.google.common.collect.FluentIterable.*; import static com.google.common.collect.Lists.*; import static com.google.common.collect.Maps.*; import static com.google.common.collect.Sets.*; import static springfox.documentation.builders.BuilderDefaults.*; public class DocumentationContextBuilder { private 
TypeResolver typeResolver; // completes the 'private' modifier at the end of the previous line
  private List<RequestHandler> handlerMappings;
  private ApiInfo apiInfo;
  private String groupName;
  private ResourceGroupingStrategy resourceGroupingStrategy;
  private PathProvider pathProvider;
  private List<? extends SecurityScheme> securitySchemes;
  private Ordering<ApiListingReference> listingReferenceOrdering;
  private Ordering<ApiDescription> apiDescriptionOrdering;
  private final DocumentationType documentationType; // set once in the constructor
  private Ordering<Operation> operationOrdering;
  private boolean applyDefaultResponseMessages;
  private ApiSelector apiSelector = ApiSelector.DEFAULT;
  private final List<SecurityContext> securityContexts = newArrayList();
  private final Set<Class> ignorableParameterTypes = newHashSet();
  private final Map<RequestMethod, List<ResponseMessage>> responseMessageOverrides = newTreeMap();
  private final List<Parameter> globalOperationParameters = newArrayList();
  private final List<AlternateTypeRule> rules = newArrayList();
  private final Map<RequestMethod, List<ResponseMessage>> defaultResponseMessages = newHashMap();
  private final Set<String> protocols = newHashSet();
  private final Set<String> produces = newHashSet();
  private final Set<String> consumes = newHashSet();
  private GenericTypeNamingStrategy genericsNamingStrategy;
  // Fix: initialize to absent so build() never hands DocumentationContext a null Optional
  // when pathMapping(...) is not called.
  private Optional<String> pathMapping = Optional.absent();
  private boolean isUrlTemplatesEnabled;

  public DocumentationContextBuilder(DocumentationType documentationType) {
    this.documentationType = documentationType;
  }

  /** Sets the request handlers to document (last call wins). */
  public DocumentationContextBuilder requestHandlers(List<RequestHandler> handlerMappings) {
    this.handlerMappings = handlerMappings;
    return this;
  }

  /** Sets the API info; a null argument keeps the previously set value. */
  public DocumentationContextBuilder apiInfo(ApiInfo apiInfo) {
    this.apiInfo = defaultIfAbsent(apiInfo, this.apiInfo);
    return this;
  }

  /** Sets the documentation group name; a null argument keeps the previously set value. */
  public DocumentationContextBuilder groupName(String groupName) {
    this.groupName = defaultIfAbsent(groupName, this.groupName);
    return this;
  }

  /** Adds parameter types that should be ignored when building operation parameters. */
  public DocumentationContextBuilder additionalIgnorableTypes(Set<Class> ignorableParameterTypes) {
    this.ignorableParameterTypes.addAll(ignorableParameterTypes);
    return this;
  }

  /** Adds per-request-method response message overrides (merged over the defaults in build()). */
  public DocumentationContextBuilder additionalResponseMessages(
      Map<RequestMethod, List<ResponseMessage>> additionalResponseMessages) {
    this.responseMessageOverrides.putAll(additionalResponseMessages);
    return this;
  }

  /** Adds operation parameters applied to every operation; null is treated as empty. */
  public DocumentationContextBuilder additionalOperationParameters(List<Parameter> globalRequestParameters) {
    this.globalOperationParameters.addAll(nullToEmptyList(globalRequestParameters));
    return this;
  }

  /** Sets the strategy used to group resources into API listings. */
  public DocumentationContextBuilder withResourceGroupingStrategy(ResourceGroupingStrategy resourceGroupingStrategy) {
    this.resourceGroupingStrategy = resourceGroupingStrategy;
    return this;
  }

  /** Sets the path provider; a null argument keeps the previously set value. */
  public DocumentationContextBuilder pathProvider(PathProvider pathProvider) {
    this.pathProvider = defaultIfAbsent(pathProvider, this.pathProvider);
    return this;
  }

  /** Adds security contexts; null is treated as empty. */
  public DocumentationContextBuilder securityContexts(List<SecurityContext> securityContext) {
    this.securityContexts.addAll(nullToEmptyList(securityContext));
    return this;
  }

  /** Sets the supported security schemes (last call wins). */
  public DocumentationContextBuilder securitySchemes(List<? extends SecurityScheme> securitySchemes) {
    this.securitySchemes = securitySchemes;
    return this;
  }

  /** Sets the ordering of API listing references; null keeps the previous value. */
  public DocumentationContextBuilder apiListingReferenceOrdering(
      Ordering<ApiListingReference> listingReferenceOrdering) {
    this.listingReferenceOrdering = defaultIfAbsent(listingReferenceOrdering, this.listingReferenceOrdering);
    return this;
  }

  /** Sets the ordering of API descriptions; null keeps the previous value. */
  public DocumentationContextBuilder apiDescriptionOrdering(Ordering<ApiDescription> apiDescriptionOrdering) {
    this.apiDescriptionOrdering = defaultIfAbsent(apiDescriptionOrdering, this.apiDescriptionOrdering);
    return this;
  }

  /**
   * Merges default response messages (when enabled) with the explicit overrides; overrides win
   * on key collisions because they are put last.
   */
  private Map<RequestMethod, List<ResponseMessage>> aggregateResponseMessages() {
    Map<RequestMethod, List<ResponseMessage>> responseMessages = newHashMap();
    if (applyDefaultResponseMessages) {
      responseMessages.putAll(defaultResponseMessages);
    }
    responseMessages.putAll(responseMessageOverrides);
    return responseMessages;
  }

  /** Enables or disables merging of the default response messages into the final set. */
  public DocumentationContextBuilder applyDefaultResponseMessages(boolean applyDefaultResponseMessages) {
    this.applyDefaultResponseMessages = applyDefaultResponseMessages;
    return this;
  }

  /**
   * Evaluates rule builders against the current {@link TypeResolver} and adds the resulting
   * rules. NOTE: relies on {@link #typeResolver(TypeResolver)} having been called first.
   */
  public DocumentationContextBuilder ruleBuilders(List<Function<TypeResolver, AlternateTypeRule>> ruleBuilders) {
    rules.addAll(from(ruleBuilders)
        .transform(evaluator(typeResolver))
        .toList());
    return this;
  }

  /** Sets the type resolver used when evaluating rule builders. */
  public DocumentationContextBuilder typeResolver(TypeResolver typeResolver) {
    this.typeResolver = typeResolver;
    return this;
  }

  /** Sets the ordering of operations; null keeps the previous value. */
  public DocumentationContextBuilder operationOrdering(Ordering<Operation> operationOrdering) {
    this.operationOrdering = defaultIfAbsent(operationOrdering, this.operationOrdering);
    return this;
  }

  /** Adds already-built alternate type rules. */
  public DocumentationContextBuilder rules(List<AlternateTypeRule> rules) {
    this.rules.addAll(rules);
    return this;
  }

  /** Adds default response messages, applied only when applyDefaultResponseMessages is true. */
  public DocumentationContextBuilder defaultResponseMessages(
      Map<RequestMethod, List<ResponseMessage>> defaultResponseMessages) {
    this.defaultResponseMessages.putAll(defaultResponseMessages);
    return this;
  }

  /** Adds produced media types. */
  public DocumentationContextBuilder produces(Set<String> produces) {
    this.produces.addAll(produces);
    return this;
  }

  /** Adds consumed media types. */
  public DocumentationContextBuilder consumes(Set<String> consumes) {
    this.consumes.addAll(consumes);
    return this;
  }

  /** Sets the naming strategy for generic types. */
  public DocumentationContextBuilder genericsNaming(GenericTypeNamingStrategy genericsNamingStrategy) {
    this.genericsNamingStrategy = genericsNamingStrategy;
    return this;
  }

  /** Adds supported protocols (e.g. http, https). */
  public DocumentationContextBuilder protocols(Set<String> protocols) {
    this.protocols.addAll(protocols);
    return this;
  }

  /** Sets the selector that decides which request handlers are documented. */
  public DocumentationContextBuilder selector(ApiSelector apiSelector) {
    this.apiSelector = apiSelector;
    return this;
  }

  /** Sets the optional base path mapping. */
  public DocumentationContextBuilder pathMapping(Optional<String> pathMapping) {
    this.pathMapping = pathMapping;
    return this;
  }

  /** Enables or disables URL templating in generated paths. */
  public DocumentationContextBuilder enableUrlTemplating(boolean isUrlTemplatesEnabled) {
    this.isUrlTemplatesEnabled = isUrlTemplatesEnabled;
    return this;
  }

  /**
   * Assembles the immutable {@link DocumentationContext} from everything configured so far.
   */
  public DocumentationContext build() {
    Map<RequestMethod, List<ResponseMessage>> responseMessages = aggregateResponseMessages();
    return new DocumentationContext(documentationType, handlerMappings, apiInfo, groupName,
        apiSelector, ignorableParameterTypes, responseMessages, globalOperationParameters,
        resourceGroupingStrategy, pathProvider, securityContexts, securitySchemes, rules,
        listingReferenceOrdering, apiDescriptionOrdering, operationOrdering, produces, consumes,
        protocols, genericsNamingStrategy, pathMapping, isUrlTemplatesEnabled);
  }

  /** Adapts a rule builder into a rule by applying it to the given resolver. */
  private Function<Function<TypeResolver, AlternateTypeRule>, AlternateTypeRule> evaluator(
      final TypeResolver typeResolver) {
    return new Function<Function<TypeResolver, AlternateTypeRule>, AlternateTypeRule>() {
      @Override
      public AlternateTypeRule apply(Function<TypeResolver, AlternateTypeRule> input) {
        return input.apply(typeResolver);
      }
    };
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Set; import org.apache.logging.log4j.Logger; import org.apache.geode.DataSerializer; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.Operation; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.DistributionAdvisor; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.ReplyMessage; import org.apache.geode.distributed.internal.ReplyProcessor21; import org.apache.geode.internal.Assert; import org.apache.geode.internal.cache.partitioned.PartitionMessage; import org.apache.geode.internal.cache.partitioned.RegionAdvisor; import org.apache.geode.internal.cache.partitioned.RegionAdvisor.PartitionProfile; import org.apache.geode.internal.logging.log4j.LogMarker; import org.apache.geode.internal.serialization.DeserializationContext; import 
org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.internal.serialization.Version;
import org.apache.geode.logging.internal.log4j.api.LogService;

/**
 * This message is sent for two purposes <br>
 * 1) To destroy the {@link org.apache.geode.internal.cache.PartitionedRegion} for all members
 * specified (typically sent to all members that have the <code>PartitionedRegion</code> defined.)
 * <br>
 * 2) To inform the other nodes that {@link org.apache.geode.internal.cache.PartitionedRegion} is
 * closed/locally destroyed or cache is closed on a node<br>
 * This results in updating of the RegionAdvisor of the remote nodes.
 *
 * Sending this message should flush all previous {@link org.apache.geode.cache.Region} operations,
 * which means this operation should not over-ride
 * {@link org.apache.geode.internal.cache.partitioned.PartitionMessage#getProcessorId()}. It is
 * critical to guarantee delivery of events sent prior to this message.
 *
 * A standard {@link ReplyMessage} is used to send the reply, however any exception that it carries
 * is ignored, preventing interruption after sending this message.
 *
 * @since GemFire 5.0
 */
public class DestroyPartitionedRegionMessage extends PartitionMessage {
  private static final Logger logger = LogService.getLogger();

  /** Raw callback argument of the originating region event. */
  private Object cbArg;

  /** The specific destroy operation performed on the sender */
  private Operation op;

  /** Serial number of the region being removed */
  private int prSerial;

  /** Serial numbers of the buckets for this region */
  private int[] bucketSerials;

  /** Event ID of the destroy operation created at the origin */
  private EventID eventID;

  @Override
  public EventID getEventID() {
    return eventID;
  }

  /**
   * Empty constructor to satisfy {@link DataSerializer} requirements
   */
  public DestroyPartitionedRegionMessage() {}

  /**
   * @param recipients the set of members on which the partitioned region should be destroyed
   * @param region the partitioned region
   * @param processor the processor that the reply will use to notify of the reply.
   * @see #send(Set, PartitionedRegion, RegionEventImpl, int[])
   */
  private DestroyPartitionedRegionMessage(Set recipients, PartitionedRegion region,
      ReplyProcessor21 processor, final RegionEventImpl event, int[] serials) {
    super(recipients, region.getPRId(), processor);
    this.cbArg = event.getRawCallbackArgument();
    this.op = event.getOperation();
    this.prSerial = region.getSerialNumber();
    Assert.assertTrue(this.prSerial != DistributionAdvisor.ILLEGAL_SERIAL);
    this.bucketSerials = serials;
    this.eventID = event.getEventId();
  }

  /**
   * Builds and sends the destroy message to the given recipients.
   *
   * @param recipients set of members who have the PartitionedRegion defined.
   * @param r the PartitionedRegion to destroy on each member
   * @return the response on which to wait for the confirmation
   */
  public static DestroyPartitionedRegionResponse send(Set recipients, PartitionedRegion r,
      final RegionEventImpl event, int[] serials) {
    Assert.assertTrue(recipients != null, "DestroyMessage NULL recipients set");
    DestroyPartitionedRegionResponse resp =
        new DestroyPartitionedRegionResponse(r.getSystem(), recipients);
    DestroyPartitionedRegionMessage m =
        new DestroyPartitionedRegionMessage(recipients, r, resp, event, serials);
    m.setTransactionDistributed(r.getCache().getTxManager().isDistributed());
    r.getDistributionManager().putOutgoing(m);
    return resp;
  }

  /**
   * Unlike most partition messages, an uninitialized region advisor means the region cannot be
   * operated on yet; signal the sender to retry.
   */
  @Override
  protected Throwable processCheckForPR(PartitionedRegion pr,
      DistributionManager distributionManager) {
    if (pr != null && !pr.getDistributionAdvisor().isInitialized()) {
      Throwable thr = new ForceReattemptException(
          String.format("%s : could not find partitioned region with Id %s",
              distributionManager.getDistributionManagerId(),
              pr.getRegionIdentifier()));
      return thr;
    }
    return null;
  }

  /**
   * Applies the destroy/close on this member. Returns true when the caller should send the reply;
   * false when a reply was already sent here.
   */
  @Override
  protected boolean operateOnPartitionedRegion(ClusterDistributionManager dm, PartitionedRegion r,
      long startTime) throws CacheException {
    if (r == null) {
      return true;
    }
    if (this.op.isLocal()) {
      // notify the advisor that the sending member has locally destroyed (or closed) the region
      PartitionProfile pp = r.getRegionAdvisor().getPartitionProfile(getSender());
      if (pp == null) { // Fix for bug#36863
        return true;
      }
      // final Lock isClosingWriteLock =
      // r.getRegionAdvisor().getPartitionProfile(getSender()).getIsClosingWriteLock();
      Assert.assertTrue(this.prSerial != DistributionAdvisor.ILLEGAL_SERIAL);

      boolean ok = true;
      // Examine this peer's profile and look at the serial number in that
      // profile. If we have a newer profile, ignore the request.
      int oldSerial = pp.getSerialNumber();
      if (DistributionAdvisor.isNewerSerialNumber(oldSerial, this.prSerial)) {
        ok = false;
        if (logger.isDebugEnabled()) {
          logger.debug("Not removing region {} serial requested = {}; actual is {}", r.getName(),
              this.prSerial, r.getSerialNumber());
        }
      }
      if (ok) {
        RegionAdvisor ra = r.getRegionAdvisor();
        ra.removeIdAndBuckets(this.sender, this.prSerial, this.bucketSerials, !this.op.isClose());
      }

      sendReply(getSender(), getProcessorId(), dm, null, r, startTime);
      return false;
    }

    // If region's isDestroyed flag is true, we can check if local destroy is done or not and if
    // NOT, we can invoke destroyPartitionedRegionLocally method.
    if (r.isDestroyed()) {
      boolean isClose = this.op.isClose();
      r.destroyPartitionedRegionLocally(!isClose);
      return true;
    }

    if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) {
      logger.trace(LogMarker.DM_VERBOSE, "{} operateOnRegion: {}", getClass().getName(),
          r.getFullPath());
    }
    RegionEventImpl event =
        new RegionEventImpl(r, this.op, this.cbArg, true, r.getMyId(), getEventID());
    r.basicDestroyRegion(event, false, false, true);

    return true;
  }

  /**
   * Appends this message's fields to the debug string. Fix: the bucket-serial list previously
   * opened "(" without ever appending the matching ")".
   */
  @Override
  protected void appendFields(StringBuilder buff) {
    super.appendFields(buff);
    buff.append("; cbArg=").append(this.cbArg).append("; op=").append(this.op);
    buff.append("; prSerial=" + prSerial);
    buff.append("; bucketSerials (" + bucketSerials.length + ")=(");
    for (int i = 0; i < bucketSerials.length; i++) {
      buff.append(Integer.toString(bucketSerials[i]));
      if (i < bucketSerials.length - 1) {
        buff.append(", ");
      }
    }
    buff.append(")");
  }

  @Override
  public int getDSFID() {
    return DESTROY_PARTITIONED_REGION_MESSAGE;
  }

  @Override
  public Version[] getSerializationVersions() {
    return new Version[] {Version.GEODE_1_9_0};
  }

  /** Reads the pre-1.9.0 payload, then the event ID added in GEODE 1.9.0. */
  @Override
  public void fromData(DataInput in,
      DeserializationContext context) throws IOException, ClassNotFoundException {
    fromDataPre_GEODE_1_9_0_0(in, context);
    this.eventID = DataSerializer.readObject(in);
  }

  public void fromDataPre_GEODE_1_9_0_0(DataInput in, DeserializationContext context)
      throws IOException, ClassNotFoundException {
    super.fromData(in, context);
    this.cbArg = DataSerializer.readObject(in);
    this.op = Operation.fromOrdinal(in.readByte());
    this.prSerial = in.readInt();
    int len = in.readInt();
    this.bucketSerials = new int[len];
    for (int i = 0; i < len; i++) {
      this.bucketSerials[i] = in.readInt();
    }
  }

  /** Writes the pre-1.9.0 payload, then the event ID added in GEODE 1.9.0. */
  @Override
  public void toData(DataOutput out,
      SerializationContext context) throws IOException {
    toDataPre_GEODE_1_9_0_0(out, context);
    DataSerializer.writeObject(this.eventID, out);
  }

  public void toDataPre_GEODE_1_9_0_0(DataOutput out, SerializationContext context)
      throws IOException {
    super.toData(out, context);
    DataSerializer.writeObject(this.cbArg, out);
    out.writeByte(this.op.ordinal);
    out.writeInt(this.prSerial);
    out.writeInt(this.bucketSerials.length);
    for (int i = 0; i < this.bucketSerials.length; i++) {
      out.writeInt(this.bucketSerials[i]);
    }
  }

  /**
   * The response on which to wait for all the replies. This response ignores any exceptions
   * received from the "far side"
   *
   * @since GemFire 5.0
   */
  public static class DestroyPartitionedRegionResponse extends ReplyProcessor21 {
    public DestroyPartitionedRegionResponse(InternalDistributedSystem system, Set initMembers) {
      super(system, initMembers);
    }

    @Override
    protected void processException(ReplyException ex) {
      // retry on ForceReattempt in case the region is still being initialized
      if (ex.getRootCause() instanceof ForceReattemptException) {
        super.processException(ex);
      }
      // other errors are ignored
      else if (logger.isDebugEnabled()) {
        logger.debug("DestroyRegionResponse ignoring exception", ex);
      }
    }
  }
}
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.backends.lwjgl3;

import java.nio.IntBuffer;

import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWDropCallback;
import org.lwjgl.glfw.GLFWImage;
import org.lwjgl.glfw.GLFWWindowCloseCallback;
import org.lwjgl.glfw.GLFWWindowFocusCallback;
import org.lwjgl.glfw.GLFWWindowIconifyCallback;
import org.lwjgl.glfw.GLFWWindowMaximizeCallback;
import org.lwjgl.glfw.GLFWWindowRefreshCallback;

import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.SharedLibraryLoader;

/**
 * Wraps a single GLFW window and drives its {@link ApplicationListener}.
 * GLFW callbacks defer their work onto this window's runnable queue so the
 * listener is only invoked from the window's render loop.
 */
public class Lwjgl3Window implements Disposable {
	private long windowHandle;
	private final ApplicationListener listener;
	private boolean listenerInitialized = false;
	private Lwjgl3WindowListener windowListener;
	private Lwjgl3Graphics graphics;
	private Lwjgl3Input input;
	private final Lwjgl3ApplicationConfiguration config;
	// Pending runnables posted from callbacks/other threads; drained each frame.
	private final Array<Runnable> runnables = new Array<Runnable>();
	private final Array<Runnable> executedRunnables = new Array<Runnable>();
	// Scratch buffers for GLFW out-parameter queries (e.g. window position).
	private final IntBuffer tmpBuffer;
	private final IntBuffer tmpBuffer2;
	private boolean iconified = false;
	private boolean requestRendering = false;

	private final GLFWWindowFocusCallback focusCallback = new GLFWWindowFocusCallback() {
		@Override
		public void invoke(long windowHandle, final boolean focused) {
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						if (focused) {
							windowListener.focusGained();
						} else {
							windowListener.focusLost();
						}
					}
				}
			});
		}
	};

	private final GLFWWindowIconifyCallback iconifyCallback = new GLFWWindowIconifyCallback() {
		@Override
		public void invoke(long windowHandle, final boolean iconified) {
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						windowListener.iconified(iconified);
					}
					Lwjgl3Window.this.iconified = iconified;
					// Iconified windows stop rendering; pause/resume the listener accordingly.
					if (iconified) {
						listener.pause();
					} else {
						listener.resume();
					}
				}
			});
		}
	};

	private final GLFWWindowMaximizeCallback maximizeCallback = new GLFWWindowMaximizeCallback() {
		@Override
		public void invoke (long windowHandle, final boolean maximized) {
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						windowListener.maximized(maximized);
					}
				}
			});
		}
	};

	private final GLFWWindowCloseCallback closeCallback = new GLFWWindowCloseCallback() {
		@Override
		public void invoke(final long windowHandle) {
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						// Listener may veto the close request.
						if (!windowListener.closeRequested()) {
							GLFW.glfwSetWindowShouldClose(windowHandle, false);
						}
					}
				}
			});
		}
	};

	private final GLFWDropCallback dropCallback = new GLFWDropCallback() {
		@Override
		public void invoke(final long windowHandle, final int count, final long names) {
			// Copy the native strings before the callback returns; GLFW frees them afterwards.
			final String[] files = new String[count];
			for (int i = 0; i < count; i++) {
				files[i] = getName(names, i);
			}
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						windowListener.filesDropped(files);
					}
				}
			});
		}
	};

	private final GLFWWindowRefreshCallback refreshCallback = new GLFWWindowRefreshCallback() {
		@Override
		public void invoke(long windowHandle) {
			postRunnable(new Runnable() {
				@Override
				public void run() {
					if (windowListener != null) {
						windowListener.refreshRequested();
					}
				}
			});
		}
	};

	Lwjgl3Window(ApplicationListener listener, Lwjgl3ApplicationConfiguration config) {
		this.listener = listener;
		this.windowListener = config.windowListener;
		this.config = config;
		this.tmpBuffer = BufferUtils.createIntBuffer(1);
		this.tmpBuffer2 = BufferUtils.createIntBuffer(1);
	}

	/** Binds this wrapper to a native window handle and installs all GLFW callbacks. */
	void create(long windowHandle) {
		this.windowHandle = windowHandle;
		this.input = new Lwjgl3Input(this);
		this.graphics = new Lwjgl3Graphics(this);

		GLFW.glfwSetWindowFocusCallback(windowHandle, focusCallback);
		GLFW.glfwSetWindowIconifyCallback(windowHandle, iconifyCallback);
		GLFW.glfwSetWindowMaximizeCallback(windowHandle, maximizeCallback);
		GLFW.glfwSetWindowCloseCallback(windowHandle, closeCallback);
		GLFW.glfwSetDropCallback(windowHandle, dropCallback);
		GLFW.glfwSetWindowRefreshCallback(windowHandle, refreshCallback);

		if (windowListener != null) {
			windowListener.created(this);
		}
	}

	/** @return the {@link ApplicationListener} associated with this window **/
	public ApplicationListener getListener() {
		return listener;
	}

	/** @return the {@link Lwjgl3WindowListener} set on this window **/
	public Lwjgl3WindowListener getWindowListener() {
		return windowListener;
	}

	public void setWindowListener(Lwjgl3WindowListener listener) {
		this.windowListener = listener;
	}

	/**
	 * Post a {@link Runnable} to this window's event queue. Use this
	 * if you access statics like {@link Gdx#graphics} in your runnable
	 * instead of {@link Application#postRunnable(Runnable)}.
	 */
	public void postRunnable(Runnable runnable) {
		synchronized (runnables) {
			runnables.add(runnable);
		}
	}

	/** Sets the position of the window in logical coordinates. All monitors
	 * span a virtual surface together. The coordinates are relative to
	 * the first monitor in the virtual surface.
	 **/
	public void setPosition(int x, int y) {
		GLFW.glfwSetWindowPos(windowHandle, x, y);
	}

	/** @return the window position in logical coordinates. All monitors
	 * span a virtual surface together. The coordinates are relative to
	 * the first monitor in the virtual surface.
	 **/
	public int getPositionX() {
		GLFW.glfwGetWindowPos(windowHandle, tmpBuffer, tmpBuffer2);
		return tmpBuffer.get(0);
	}

	/** @return the window position in logical coordinates. All monitors
	 * span a virtual surface together. The coordinates are relative to
	 * the first monitor in the virtual surface.
	 **/
	public int getPositionY() {
		GLFW.glfwGetWindowPos(windowHandle, tmpBuffer, tmpBuffer2);
		return tmpBuffer2.get(0);
	}

	/**
	 * Sets the visibility of the window. Invisible windows will still
	 * call their {@link ApplicationListener}
	 */
	public void setVisible(boolean visible) {
		if (visible) {
			GLFW.glfwShowWindow(windowHandle);
		} else {
			GLFW.glfwHideWindow(windowHandle);
		}
	}

	/**
	 * Closes this window and pauses and disposes the associated
	 * {@link ApplicationListener}.
	 */
	public void closeWindow() {
		GLFW.glfwSetWindowShouldClose(windowHandle, true);
	}

	/**
	 * Minimizes (iconifies) the window. Iconified windows do not call
	 * their {@link ApplicationListener} until the window is restored.
	 */
	public void iconifyWindow() {
		GLFW.glfwIconifyWindow(windowHandle);
	}

	/**
	 * De-minimizes (de-iconifies) and de-maximizes the window.
	 */
	public void restoreWindow() {
		GLFW.glfwRestoreWindow(windowHandle);
	}

	/**
	 * Maximizes the window.
	 */
	public void maximizeWindow() {
		GLFW.glfwMaximizeWindow(windowHandle);
	}

	/**
	 * Brings the window to front and sets input focus. The window should already be visible and not iconified.
	 */
	public void focusWindow() {
		GLFW.glfwFocusWindow(windowHandle);
	}

	/**
	 * Sets the icon that will be used in the window's title bar. Has no effect in macOS, which doesn't use window icons.
	 * @param image One or more images. The one closest to the system's desired size will be scaled. Good sizes include
	 * 16x16, 32x32 and 48x48. Pixmap format {@link Pixmap.Format.RGBA8888 RGBA8888} is preferred so the images will not
	 * have to be copied and converted. The chosen image is copied, and the provided Pixmaps are not disposed.
	 */
	public void setIcon (Pixmap... image) {
		setIcon(windowHandle, image);
	}

	static void setIcon(long windowHandle, String[] imagePaths, Files.FileType imageFileType) {
		if (SharedLibraryLoader.isMac) return;

		Pixmap[] pixmaps = new Pixmap[imagePaths.length];
		for (int i = 0; i < imagePaths.length; i++) {
			pixmaps[i] = new Pixmap(Gdx.files.getFileHandle(imagePaths[i], imageFileType));
		}

		setIcon(windowHandle, pixmaps);

		for (Pixmap pixmap : pixmaps) {
			pixmap.dispose();
		}
	}

	static void setIcon(long windowHandle, Pixmap[] images) {
		if (SharedLibraryLoader.isMac) return;

		GLFWImage.Buffer buffer = GLFWImage.malloc(images.length);
		// Pixmaps converted to RGBA8888 on the fly; disposed after the icon is set.
		Pixmap[] tmpPixmaps = new Pixmap[images.length];

		for (int i = 0; i < images.length; i++) {
			Pixmap pixmap = images[i];

			if (pixmap.getFormat() != Pixmap.Format.RGBA8888) {
				Pixmap rgba = new Pixmap(pixmap.getWidth(), pixmap.getHeight(), Pixmap.Format.RGBA8888);
				rgba.setBlending(Pixmap.Blending.None);
				rgba.drawPixmap(pixmap, 0, 0);
				tmpPixmaps[i] = rgba;
				pixmap = rgba;
			}

			GLFWImage icon = GLFWImage.malloc();
			icon.set(pixmap.getWidth(), pixmap.getHeight(), pixmap.getPixels());
			buffer.put(icon);

			icon.free();
		}

		buffer.position(0);
		GLFW.glfwSetWindowIcon(windowHandle, buffer);

		buffer.free();
		for (Pixmap pixmap : tmpPixmaps) {
			if (pixmap != null) {
				pixmap.dispose();
			}
		}
	}

	public void setTitle (CharSequence title){
		GLFW.glfwSetWindowTitle(windowHandle, title);
	}

	/** Sets minimum and maximum size limits for the window. If the window is full screen or not resizable, these limits are
	 * ignored. Use -1 to indicate an unrestricted dimension.
	 */
	public void setSizeLimits (int minWidth, int minHeight, int maxWidth, int maxHeight) {
		setSizeLimits(windowHandle, minWidth, minHeight, maxWidth, maxHeight);
	}

	static void setSizeLimits (long windowHandle, int minWidth, int minHeight, int maxWidth, int maxHeight) {
		GLFW.glfwSetWindowSizeLimits(windowHandle,
			minWidth > -1 ? minWidth : GLFW.GLFW_DONT_CARE,
			minHeight > -1 ? minHeight : GLFW.GLFW_DONT_CARE,
			maxWidth > -1 ? maxWidth : GLFW.GLFW_DONT_CARE,
			maxHeight > -1 ? maxHeight : GLFW.GLFW_DONT_CARE);
	}

	Lwjgl3Graphics getGraphics() {
		return graphics;
	}

	Lwjgl3Input getInput() {
		return input;
	}

	public long getWindowHandle() {
		return windowHandle;
	}

	void windowHandleChanged(long windowHandle) {
		this.windowHandle = windowHandle;
		input.windowHandleChanged(windowHandle);
	}

	/**
	 * Runs one frame for this window: drains posted runnables, updates input,
	 * and renders if needed.
	 *
	 * @return whether the frame was actually rendered
	 */
	boolean update() {
		if (!listenerInitialized) {
			initializeListener();
		}
		synchronized (runnables) {
			executedRunnables.addAll(runnables);
			runnables.clear();
		}
		for (Runnable runnable : executedRunnables) {
			runnable.run();
		}
		boolean shouldRender = executedRunnables.size > 0 || graphics.isContinuousRendering();
		executedRunnables.clear();

		if (!iconified)
			input.update();

		synchronized (this) {
			shouldRender |= requestRendering && !iconified;
			requestRendering = false;
		}

		if (shouldRender) {
			graphics.update();
			listener.render();
			GLFW.glfwSwapBuffers(windowHandle);
		}

		if (!iconified)
			input.prepareNext();

		return shouldRender;
	}

	void requestRendering() {
		synchronized (this) {
			this.requestRendering = true;
		}
	}

	boolean shouldClose() {
		return GLFW.glfwWindowShouldClose(windowHandle);
	}

	Lwjgl3ApplicationConfiguration getConfig() {
		return config;
	}

	boolean isListenerInitialized() {
		return listenerInitialized;
	}

	void initializeListener() {
		if (!listenerInitialized) {
			listener.create();
			listener.resize(graphics.getWidth(), graphics.getHeight());
			listenerInitialized = true;
		}
	}

	/** Makes this window's GL context current and points the Gdx statics at it. */
	void makeCurrent() {
		Gdx.graphics = graphics;
		Gdx.gl30 = graphics.getGL30();
		Gdx.gl20 = Gdx.gl30 != null ? Gdx.gl30 : graphics.getGL20();
		Gdx.gl = Gdx.gl30 != null ? Gdx.gl30 : Gdx.gl20;
		Gdx.input = input;

		GLFW.glfwMakeContextCurrent(windowHandle);
	}

	@Override
	public void dispose() {
		listener.pause();
		listener.dispose();
		Lwjgl3Cursor.dispose(this);
		graphics.dispose();
		input.dispose();
		// Unset every callback installed in create() before destroying the window.
		// Previously the maximize and refresh callbacks were not unset here.
		GLFW.glfwSetWindowFocusCallback(windowHandle, null);
		GLFW.glfwSetWindowIconifyCallback(windowHandle, null);
		GLFW.glfwSetWindowMaximizeCallback(windowHandle, null);
		GLFW.glfwSetWindowCloseCallback(windowHandle, null);
		GLFW.glfwSetDropCallback(windowHandle, null);
		GLFW.glfwSetWindowRefreshCallback(windowHandle, null);
		GLFW.glfwDestroyWindow(windowHandle);

		focusCallback.free();
		iconifyCallback.free();
		maximizeCallback.free();
		closeCallback.free();
		dropCallback.free();
		refreshCallback.free();
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (int) (windowHandle ^ (windowHandle >>> 32));
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		Lwjgl3Window other = (Lwjgl3Window) obj;
		if (windowHandle != other.windowHandle)
			return false;
		return true;
	}
}
package com.atlassian.plugin.loaders;

import com.atlassian.plugin.DefaultPluginArtifactFactory;
import com.atlassian.plugin.ModuleDescriptorFactory;
import com.atlassian.plugin.Plugin;
import com.atlassian.plugin.PluginArtifact;
import com.atlassian.plugin.PluginArtifactFactory;
import com.atlassian.plugin.PluginException;
import com.atlassian.plugin.PluginParseException;
import com.atlassian.plugin.PluginState;
import com.atlassian.plugin.event.PluginEventListener;
import com.atlassian.plugin.event.PluginEventManager;
import com.atlassian.plugin.event.events.PluginFrameworkShutdownEvent;
import com.atlassian.plugin.factories.PluginFactory;
import com.atlassian.plugin.impl.UnloadablePlugin;
import com.atlassian.plugin.loaders.classloading.DeploymentUnit;
import com.atlassian.plugin.loaders.classloading.Scanner;
import org.apache.commons.lang.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.Collections;

/**
 * Plugin loader that delegates the detection of plugins to a Scanner instance. The scanner may monitor the contents
 * of a directory on disk, a database, or any other place plugins may be hidden.
 *
 * @since 2.1.0
 */
public class ScanningPluginLoader implements DynamicPluginLoader {
    private static final Logger log = LoggerFactory.getLogger(ScanningPluginLoader.class);

    // Detects deployment units (e.g. files in a directory) that may contain plugins.
    protected final com.atlassian.plugin.loaders.classloading.Scanner scanner;

    // Maps each discovered deployment unit to the plugin loaded from it.
    // TreeMap keeps units in their natural order.
    protected final Map<DeploymentUnit, Plugin> plugins;

    // Factories tried in declaration order when turning an artifact into a plugin.
    protected final List<PluginFactory> pluginFactories;

    // Creates a PluginArtifact from a deployment unit's URI.
    protected final PluginArtifactFactory pluginArtifactFactory;

    /**
     * Constructor that provides a default plugin artifact factory
     *
     * @param scanner The scanner to use to detect new plugins
     * @param pluginFactories The deployers that will handle turning an artifact into a plugin
     * @param pluginEventManager The event manager, used for listening for shutdown events
     * @since 2.0.0
     */
    public ScanningPluginLoader(final Scanner scanner, final List<PluginFactory> pluginFactories, final PluginEventManager pluginEventManager) {
        this(scanner, pluginFactories, new DefaultPluginArtifactFactory(), pluginEventManager);
    }

    /**
     * Construct a new scanning plugin loader with no default values
     *
     * @param scanner The scanner to use to detect new plugins
     * @param pluginFactories The deployers that will handle turning an artifact into a plugin
     * @param pluginArtifactFactory used to create new plugin artifacts from an URL
     * @param pluginEventManager The event manager, used for listening for shutdown events
     * @since 2.0.0
     */
    public ScanningPluginLoader(final Scanner scanner, final List<PluginFactory> pluginFactories, final PluginArtifactFactory pluginArtifactFactory, final PluginEventManager pluginEventManager) {
        Validate.notNull(pluginFactories, "The list of plugin factories must be specified");
        Validate.notNull(pluginEventManager, "The event manager must be specified");
        Validate.notNull(scanner, "The scanner must be specified");
        plugins = new TreeMap<DeploymentUnit, Plugin>();
        this.pluginArtifactFactory = pluginArtifactFactory;
        this.scanner = scanner;
        // Defensive copy so later mutation of the caller's list has no effect.
        this.pluginFactories = new ArrayList<PluginFactory>(pluginFactories);
        // Registers this instance for @PluginEventListener events (see onShutdown).
        pluginEventManager.register(this);
    }

    /**
     * Scans for deployment units and loads a plugin from each one found.
     *
     * @param moduleDescriptorFactory factory used to build the plugins' module descriptors
     * @return an unmodifiable snapshot of all loaded plugins
     */
    public Collection<Plugin> loadAllPlugins(final ModuleDescriptorFactory moduleDescriptorFactory) {
        scanner.scan();
        for (final DeploymentUnit deploymentUnit : scanner.getDeploymentUnits()) {
            Plugin plugin = deployPluginFromUnit(deploymentUnit, moduleDescriptorFactory);
            plugin = postProcess(plugin);
            plugins.put(deploymentUnit, plugin);
        }
        if (scanner.getDeploymentUnits().isEmpty()) {
            log.info("No plugins found to be deployed");
        }

        return Collections.unmodifiableCollection(new ArrayList<Plugin>(plugins.values()));
    }

    /**
     * Tries each plugin factory in turn until one can create a plugin from the unit's artifact.
     * Any failure (including Throwable) yields an UnloadablePlugin so a broken artifact never
     * aborts loading of the remaining plugins.
     *
     * @param deploymentUnit the unit to turn into a plugin
     * @param moduleDescriptorFactory factory used to build the plugin's module descriptors
     * @return the created plugin, or an UnloadablePlugin describing the failure
     */
    protected Plugin deployPluginFromUnit(final DeploymentUnit deploymentUnit, final ModuleDescriptorFactory moduleDescriptorFactory) {
        Plugin plugin = null;
        String errorText = "No plugin factories found for plugin file " + deploymentUnit;
        String pluginKey = null;
        for (final PluginFactory factory : pluginFactories) {
            try {
                final PluginArtifact artifact = pluginArtifactFactory.create(deploymentUnit.getPath().toURI());
                // canCreate returns the plugin key when this factory recognises the artifact.
                pluginKey = factory.canCreate(artifact);
                if (pluginKey != null) {
                    plugin = factory.create(artifact, moduleDescriptorFactory);
                    if (plugin != null) {
                        break;
                    }
                }
            } catch (final Throwable ex) {
                log.error("Unable to deploy plugin '" + pluginKey + "', file " + deploymentUnit, ex);
                errorText = ex.getMessage();
                break;
            }
        }

        if (plugin == null) {
            // Fall back to a placeholder plugin that carries the failure reason.
            plugin = new UnloadablePlugin(errorText);
            if (pluginKey != null) {
                plugin.setKey(pluginKey);
            } else {
                plugin.setKey(deploymentUnit.getPath().getName());
            }
        } else {
            log.info("Plugin " + deploymentUnit + " created");
        }
        return plugin;
    }

    public boolean supportsRemoval() {
        return true;
    }

    public boolean supportsAddition() {
        return true;
    }

    /**
     * @return all plugins, now loaded by the pluginLoader, which have been discovered and added since the
     * last time a check was performed.
     */
    public Collection<Plugin> addFoundPlugins(final ModuleDescriptorFactory moduleDescriptorFactory) throws PluginParseException {
        // find missing plugins
        final Collection<DeploymentUnit> updatedDeploymentUnits = scanner.scan();

        // create list while updating internal state
        final List<Plugin> foundPlugins = new ArrayList<Plugin>();
        for (final DeploymentUnit deploymentUnit : updatedDeploymentUnits) {
            if (!plugins.containsKey(deploymentUnit)) {
                Plugin plugin = deployPluginFromUnit(deploymentUnit, moduleDescriptorFactory);
                plugin = postProcess(plugin);
                plugins.put(deploymentUnit, plugin);
                foundPlugins.add(plugin);
            }
        }
        if (foundPlugins.isEmpty()) {
            log.info("No plugins found to be installed");
        }
        return foundPlugins;
    }

    /**
     * @param plugin - the plugin to remove
     * @throws com.atlassian.plugin.PluginException representing the reason for failure.
     */
    public void removePlugin(final Plugin plugin) throws PluginException {
        if (plugin.getPluginState() == PluginState.ENABLED) {
            throw new PluginException("Cannot remove an enabled plugin");
        }

        if (!plugin.isUninstallable()) {
            throw new PluginException("Cannot remove an uninstallable plugin: [" + plugin.getName() + "]");
        }

        final DeploymentUnit deploymentUnit = findMatchingDeploymentUnit(plugin);
        plugin.uninstall();

        try {
            // Loop over to see if there are any other deployment units with the same filename. This will happen
            // if a newer plugin is uploaded with the same filename as the plugin being removed: in this case the
            // old one has already been deleted
            boolean found = false;
            for (final DeploymentUnit unit : plugins.keySet()) {
                if (unit.getPath().equals(deploymentUnit.getPath()) && !unit.equals(deploymentUnit)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                scanner.remove(deploymentUnit);
            }
        } catch (final SecurityException e) {
            throw new PluginException(e);
        }
        plugins.remove(deploymentUnit);
        log.info("Removed plugin " + plugin.getKey());
    }

    /**
     * Finds the deployment unit this loader originally deployed the given plugin from.
     *
     * @param plugin the plugin to look up
     * @return the matching deployment unit
     * @throws PluginException if this loader never deployed the plugin
     */
    private DeploymentUnit findMatchingDeploymentUnit(final Plugin plugin) throws PluginException {
        DeploymentUnit deploymentUnit = null;
        for (final Map.Entry<DeploymentUnit, Plugin> entry : plugins.entrySet()) {
            // no, you don't want to use entry.getValue().equals(plugin) here as it breaks upgrades where it is a new
            // version of the plugin but the key and version number hasn't changed, and hence, equals() will always return
            // true
            if (entry.getValue() == plugin) {
                deploymentUnit = entry.getKey();
                break;
            }
        }

        if (deploymentUnit == null) {
            throw new PluginException("This pluginLoader has no memory of deploying the plugin you are trying remove: [" + plugin.getName() + "]");
        }
        return deploymentUnit;
    }

    /**
     * Called during plugin framework shutdown
     * @param event The shutdown event
     */
    @PluginEventListener
    public void onShutdown(final PluginFrameworkShutdownEvent event) {
        // Uninstall what we can, clear our bookkeeping, and reset the scanner.
        for (final Iterator<Plugin> it = plugins.values().iterator(); it.hasNext();) {
            final Plugin plugin = it.next();
            if (plugin.isUninstallable()) {
                plugin.uninstall();
            }
            it.remove();
        }
        scanner.reset();
    }

    /**
     * @deprecated Since 2.0.0, shutdown will automatically occur when the plugin framework is shutdown
     */
    @Deprecated
    public void shutDown() {
        onShutdown(null);
    }

    /**
     * Determines if the artifact can be loaded by any of its deployers
     *
     * @param pluginArtifact The artifact to test
     * @return True if this artifact can be loaded by this loader
     * @throws com.atlassian.plugin.PluginParseException
     */
    public String canLoad(final PluginArtifact pluginArtifact) throws PluginParseException {
        // NOTE(review): despite the javadoc saying "True", this returns the plugin key
        // of the first factory that accepts the artifact, or null if none do.
        String pluginKey = null;
        for (final PluginFactory factory : pluginFactories) {
            pluginKey = factory.canCreate(pluginArtifact);
            if (pluginKey != null) {
                break;
            }
        }
        return pluginKey;
    }

    /**
     * Template method that can be used by a specific {@link PluginLoader} to
     * add information to a {@link Plugin} after it has been loaded.
     *
     * @param plugin a plugin that has been loaded
     * @since v2.2.0
     */
    protected Plugin postProcess(final Plugin plugin) {
        return plugin;
    }
}
package eu.atos.sla.service.rest.helpers;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import eu.atos.sla.dao.IAgreementDAO;
import eu.atos.sla.dao.IEnforcementJobDAO;
import eu.atos.sla.dao.IGuaranteeTermDAO;
import eu.atos.sla.dao.IProviderDAO;
import eu.atos.sla.dao.ITemplateDAO;
import eu.atos.sla.datamodel.IAgreement;
import eu.atos.sla.datamodel.IEnforcementJob;
import eu.atos.sla.datamodel.IGuaranteeTerm;
import eu.atos.sla.datamodel.IGuaranteeTerm.GuaranteeTermStatusEnum;
import eu.atos.sla.datamodel.IProvider;
import eu.atos.sla.datamodel.ITemplate;
import eu.atos.sla.enforcement.IEnforcementService;
import eu.atos.sla.parser.data.GuaranteeTermStatus;
import eu.atos.sla.parser.data.GuaranteeTermsStatus;
import eu.atos.sla.parser.data.wsag.Agreement;
import eu.atos.sla.parser.data.wsag.Context;
import eu.atos.sla.service.rest.helpers.exception.DBExistsHelperException;
import eu.atos.sla.service.rest.helpers.exception.DBMissingHelperException;
import eu.atos.sla.service.rest.helpers.exception.InternalHelperException;
import eu.atos.sla.service.rest.helpers.exception.ParserHelperException;
import eu.atos.sla.util.IModelConverter;
import eu.atos.sla.util.ModelConversionException;

/**
 * REST helper that implements agreement CRUD and status operations on top of
 * the SLA repository DAOs. All public methods run inside a Spring-managed
 * transaction (class-level {@code @Transactional}).
 *
 * @author Elena Garrido
 */
@Service
@Transactional
public class AgreementHelperE{
    private static Logger logger = LoggerFactory.getLogger(AgreementHelperE.class);

    @Autowired
    private IAgreementDAO agreementDAO;

    @Autowired
    private IGuaranteeTermDAO guaranteeTermDAO;

    @Autowired
    private IProviderDAO providerDAO;

    @Autowired
    private ITemplateDAO templateDAO;

    // Converts between WS-Agreement XML objects and repository datamodel objects.
    @Autowired
    private IModelConverter modelConverter;

    @Autowired
    private IEnforcementJobDAO enforcementJobDAO;

    @Autowired
    private IEnforcementService enforcementService;

    public AgreementHelperE() {
    }

    // True when an agreement with this id is already stored in the repository.
    private boolean doesAgreementIdExistInRepository(String agreementId) {
        return agreementDAO.getByAgreementId(agreementId) != null;
    }

    // True when an enforcement job already exists for this agreement id.
    private boolean doesEnforcementExistInRepository(String agreementId) {
        return enforcementJobDAO.getByAgreementId(agreementId) != null;
    }

    // Looks up a provider by UUID; null-safe on the argument.
    private IProvider providerFromRepository(String providerUUID) {
        if (providerUUID==null) return null;
        return providerDAO.getByUUID(providerUUID) ;
    }

    // Looks up a template by UUID; null-safe on the argument.
    private ITemplate templateFromRepository(String templateUUID) {
        if (templateUUID==null) return null;
        return templateDAO.getByUuid(templateUUID);
    }

    /**
     * Stores a new agreement and creates its (stopped) enforcement job.
     *
     * @param agreementXML parsed WS-Agreement document; an agreement id is generated if absent
     * @param originalSerializedAgreement the raw serialized agreement, stored alongside the entity
     * @return the agreement id of the stored agreement
     * @throws DBMissingHelperException if the referenced provider or template is not in the repository
     * @throws DBExistsHelperException if the agreement or its enforcement job already exists
     * @throws InternalHelperException if nothing was stored (e.g. agreementXML was null)
     * @throws ParserHelperException if model conversion of the XML fails
     */
    public String createAgreement(Agreement agreementXML, String originalSerializedAgreement) throws DBMissingHelperException, DBExistsHelperException, InternalHelperException, ParserHelperException {
        logger.debug("StartOf createAgreement payload:{}", originalSerializedAgreement);
        try{
            IAgreement agreementStored = null;
            if (agreementXML != null) {

                // add field AgreementId if it doesn't exist
                if (agreementXML.getAgreementId() == null) {
                    String agreementId = UUID.randomUUID().toString();
                    logger.debug("createAgreement agreement has no uuid, {} will be assigned", agreementId);
                    // Keep the serialized form consistent with the generated id.
                    originalSerializedAgreement = setAgreementIdInSerializedAgreement(originalSerializedAgreement, agreementId);
                    agreementXML.setAgreementId(agreementId);
                }

                if (!doesAgreementIdExistInRepository(agreementXML.getAgreementId())) {
                    IAgreement agreement = modelConverter.getAgreementFromAgreementXML(agreementXML, originalSerializedAgreement);
                    String providerUuid = agreement.getProvider().getUuid();
                    IProvider provider = providerFromRepository(providerUuid);
                    if (provider == null) {
                        throw new DBMissingHelperException("Provider with id:"+ providerUuid+ " doesn't exist SLA Repository Database");
                    }
                    agreement.setProvider(provider);

                    String templateUuid = agreement.getTemplate().getUuid();
                    ITemplate template = templateFromRepository(templateUuid);
                    if (template == null) {
                        throw new DBMissingHelperException("Template with id:"+ templateUuid+ " doesn't exist SLA Repository Database");
                    }
                    agreement.setTemplate(template);

                    agreementStored = this.agreementDAO.save(agreement);

                    /* create a stopped enforcement job */
                    if (!doesEnforcementExistInRepository(agreementStored.getAgreementId())) {
                        // the enforcement doesn't exist
                        IEnforcementJob ejob = enforcementService.createEnforcementJob(agreementStored.getAgreementId());
                        logger.debug("EnforcementJob {} created", ejob.getId());
                    } else {
                        throw new DBExistsHelperException("Enforcement with id:" + agreementStored.getAgreementId() + " already exists in the SLA Repository Database");
                    }
                } else {
                    throw new DBExistsHelperException("Agreement with id:"+ agreementXML.getAgreementId()+ " already exists in the SLA Repository Database");
                }
            }

            if (agreementStored != null) {
                logger.debug("EndOf createAgreement");
                return agreementStored.getAgreementId();
            } else{
                logger.debug("EndOf createAgreement");
                throw new InternalHelperException("Error when creating agreement the SLA Repository Database");
            }
        } catch (ModelConversionException e) {
            logger.error("Error in createAgreement " , e);
            throw new ParserHelperException("Error when creating:" + e.getMessage() );
        }
    }

    /**
     * Builds the aggregated guarantee-terms status pojo for an agreement.
     * The overall value is computed by {@link AgreementStatusCalculator}.
     */
    private GuaranteeTermsStatus getGuaranteeStatus(String agreementId, List<IGuaranteeTerm> guaranteeTerms) {

        // Pojo GuaranteeTermsStatus
        GuaranteeTermsStatus guaranteeTermsStatus = new GuaranteeTermsStatus();
        List<GuaranteeTermStatus> guaranteeTermStatusList = new ArrayList<GuaranteeTermStatus>();

        // Status GuaranteTerm
        GuaranteeTermStatusEnum agreementStatus = AgreementStatusCalculator.getStatus(guaranteeTerms);

        guaranteeTermsStatus.setAgreementId(agreementId);
        guaranteeTermsStatus.setValue(agreementStatus.toString());

        // Fill GuaranteeTermsStatus pojo
        for (IGuaranteeTerm guaranteeTerm : guaranteeTerms) {
            GuaranteeTermStatus guaranteeTermStatus = new GuaranteeTermStatus();
            guaranteeTermStatus.setName(guaranteeTerm.getName());
            guaranteeTermStatus.setValue(guaranteeTerm.getStatus().toString());
            guaranteeTermStatusList.add(guaranteeTermStatus);
        }
        guaranteeTermsStatus.setGuaranteeTermsStatus(guaranteeTermStatusList);
        return guaranteeTermsStatus;
    }

    /**
     * Searches agreements by any combination of consumer, provider, template and active flag.
     * Null parameters are treated as wildcards by the DAO search.
     */
    public List<IAgreement> getAgreements(String consumerId, String providerId, String templateId, Boolean active) {
        logger.debug("StartOf getAgreements consumerId:{} - providerId:{} - templateId:{} - active:{}", consumerId, providerId, templateId, active);
        List<IAgreement> agreements = agreementDAO.search(consumerId, providerId, templateId, active);
        logger.debug("EndOf getAgreements");
        return agreements;
    }

    /** @return the agreement with the given id, or null if not found. */
    public IAgreement getAgreementByID(String id) {
        logger.debug("StartOf getAgreementByID id:{}", id);
        IAgreement agreement = agreementDAO.getByAgreementId(id);
        logger.debug("EndOf getAgreementByID");
        return agreement;
    }

    /**
     * @return the WS-Agreement context of the agreement, or null if the agreement is not found
     * @throws InternalHelperException if model conversion fails
     */
    public Context getAgreementContextByID(String id) throws InternalHelperException {
        logger.debug("StartOf getAgreementContextByID id:{}", id);
        IAgreement agreement = agreementDAO.getByAgreementId(id);
        Context context = null;
        try {
            if (agreement!= null) context = modelConverter.getContextFromAgreement(agreement);
        } catch (ModelConversionException e) {
            logger.error("Error getAgreementContextByID ",e);
            throw new InternalHelperException(e.getMessage());
        }
        logger.debug("EndOf getAgreementContextByID");
        return context;
    }

    /** @return agreements active at the given instant (epoch-based; semantics defined by the DAO). */
    public List<IAgreement> getActiveAgreements(long actualDate) {
        logger.debug("StartOf getActiveAgreements actualDate:{}", actualDate);
        List<IAgreement> agreements = agreementDAO.getByActiveAgreements(actualDate);
        logger.debug("EndOf getActiveAgreements");
        return agreements;
    }

    /**
     * Deletes an agreement and, first, any enforcement job associated with it.
     *
     * @return true if the agreement existed and was deleted
     */
    public boolean deleteByAgreementId(String agreementId) {
        logger.debug("StartOf deleteByAgreementId agreementId:{}", agreementId);
        boolean deleted = false;

        IEnforcementJob enforcementJob = enforcementJobDAO.getByAgreementId(agreementId);
        if (enforcementJob!=null){
            logger.debug("EnforcementJob exists associated to agreementId {} it will be stopped and removed", agreementId);
            enforcementJobDAO.delete(enforcementJob);
        }

        IAgreement agreement = agreementDAO.getByAgreementId(agreementId);
        if (agreement != null) {
            deleted = this.agreementDAO.delete(agreement);
        }
        logger.debug("EndOf deleteByAgreementId");
        return deleted;
    }

    /**
     * Computes the guarantee-terms status of an agreement.
     *
     * @throws DBMissingHelperException if the agreement does not exist
     */
    public GuaranteeTermsStatus getAgreementStatus(String id) throws DBMissingHelperException{
        logger.debug("StartOf getAgreementStatus id:{}", id);
        IAgreement agreement = agreementDAO.getByAgreementId(id);
        if (agreement == null) throw new DBMissingHelperException("The agreementId " + id + " doesn't exist");

        List<IGuaranteeTerm> guaranteeTerms = agreement.getGuaranteeTerms();
        GuaranteeTermsStatus guaranteeTermsStatus = getGuaranteeStatus(id, guaranteeTerms);
        logger.debug("EndOf getAgreementStatus");
        return guaranteeTermsStatus;
    }

    // Injects a wsag:AgreementId attribute into the serialized agreement by textual
    // replacement of the root element's opening tag.
    // NOTE(review): this only matches one exact spelling of the opening tag (namespace
    // order, spacing); other serializations would pass through unchanged — verify with callers.
    private String setAgreementIdInSerializedAgreement(String serializedAgreement, String agreementId){
        return serializedAgreement.replaceAll(
                "<wsag:Agreement xmlns:wsag=\"http://www.ggf.org/namespaces/ws-agreement\" xmlns:sla=\"http://sla.atos.eu\">",
                "<wsag:Agreement xmlns:wsag=\"http://www.ggf.org/namespaces/ws-agreement\" xmlns:sla=\"http://sla.atos.eu\" wsag:AgreementId=\""+ agreementId + "\">");
    }

    /** Searches agreements for a given consumer created from a given template. */
    public List<IAgreement> getAgreementsPerTemplateAndConsumer(String consumerId, String templateUUID) {
        logger.debug("StartOf getAgreementsPerTemplateAndConsumer consumerId:"+consumerId+ " - templateUUID:"+templateUUID);
        List<IAgreement> agreements = agreementDAO.searchPerTemplateAndConsumer(consumerId, templateUUID);
        logger.debug("EndOf getAgreementsPerTemplateAndConsumer");
        return agreements;
    }

    /**
     * Aggregates individual guarantee-term statuses into a single agreement status:
     * VIOLATED if any term is violated (short-circuits), otherwise NON_DETERMINED if
     * any term is null/undetermined (or the list is empty), otherwise FULFILLED.
     */
    public static class AgreementStatusCalculator {

        public static GuaranteeTermStatusEnum getStatus(List<IGuaranteeTerm> guaranteeTerms) {
            GuaranteeTermStatusEnum result = GuaranteeTermStatusEnum.FULFILLED;
            if (guaranteeTerms.size() == 0) {
                result = GuaranteeTermStatusEnum.NON_DETERMINED;
            } else {
                result = GuaranteeTermStatusEnum.FULFILLED;
                for (IGuaranteeTerm guaranteeTerm : guaranteeTerms) {
                    GuaranteeTermStatusEnum termStatus = guaranteeTerm.getStatus();
                    if (termStatus == null || termStatus == GuaranteeTermStatusEnum.NON_DETERMINED) {
                        result = GuaranteeTermStatusEnum.NON_DETERMINED;
                    } else if (termStatus == GuaranteeTermStatusEnum.VIOLATED) {
                        result = GuaranteeTermStatusEnum.VIOLATED;
                        break;
                    }
                }
            }
            return result;
        }
    }
}
/*******************************************************************************
 *
 *    Copyright (C) 2015-2022 the BBoxDB project
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 *
 *******************************************************************************/
package org.bboxdb.tools.gui.views.query;

import java.awt.Color;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.TransferQueue;

import org.bboxdb.misc.BBoxDBException;
import org.bboxdb.network.client.BBoxDBCluster;
import org.bboxdb.network.client.ContinuousQueryState;
import org.bboxdb.network.client.future.client.JoinedTupleListFuture;
import org.bboxdb.query.ContinuousQueryPlan;
import org.bboxdb.storage.entity.IdleQueryStateRemovedTuple;
import org.bboxdb.storage.entity.InvalidationTuple;
import org.bboxdb.storage.entity.MultiTuple;
import org.bboxdb.storage.entity.Tuple;
import org.bboxdb.storage.entity.TupleStoreName;
import org.bboxdb.storage.entity.WatermarkTuple;
import org.bboxdb.storage.sstable.SSTableConst;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Worker for a continuous query: streams result tuples from the cluster and
 * paints them on the GUI. When the query plan requests watermarks, tuples are
 * buffered in {@link #watermarkQueue} and flushed once a watermark has been
 * seen from every registered region.
 */
public class ContinuousQueryRunable extends AbstractContinuousQueryRunable {

	/**
	 * The color of the results
	 */
	private final List<Color> colors;

	/**
	 * The query result
	 */
	private JoinedTupleListFuture queryResult;

	/**
	 * The region ids from which a watermark has been seen in the current round
	 */
	private final Set<Long> seenWatermarks;

	/**
	 * The waiting queue if watermarks are active
	 */
	private final TransferQueue<MultiTuple> watermarkQueue;

	/**
	 * The logger
	 */
	private final static Logger logger = LoggerFactory.getLogger(ContinuousQueryRunable.class);

	public ContinuousQueryRunable(final List<Color> colors, final ContinuousQueryPlan qp,
			final BBoxDBCluster connection, final ElementOverlayPainter painter) {

		super(qp, connection, painter);
		this.colors = colors;
		this.seenWatermarks = new HashSet<>();
		this.watermarkQueue = new LinkedTransferQueue<>();
	}

	@Override
	protected void runThread() throws Exception {
		logger.info("New worker thread for a continuous query has started");

		queryResult = connection.queryContinuous(qp);

		try {
			queryResult.waitForCompletion();
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
			return;
		}

		for(final MultiTuple joinedTuple : queryResult) {
			if(queryResult.isFailed()) {
				// FIX: use parameterized SLF4J logging instead of string concatenation
				// (also restores the missing separator before the messages).
				logger.error("Got an error {}", queryResult.getAllMessages());
				return;
			}

			if(Thread.currentThread().isInterrupted()) {
				return;
			}

			final Tuple firstTuple = joinedTuple.getTuple(0);

			if(firstTuple instanceof WatermarkTuple) {
				handleWatermark(firstTuple);
			} else {
				// Update stale tuples only if we don't receive watermarks
				// otherwise, the stale tuples are removed when all watermarks are present
				if(! qp.isReceiveWatermarks()) {
					if (firstTuple instanceof InvalidationTuple
							|| firstTuple instanceof IdleQueryStateRemovedTuple) {
						removeTupleFromView(joinedTuple);
					} else {
						updateTupleOnGui(joinedTuple, colors, true);
						removeStaleTupleIfNeeded();
					}
				} else {
					// When watermarks active, queue elements until the round completes
					final boolean queueResult = watermarkQueue.offer(joinedTuple);

					if(! queueResult) {
						logger.warn("Unable to queue tuple {}", joinedTuple);
					}
				}
			}
		}
	}

	/**
	 * Handle a watermark tuple: record the originating region and, when a
	 * watermark has been seen from every registered region, flush the queue.
	 *
	 * @param tuple the watermark tuple (its key encodes the tuple store / region)
	 */
	private void handleWatermark(final Tuple tuple) {
		logger.info("Handle watermark {}", tuple);

		// Key format: WATERMARK_KEY + "_" + <tuple store name with region id>
		final String tupleStoreString = tuple.getKey().replace(SSTableConst.WATERMARK_KEY + "_", "");
		final TupleStoreName tupleStore = new TupleStoreName(tupleStoreString);
		final long regionId = tupleStore.getRegionId().getAsLong();
		seenWatermarks.add(regionId);

		final String queryUUID = qp.getQueryUUID();
		final Optional<ContinuousQueryState> queryStateOptional = connection.getContinousQueryState(queryUUID);

		if(! queryStateOptional.isPresent()) {
			logger.error("Query state is not present, unable to handle watermark");
			return;
		}

		final ContinuousQueryState queryState = queryStateOptional.get();

		// All watermarks are present
		if(seenWatermarks.size() == queryState.getRegisteredRegions().size()) {
			handleWatermarkDone();
		}
	}

	/**
	 * Handle the completion of a watermark round: paint all queued tuples,
	 * reset the seen-watermark set, and repaint.
	 */
	private void handleWatermarkDone() {
		logger.debug("Watermark complete: {}", seenWatermarks);

		// Paint pending elements
		final List<MultiTuple> queue = new LinkedList<>();
		watermarkQueue.drainTo(queue);

		logger.info("Processing {} waiting elements", queue.size());

		for(final MultiTuple multiTuple : queue) {
			final Tuple firstTuple = multiTuple.getTuple(0);

			if (firstTuple instanceof InvalidationTuple) {
				removeTupleFromView(multiTuple);
			} else {
				updateTupleOnGui(multiTuple, colors, false);
			}
		}

		seenWatermarks.clear();
		removeStaleTupleIfNeeded();
		painter.repaintAll();
	}

	@Override
	protected void endHook() {
		if(queryResult != null) {
			logger.info("Canceling continuous query");

			// Clear interrupted flag, to be able to cancel the query on server
			boolean interruptedFlag = Thread.interrupted();

			try {
				logger.info("Canceling continous query {}", qp.getQueryUUID());
				connection.cancelContinousQuery(qp.getQueryUUID());
			} catch (BBoxDBException e) {
				logger.error("Got exception", e);
			} catch (InterruptedException e) {
				logger.info("Got interrupted exception");
				interruptedFlag = true;
			} finally {
				// Restore interrupted flag
				if(interruptedFlag) {
					Thread.currentThread().interrupt();
				}
			}
		}

		logger.info("Worker for continuous query exited");
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hyracks.control.nc;

import java.io.File;
import java.io.IOException;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.management.ThreadMXBean;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.api.application.INCApplication;
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.api.job.ActivityClusterGraph;
import org.apache.hyracks.api.job.DeployedJobSpecId;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobParameterByteStore;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.common.base.IClusterController;
import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.context.ServerContext;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.common.controllers.NodeParameters;
import org.apache.hyracks.control.common.controllers.NodeRegistration;
import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema;
import org.apache.hyracks.control.common.ipc.CCNCFunctions;
import org.apache.hyracks.control.common.ipc.ClusterControllerRemoteProxy;
import org.apache.hyracks.control.common.ipc.IControllerRemoteProxyIPCEventListener;
import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
import org.apache.hyracks.control.common.work.FutureValue;
import org.apache.hyracks.control.common.work.WorkQueue;
import org.apache.hyracks.control.nc.application.NCServiceContext;
import org.apache.hyracks.control.nc.dataset.DatasetPartitionManager;
import org.apache.hyracks.control.nc.io.IOManager;
import org.apache.hyracks.control.nc.io.profiling.IIOCounter;
import org.apache.hyracks.control.nc.io.profiling.IOCounterFactory;
import org.apache.hyracks.control.nc.net.DatasetNetworkManager;
import org.apache.hyracks.control.nc.net.MessagingNetworkManager;
import org.apache.hyracks.control.nc.net.NetworkManager;
import org.apache.hyracks.control.nc.partitions.PartitionManager;
import org.apache.hyracks.control.nc.resources.memory.MemoryManager;
import org.apache.hyracks.control.nc.work.BuildJobProfilesWork;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.api.IPCPerformanceCounters;
import org.apache.hyracks.ipc.exceptions.IPCException;
import org.apache.hyracks.ipc.impl.IPCSystem;
import org.apache.hyracks.net.protocols.muxdemux.FullFrameChannelInterfaceFactory;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
import org.apache.hyracks.util.ExitUtil;
import org.apache.hyracks.util.PidHelper;
import org.apache.hyracks.util.trace.ITracer;
import org.apache.hyracks.util.trace.Tracer;
import org.kohsuke.args4j.CmdLineException;

/**
 * The Node Controller (NC) service: registers with the Cluster Controller (CC),
 * runs job tasklets, manages local network/dataset/partition resources, and
 * reports heartbeats and profiles back to the CC.
 */
public class NodeControllerService implements IControllerService {
    private static final Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());

    private static final double MEMORY_FUDGE_FACTOR = 0.8;
    private static final long ONE_SECOND_NANOS = TimeUnit.SECONDS.toNanos(1);

    private NCConfig ncConfig;

    private final String id;

    private final IOManager ioManager;

    private IPCSystem ipc;

    private PartitionManager partitionManager;

    private NetworkManager netManager;

    private IDatasetPartitionManager datasetPartitionManager;

    private DatasetNetworkManager datasetNetworkManager;

    private final WorkQueue workQueue;

    private final Timer timer;

    private boolean registrationPending;

    private Exception registrationException;

    private IClusterController ccs;

    // Shared across work-queue and IPC threads; must be thread-safe maps.
    private final Map<JobId, Joblet> jobletMap;

    private final Map<Long, ActivityClusterGraph> deployedJobSpecActivityClusterGraphMap;

    // FIX: was an unsynchronized HashMap although it is mutated concurrently
    // (createOrGetJobParameterByteStore / removeJobParameterByteStore).
    private final Map<JobId, JobParameterByteStore> jobParameterByteStoreMap = new ConcurrentHashMap<>();

    private ExecutorService executor;

    private NodeParameters nodeParameters;

    private Thread heartbeatThread;

    private final ServerContext serverCtx;

    private NCServiceContext serviceCtx;

    private final INCApplication application;

    private final ILifeCycleComponentManager lccm;

    private final MemoryMXBean memoryMXBean;

    private final List<GarbageCollectorMXBean> gcMXBeans;

    private final ThreadMXBean threadMXBean;

    private final RuntimeMXBean runtimeMXBean;

    private final OperatingSystemMXBean osMXBean;

    private final Mutable<FutureValue<Map<String, NodeControllerInfo>>> getNodeControllerInfosAcceptor;

    private final MemoryManager memoryManager;

    private StackTraceElement[] shutdownCallStack;

    private IIOCounter ioCounter;

    private MessagingNetworkManager messagingNetManager;

    private final ConfigManager configManager;

    private NodeRegistration nodeRegistration;

    private final AtomicLong maxJobId = new AtomicLong(-1);

    static {
        ExitUtil.init();
    }

    public NodeControllerService(NCConfig config) throws Exception {
        this(config, getApplication(config));
    }

    public NodeControllerService(NCConfig config, INCApplication application) throws IOException, CmdLineException {
        ncConfig = config;
        configManager = ncConfig.getConfigManager();
        if (application == null) {
            throw new IllegalArgumentException("INCApplication cannot be null");
        }
        configManager.processConfig();
        this.application = application;
        id = ncConfig.getNodeId();
        if (id == null) {
            throw new HyracksException("id not set");
        }
        lccm = new LifeCycleComponentManager();
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Setting uncaught exception handler " + getLifeCycleComponentManager());
        }
        // Set shutdown hook before so it doesn't have the same uncaught exception handler
        Runtime.getRuntime().addShutdownHook(new NCShutdownHook(this));
        Thread.currentThread().setUncaughtExceptionHandler(getLifeCycleComponentManager());
        ioManager =
                new IOManager(IODeviceHandle.getDevices(ncConfig.getIODevices()), application.getFileDeviceResolver());

        workQueue = new WorkQueue(id, Thread.NORM_PRIORITY); // Reserves MAX_PRIORITY of the heartbeat thread.
        // FIX: legacy Hashtable replaced by ConcurrentHashMap (same Map interface,
        // thread-safe, no null keys/values are used here).
        jobletMap = new ConcurrentHashMap<>();
        deployedJobSpecActivityClusterGraphMap = new ConcurrentHashMap<>();
        timer = new Timer(true);
        serverCtx = new ServerContext(ServerContext.ServerType.NODE_CONTROLLER,
                new File(new File(NodeControllerService.class.getName()), id));
        memoryMXBean = ManagementFactory.getMemoryMXBean();
        gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
        threadMXBean = ManagementFactory.getThreadMXBean();
        runtimeMXBean = ManagementFactory.getRuntimeMXBean();
        osMXBean = ManagementFactory.getOperatingSystemMXBean();
        getNodeControllerInfosAcceptor = new MutableObject<>();
        memoryManager = new MemoryManager((long) (memoryMXBean.getHeapMemoryUsage().getMax() * MEMORY_FUDGE_FACTOR));
        ioCounter = IOCounterFactory.INSTANCE.getIOCounter();
    }

    public IOManager getIoManager() {
        return ioManager;
    }

    @Override
    public NCServiceContext getContext() {
        return serviceCtx;
    }

    public ILifeCycleComponentManager getLifeCycleComponentManager() {
        return lccm;
    }

    /** Called by the IPC layer when the CC answers the registration request. */
    synchronized void setNodeRegistrationResult(NodeParameters parameters, Exception exception) {
        this.nodeParameters = parameters;
        this.registrationException = exception;
        this.registrationPending = false;
        notifyAll();
    }

    /** Asks the CC for the current node controller infos and blocks for the answer. */
    public Map<String, NodeControllerInfo> getNodeControllersInfo() throws Exception {
        FutureValue<Map<String, NodeControllerInfo>> fv = new FutureValue<>();
        synchronized (getNodeControllerInfosAcceptor) {
            while (getNodeControllerInfosAcceptor.getValue() != null) {
                getNodeControllerInfosAcceptor.wait();
            }
            getNodeControllerInfosAcceptor.setValue(fv);
        }
        ccs.getNodeControllerInfos();
        return fv.get();
    }

    /** Completes a pending {@link #getNodeControllersInfo()} call. */
    void setNodeControllersInfo(Map<String, NodeControllerInfo> ncInfos) {
        FutureValue<Map<String, NodeControllerInfo>> fv;
        synchronized (getNodeControllerInfosAcceptor) {
            fv = getNodeControllerInfosAcceptor.getValue();
            getNodeControllerInfosAcceptor.setValue(null);
            getNodeControllerInfosAcceptor.notifyAll();
        }
        fv.setValue(ncInfos);
    }

    /** Creates the dataset and (optionally) messaging network managers; requires serviceCtx/executor. */
    private void init() throws Exception {
        ioManager.setExecutor(executor);
        datasetPartitionManager = new DatasetPartitionManager(this, executor, ncConfig.getResultManagerMemory(),
                ncConfig.getResultTTL(), ncConfig.getResultSweepThreshold());
        datasetNetworkManager = new DatasetNetworkManager(ncConfig.getResultListenAddress(),
                ncConfig.getResultListenPort(), datasetPartitionManager, ncConfig.getNetThreadCount(),
                ncConfig.getNetBufferCount(), ncConfig.getResultPublicAddress(), ncConfig.getResultPublicPort(),
                FullFrameChannelInterfaceFactory.INSTANCE);
        if (ncConfig.getMessagingListenAddress() != null && serviceCtx.getMessagingChannelInterfaceFactory() != null) {
            messagingNetManager = new MessagingNetworkManager(this, ncConfig.getMessagingListenAddress(),
                    ncConfig.getMessagingListenPort(), ncConfig.getNetThreadCount(),
                    ncConfig.getMessagingPublicAddress(), ncConfig.getMessagingPublicPort(),
                    serviceCtx.getMessagingChannelInterfaceFactory());
        }
    }

    @Override
    public void start() throws Exception {
        LOGGER.log(Level.INFO, "Starting NodeControllerService");
        ipc = new IPCSystem(new InetSocketAddress(ncConfig.getClusterListenAddress(), ncConfig.getClusterListenPort()),
                new NodeControllerIPCI(this), new CCNCFunctions.SerializerDeserializer());
        ipc.start();
        partitionManager = new PartitionManager(this);
        netManager = new NetworkManager(ncConfig.getDataListenAddress(), ncConfig.getDataListenPort(),
                partitionManager, ncConfig.getNetThreadCount(), ncConfig.getNetBufferCount(),
                ncConfig.getDataPublicAddress(), ncConfig.getDataPublicPort(),
                FullFrameChannelInterfaceFactory.INSTANCE);
        netManager.start();

        startApplication();
        init();

        datasetNetworkManager.start();
        if (messagingNetManager != null) {
            messagingNetManager.start();
        }
        this.ccs = new ClusterControllerRemoteProxy(ipc,
                new InetSocketAddress(ncConfig.getClusterAddress(), ncConfig.getClusterPort()),
                ncConfig.getClusterConnectRetries(), new IControllerRemoteProxyIPCEventListener() {
                    @Override
                    public void ipcHandleRestored(IIPCHandle handle) throws IPCException {
                        // we need to re-register in case of NC -> CC connection reset
                        try {
                            registerNode();
                        } catch (Exception e) {
                            LOGGER.log(Level.WARNING, "Failed Registering with cc", e);
                            throw new IPCException(e);
                        }
                    }
                });
        registerNode();

        workQueue.start();

        // Schedule tracing a human-readable datetime
        timer.schedule(new TraceCurrentTimeTask(serviceCtx.getTracer()), 0, 60000);

        if (nodeParameters.getProfileDumpPeriod() > 0) {
            // Schedule profile dump generator.
            timer.schedule(new ProfileDumpTask(ccs), 0, nodeParameters.getProfileDumpPeriod());
        }

        // Start heartbeat generator.
        heartbeatThread = new Thread(new HeartbeatTask(ccs, nodeParameters.getHeartbeatPeriod()), id + "-Heartbeat");
        heartbeatThread.setPriority(Thread.MAX_PRIORITY);
        heartbeatThread.setDaemon(true);
        heartbeatThread.start();

        LOGGER.log(Level.INFO, "Started NodeControllerService");
        application.startupCompleted();
    }

    /** Registers (or re-registers) this NC with the CC and blocks until the CC answers. */
    public void registerNode() throws Exception {
        LOGGER.info("Registering with Cluster Controller");
        registrationPending = true;
        HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
        for (int i = 0; i < gcInfos.length; ++i) {
            gcInfos[i] = new HeartbeatSchema.GarbageCollectorInfo(gcMXBeans.get(i).getName());
        }
        HeartbeatSchema hbSchema = new HeartbeatSchema(gcInfos);
        // Use "public" versions of network addresses and ports
        NetworkAddress datasetAddress = datasetNetworkManager.getPublicNetworkAddress();
        NetworkAddress netAddress = netManager.getPublicNetworkAddress();
        NetworkAddress messagingAddress =
                messagingNetManager != null ? messagingNetManager.getPublicNetworkAddress() : null;
        int allCores = osMXBean.getAvailableProcessors();
        nodeRegistration = new NodeRegistration(ipc.getSocketAddress(), id, ncConfig, netAddress, datasetAddress,
                osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), allCores, runtimeMXBean.getVmName(),
                runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean.getClassPath(),
                runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(), runtimeMXBean.getInputArguments(),
                runtimeMXBean.getSystemProperties(), hbSchema, messagingAddress, application.getCapacity(),
                PidHelper.getPid(), maxJobId.get());

        ccs.registerNode(nodeRegistration);

        synchronized (this) {
            while (registrationPending) {
                wait();
            }
        }
        if (registrationException != null) {
            LOGGER.log(Level.WARNING, "Registering with Cluster Controller failed with exception",
                    registrationException);
            throw registrationException;
        }
        serviceCtx.setDistributedState(nodeParameters.getDistributedState());
        application.onRegisterNode();
        LOGGER.info("Registering with Cluster Controller complete");
    }

    private void startApplication() throws Exception {
        serviceCtx = new NCServiceContext(this, serverCtx, ioManager, id, memoryManager, lccm,
                ncConfig.getNodeScopedAppConfig());
        application.init(serviceCtx);
        executor = Executors.newCachedThreadPool(serviceCtx.getThreadFactory());
        application.start(ncConfig.getAppArgsArray());
    }

    /** Tracks the highest job id seen, reported to the CC on (re-)registration. */
    public void updateMaxJobId(JobId jobId) {
        maxJobId.getAndUpdate(currentMaxId -> Math.max(currentMaxId, jobId.getId()));
    }

    @Override
    public synchronized void stop() throws Exception {
        if (shutdownCallStack == null) {
            shutdownCallStack = new Throwable().getStackTrace();
            LOGGER.log(Level.INFO, "Stopping NodeControllerService");
            application.preStop();
            executor.shutdownNow();
            if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
                LOGGER.log(Level.SEVERE, "Some jobs failed to exit, continuing with abnormal shutdown");
            }
            partitionManager.close();
            datasetPartitionManager.close();
            netManager.stop();
            datasetNetworkManager.stop();
            if (messagingNetManager != null) {
                messagingNetManager.stop();
            }
            workQueue.stop();
            application.stop();
            /*
             * Stop heartbeat after NC has stopped to avoid false node failure detection
             * on CC if an NC takes a long time to stop.
             */
            if (heartbeatThread != null) {
                heartbeatThread.interrupt();
                heartbeatThread.join(1000); // give it 1s to stop gracefully
            }
            try {
                ccs.notifyShutdown(id);
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Exception notifying CC of shutdown", e);
            }
            ipc.stop();

            LOGGER.log(Level.INFO, "Stopped NodeControllerService");
        } else {
            LOGGER.log(Level.SEVERE, "Duplicate shutdown call; original: " + Arrays.toString(shutdownCallStack),
                    new Exception("Duplicate shutdown call"));
        }
    }

    public String getId() {
        return id;
    }

    public ServerContext getServerContext() {
        return serverCtx;
    }

    public Map<JobId, Joblet> getJobletMap() {
        return jobletMap;
    }

    public void removeJobParameterByteStore(JobId jobId) {
        jobParameterByteStoreMap.remove(jobId);
    }

    /**
     * Returns the parameter byte store for the given job, creating it on first use.
     * FIX: atomic computeIfAbsent replaces the racy get/put sequence.
     */
    public JobParameterByteStore createOrGetJobParameterByteStore(JobId jobId) throws HyracksException {
        return jobParameterByteStoreMap.computeIfAbsent(jobId, key -> new JobParameterByteStore());
    }

    /**
     * Stores the ACG of a deployed job spec.
     * FIX: putIfAbsent makes the duplicate check atomic.
     */
    public void storeActivityClusterGraph(DeployedJobSpecId deployedJobSpecId, ActivityClusterGraph acg)
            throws HyracksException {
        if (deployedJobSpecActivityClusterGraphMap.putIfAbsent(deployedJobSpecId.getId(), acg) != null) {
            throw HyracksException.create(ErrorCode.DUPLICATE_DEPLOYED_JOB, deployedJobSpecId);
        }
    }

    public void removeActivityClusterGraph(DeployedJobSpecId deployedJobSpecId) throws HyracksException {
        if (deployedJobSpecActivityClusterGraphMap.get(deployedJobSpecId.getId()) == null) {
            throw HyracksException.create(ErrorCode.ERROR_FINDING_DEPLOYED_JOB, deployedJobSpecId);
        }
        deployedJobSpecActivityClusterGraphMap.remove(deployedJobSpecId.getId());
    }

    public void checkForDuplicateDeployedJobSpec(DeployedJobSpecId deployedJobSpecId) throws HyracksException {
        if (deployedJobSpecActivityClusterGraphMap.get(deployedJobSpecId.getId()) != null) {
            throw HyracksException.create(ErrorCode.DUPLICATE_DEPLOYED_JOB, deployedJobSpecId);
        }
    }

    public ActivityClusterGraph getActivityClusterGraph(DeployedJobSpecId deployedJobSpecId) throws HyracksException {
        return deployedJobSpecActivityClusterGraphMap.get(deployedJobSpecId.getId());
    }

    public NetworkManager getNetworkManager() {
        return netManager;
    }

    public DatasetNetworkManager getDatasetNetworkManager() {
        return datasetNetworkManager;
    }

    public PartitionManager getPartitionManager() {
        return partitionManager;
    }

    public IClusterController getClusterController() {
        return ccs;
    }

    public NodeParameters getNodeParameters() {
        return nodeParameters;
    }

    @Override
    public ExecutorService getExecutor() {
        return executor;
    }

    public NCConfig getConfiguration() {
        return ncConfig;
    }

    public WorkQueue getWorkQueue() {
        return workQueue;
    }

    /**
     * Periodically collects JVM/network/IO metrics and sends them to the CC.
     * On a failed send, retries after one second instead of the full period.
     */
    private class HeartbeatTask implements Runnable {
        private final Semaphore delayBlock = new Semaphore(0);
        private final IClusterController cc;
        private final long heartbeatPeriodNanos;
        private final HeartbeatData hbData;

        HeartbeatTask(IClusterController cc, long heartbeatPeriod) {
            this.cc = cc;
            this.heartbeatPeriodNanos = TimeUnit.MILLISECONDS.toNanos(heartbeatPeriod);
            hbData = new HeartbeatData();
            hbData.gcCollectionCounts = new long[gcMXBeans.size()];
            hbData.gcCollectionTimes = new long[gcMXBeans.size()];
        }

        @Override
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    long nextFireNanoTime = System.nanoTime() + heartbeatPeriodNanos;
                    final boolean success = execute();
                    sleepUntilNextFire(success ? nextFireNanoTime - System.nanoTime() : ONE_SECOND_NANOS);
                } catch (InterruptedException e) { // NOSONAR
                    break;
                }
            }
            LOGGER.log(Level.INFO, "Heartbeat thread interrupted; shutting down");
        }

        private void sleepUntilNextFire(long delayNanos) throws InterruptedException {
            if (delayNanos > 0) {
                delayBlock.tryAcquire(delayNanos, TimeUnit.NANOSECONDS); //NOSONAR - ignore result of tryAcquire
            } else {
                LOGGER.warning("After sending heartbeat, next one is already late by "
                        + TimeUnit.NANOSECONDS.toMillis(-delayNanos) + "ms; sending without delay");
            }
        }

        private boolean execute() throws InterruptedException {
            MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
            hbData.heapInitSize = heapUsage.getInit();
            hbData.heapUsedSize = heapUsage.getUsed();
            hbData.heapCommittedSize = heapUsage.getCommitted();
            hbData.heapMaxSize = heapUsage.getMax();
            MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
            hbData.nonheapInitSize = nonheapUsage.getInit();
            hbData.nonheapUsedSize = nonheapUsage.getUsed();
            hbData.nonheapCommittedSize = nonheapUsage.getCommitted();
            hbData.nonheapMaxSize = nonheapUsage.getMax();
            hbData.threadCount = threadMXBean.getThreadCount();
            hbData.peakThreadCount = threadMXBean.getPeakThreadCount();
            hbData.totalStartedThreadCount = threadMXBean.getTotalStartedThreadCount();
            hbData.systemLoadAverage = osMXBean.getSystemLoadAverage();
            int gcN = gcMXBeans.size();
            for (int i = 0; i < gcN; ++i) {
                GarbageCollectorMXBean gcMXBean = gcMXBeans.get(i);
                hbData.gcCollectionCounts[i] = gcMXBean.getCollectionCount();
                hbData.gcCollectionTimes[i] = gcMXBean.getCollectionTime();
            }

            MuxDemuxPerformanceCounters netPC = netManager.getPerformanceCounters();
            hbData.netPayloadBytesRead = netPC.getPayloadBytesRead();
            hbData.netPayloadBytesWritten = netPC.getPayloadBytesWritten();
            hbData.netSignalingBytesRead = netPC.getSignalingBytesRead();
            hbData.netSignalingBytesWritten = netPC.getSignalingBytesWritten();

            MuxDemuxPerformanceCounters datasetNetPC = datasetNetworkManager.getPerformanceCounters();
            hbData.datasetNetPayloadBytesRead = datasetNetPC.getPayloadBytesRead();
            hbData.datasetNetPayloadBytesWritten = datasetNetPC.getPayloadBytesWritten();
            hbData.datasetNetSignalingBytesRead = datasetNetPC.getSignalingBytesRead();
            hbData.datasetNetSignalingBytesWritten = datasetNetPC.getSignalingBytesWritten();

            IPCPerformanceCounters ipcPC = ipc.getPerformanceCounters();
            hbData.ipcMessagesSent = ipcPC.getMessageSentCount();
            hbData.ipcMessageBytesSent = ipcPC.getMessageBytesSent();
            hbData.ipcMessagesReceived = ipcPC.getMessageReceivedCount();
            hbData.ipcMessageBytesReceived = ipcPC.getMessageBytesReceived();

            hbData.diskReads = ioCounter.getReads();
            hbData.diskWrites = ioCounter.getWrites();
            hbData.numCores = Runtime.getRuntime().availableProcessors();

            try {
                cc.nodeHeartbeat(id, hbData);
                LOGGER.log(Level.FINE, "Successfully sent heartbeat");
                return true;
            } catch (InterruptedException e) {
                throw e;
            } catch (Exception e) {
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.log(Level.FINE, "Exception sending heartbeat; will retry after 1s", e);
                } else {
                    LOGGER.log(Level.SEVERE, "Exception sending heartbeat; will retry after 1s: " + e.toString());
                }
                return false;
            }
        }
    }

    /** Timer task that collects job profiles and reports them to the CC. */
    private class ProfileDumpTask extends TimerTask {
        private IClusterController cc;

        public ProfileDumpTask(IClusterController cc) {
            this.cc = cc;
        }

        @Override
        public void run() {
            try {
                FutureValue<List<JobProfile>> fv = new FutureValue<>();
                BuildJobProfilesWork bjpw = new BuildJobProfilesWork(NodeControllerService.this, fv);
                workQueue.scheduleAndSync(bjpw);
                List<JobProfile> profiles = fv.get();
                if (!profiles.isEmpty()) {
                    cc.reportProfile(id, profiles);
                }
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Exception reporting profile", e);
            }
        }
    }

    /** Timer task that emits the current wall-clock time into the trace. */
    private class TraceCurrentTimeTask extends TimerTask {

        private ITracer tracer;
        private long traceCategory;

        public TraceCurrentTimeTask(ITracer tracer) {
            this.tracer = tracer;
            this.traceCategory = tracer.getRegistry().get("Timestamp");
        }

        @Override
        public void run() {
            try {
                tracer.instant("CurrentTime", traceCategory, Tracer.Scope.p, Tracer.dateTimeStamp());
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Exception tracing current time", e);
            }
        }
    }

    public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId) throws Exception {
        ccs.sendApplicationMessageToCC(data, deploymentId, id);
    }

    public IDatasetPartitionManager getDatasetPartitionManager() {
        return datasetPartitionManager;
    }

    public MessagingNetworkManager getMessagingNetworkManager() {
        return messagingNetManager;
    }

    /** Instantiates the configured INCApplication, or the default when none is configured. */
    private static INCApplication getApplication(NCConfig config)
            throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        if (config.getAppClass() != null) {
            Class<?> c = Class.forName(config.getAppClass());
            // NOTE(review): Class.newInstance is deprecated since Java 9; switching to
            // getDeclaredConstructor().newInstance() would change this method's throws
            // clause — left as-is deliberately.
            return (INCApplication) c.newInstance();
        } else {
            return BaseNCApplication.INSTANCE;
        }
    }

    @Override
    public Object getApplicationContext() {
        return application.getApplicationContext();
    }
}
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.email;

import com.android.emailcommon.mail.MessagingException;

import android.content.Context;

import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * A {@link MessagingListener} that fans every callback out to a dynamically
 * registered group of listeners. Listeners may be added and removed at any
 * time; callbacks iterate a weakly-consistent view, so concurrent changes
 * never throw.
 */
public class GroupMessagingListener extends MessagingListener {
    /* Strictly speaking the "synchronized" on these methods is unnecessary,
       since the backing store is a ConcurrentHashMap. It is kept on purpose so
       that the backing collection could later be swapped for a non-concurrent
       one without reintroducing races. */

    // Map used as a concurrent set: the keys are the registered listeners,
    // values are irrelevant placeholders.
    private ConcurrentHashMap<MessagingListener, Object> mListenersMap =
        new ConcurrentHashMap<MessagingListener, Object>();

    // Live key-set view of the map; iteration reflects concurrent updates.
    private Set<MessagingListener> mListeners = mListenersMap.keySet();

    public synchronized void addListener(MessagingListener listener) {
        // ConcurrentHashMap rejects null values; "this" is just a convenient
        // non-null placeholder.
        mListenersMap.put(listener, this);
    }

    public synchronized void removeListener(MessagingListener listener) {
        mListenersMap.remove(listener);
    }

    public synchronized boolean isActiveListener(MessagingListener listener) {
        return mListenersMap.containsKey(listener);
    }

    @Override
    public synchronized void listFoldersStarted(long accountId) {
        for (MessagingListener listener : mListeners) {
            listener.listFoldersStarted(accountId);
        }
    }

    @Override
    public synchronized void listFoldersFailed(long accountId, String message) {
        for (MessagingListener listener : mListeners) {
            listener.listFoldersFailed(accountId, message);
        }
    }

    @Override
    public synchronized void listFoldersFinished(long accountId) {
        for (MessagingListener listener : mListeners) {
            listener.listFoldersFinished(accountId);
        }
    }

    @Override
    public synchronized void synchronizeMailboxStarted(long accountId, long mailboxId) {
        for (MessagingListener listener : mListeners) {
            listener.synchronizeMailboxStarted(accountId, mailboxId);
        }
    }

    @Override
    public synchronized void synchronizeMailboxFinished(long accountId, long mailboxId,
            int totalMessagesInMailbox, int numNewMessages, ArrayList<Long> addedMessages) {
        for (MessagingListener listener : mListeners) {
            listener.synchronizeMailboxFinished(accountId, mailboxId,
                    totalMessagesInMailbox, numNewMessages, addedMessages);
        }
    }

    @Override
    public synchronized void synchronizeMailboxFailed(long accountId, long mailboxId, Exception e) {
        for (MessagingListener listener : mListeners) {
            listener.synchronizeMailboxFailed(accountId, mailboxId, e);
        }
    }

    @Override
    public synchronized void loadMessageForViewStarted(long messageId) {
        for (MessagingListener listener : mListeners) {
            listener.loadMessageForViewStarted(messageId);
        }
    }

    @Override
    public synchronized void loadMessageForViewFinished(long messageId) {
        for (MessagingListener listener : mListeners) {
            listener.loadMessageForViewFinished(messageId);
        }
    }

    @Override
    public synchronized void loadMessageForViewFailed(long messageId, String message) {
        for (MessagingListener listener : mListeners) {
            listener.loadMessageForViewFailed(messageId, message);
        }
    }

    @Override
    public synchronized void checkMailStarted(Context context, long accountId, long tag) {
        for (MessagingListener listener : mListeners) {
            listener.checkMailStarted(context, accountId, tag);
        }
    }

    @Override
    public synchronized void checkMailFinished(Context context, long accountId, long folderId,
            long tag) {
        for (MessagingListener listener : mListeners) {
            listener.checkMailFinished(context, accountId, folderId, tag);
        }
    }

    @Override
    public synchronized void sendPendingMessagesStarted(long accountId, long messageId) {
        for (MessagingListener listener : mListeners) {
            listener.sendPendingMessagesStarted(accountId, messageId);
        }
    }

    @Override
    public synchronized void sendPendingMessagesCompleted(long accountId) {
        for (MessagingListener listener : mListeners) {
            listener.sendPendingMessagesCompleted(accountId);
        }
    }

    @Override
    public synchronized void sendPendingMessagesFailed(long accountId, long messageId,
            Exception reason) {
        for (MessagingListener listener : mListeners) {
            listener.sendPendingMessagesFailed(accountId, messageId, reason);
        }
    }

    @Override
    public synchronized void messageUidChanged(long accountId, long mailboxId,
            String oldUid, String newUid) {
        for (MessagingListener listener : mListeners) {
            listener.messageUidChanged(accountId, mailboxId, oldUid, newUid);
        }
    }

    @Override
    public synchronized void loadAttachmentStarted(
            long accountId,
            long messageId,
            long attachmentId,
            boolean requiresDownload) {
        for (MessagingListener listener : mListeners) {
            listener.loadAttachmentStarted(accountId, messageId, attachmentId, requiresDownload);
        }
    }

    @Override
    public synchronized void loadAttachmentFinished(
            long accountId,
            long messageId,
            long attachmentId) {
        for (MessagingListener listener : mListeners) {
            listener.loadAttachmentFinished(accountId, messageId, attachmentId);
        }
    }

    @Override
    public synchronized void loadAttachmentFailed(
            long accountId,
            long messageId,
            long attachmentId,
            MessagingException me,
            boolean background) {
        for (MessagingListener listener : mListeners) {
            listener.loadAttachmentFailed(accountId, messageId, attachmentId, me, background);
        }
    }

    @Override
    public synchronized void controllerCommandCompleted(boolean moreCommandsToRun) {
        for (MessagingListener listener : mListeners) {
            listener.controllerCommandCompleted(moreCommandsToRun);
        }
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package com.thoughtworks.selenium.webdriven;

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.thoughtworks.selenium.CommandProcessor;
import com.thoughtworks.selenium.SeleniumException;

import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.edge.EdgeOptions;
import org.openqa.selenium.firefox.FirefoxOptions;
import org.openqa.selenium.grid.session.ActiveSession;
import org.openqa.selenium.ie.InternetExplorerOptions;
import org.openqa.selenium.opera.OperaOptions;
import org.openqa.selenium.remote.NewSessionPayload;
import org.openqa.selenium.remote.SessionId;
import org.openqa.selenium.remote.http.FormEncodedData;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.http.HttpResponse;
import org.openqa.selenium.remote.http.Routable;
import org.openqa.selenium.remote.server.ActiveSessionFactory;
import org.openqa.selenium.remote.server.ActiveSessionListener;
import org.openqa.selenium.remote.server.ActiveSessions;
import org.openqa.selenium.remote.server.NewSessionPipeline;
import org.openqa.selenium.remote.tracing.Tracer;
import org.openqa.selenium.safari.SafariOptions;

import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;

import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.logging.Level.WARNING;
import static org.openqa.selenium.remote.http.Contents.utf8String;
import static org.openqa.selenium.remote.http.HttpMethod.POST;

/**
 * An implementation of the original selenium rc server endpoint, using a webdriver-backed selenium
 * in order to get things working.
 */
public class WebDriverBackedSeleniumHandler implements Routable {

  // Command processors for all live sessions, keyed by session id. Shared so
  // every handler instance sees the same sessions; entries are removed by the
  // session listener installed in the constructor.
  private static final Map<SessionId, CommandProcessor> PROCESSORS = new ConcurrentHashMap<>();

  // Fix: log under this handler's own class name rather than the unrelated
  // WebDriverBackedSelenium class that was used before.
  private static final Logger LOG = Logger.getLogger(WebDriverBackedSeleniumHandler.class.getName());

  private NewSessionPipeline pipeline;
  private ActiveSessions sessions;
  private ActiveSessionListener listener;

  /**
   * @param tracer   tracer used when building new sessions.
   * @param sessions active session registry; when {@code null} a fresh registry
   *                 with a five minute timeout is created.
   */
  public WebDriverBackedSeleniumHandler(Tracer tracer, ActiveSessions sessions) {
    this.sessions = sessions == null ? new ActiveSessions(5, MINUTES) : sessions;

    // Keep the processor map in step with the session registry: when a session
    // dies, drop its command processor so it cannot leak.
    listener = new ActiveSessionListener() {
      @Override
      public void onStop(ActiveSession session) {
        PROCESSORS.remove(session.getId());
      }
    };
    this.sessions.addListener(listener);

    this.pipeline = NewSessionPipeline.builder().add(new ActiveSessionFactory(tracer)).create();
  }

  /**
   * Matches POSTs to the legacy RC driver endpoint (with or without trailing slash).
   */
  @Override
  public boolean matches(HttpRequest req) {
    return req.getMethod() == POST &&
           ("/selenium-server/driver/".equals(req.getUri()) ||
            "/selenium-server/driver".equals(req.getUri()));
  }

  /**
   * Dispatches a single RC command. Session creation is handled inline; every
   * other command is routed to the session's {@link CommandProcessor}.
   */
  @Override
  public HttpResponse execute(HttpRequest req) throws UncheckedIOException {
    Optional<Map<String, List<String>>> params = FormEncodedData.getData(req);

    String cmd = getValue("cmd", params, req);

    SessionId sessionId = null;
    if (getValue("sessionId", params, req) != null) {
      sessionId = new SessionId(getValue("sessionId", params, req));
    }
    String[] args = deserializeArgs(params, req);

    if (cmd == null) {
      return sendError(HTTP_NOT_FOUND, "Unable to find cmd query parameter");
    }

    StringBuilder printableArgs = new StringBuilder("[");
    Joiner.on(", ").appendTo(printableArgs, args);
    printableArgs.append("]");
    LOG.info(String.format("Command request: %s%s on session %s", cmd, printableArgs, sessionId));

    if ("getNewBrowserSession".equals(cmd)) {
      // Figure out what to do. If the first arg is "*webdriver", check for a session id and use
      // that existing session if present. Otherwise, start a new session with whatever comes to
      // hand. If, however, the first parameter specifies something else, then create a session
      // using a webdriver-backed instance of that.
      // Fix: guard against malformed requests; args[0]/args[1] used to throw
      // ArrayIndexOutOfBoundsException when fewer than two args were supplied.
      if (args.length < 2) {
        return sendError(HTTP_NOT_FOUND, "Not enough arguments for getNewBrowserSession");
      }
      return startNewSession(args[0], args[1], args.length == 4 ? args[3] : "");
    }

    // Fix: every remaining command requires a session id. ConcurrentHashMap.get(null)
    // throws NullPointerException, so fail fast with a proper HTTP error instead.
    if (sessionId == null) {
      return sendError(HTTP_NOT_FOUND, "Unable to find sessionId query parameter");
    }

    if ("testComplete".equals(cmd)) {
      CommandProcessor commandProcessor = PROCESSORS.get(sessionId);
      sessions.invalidate(sessionId);
      if (commandProcessor == null) {
        return sendError(HTTP_NOT_FOUND, "Unable to find command processor for " + sessionId);
      }
      return sendResponse(null);
    }

    // Common case.
    CommandProcessor commandProcessor = PROCESSORS.get(sessionId);
    if (commandProcessor == null) {
      return sendError(HTTP_NOT_FOUND, "Unable to find command processor for " + sessionId);
    }

    try {
      String result = commandProcessor.doCommand(cmd, args);
      return sendResponse(result);
    } catch (SeleniumException e) {
      // RC reports command failures as HTTP 200 with an ERROR body.
      return sendError(HTTP_OK, e.getMessage());
    }
  }

  /**
   * Creates (or attaches to) a session for the requested browser string.
   *
   * @param browserString RC browser token, e.g. "*firefox" or "*webdriver".
   * @param baseUrl       base URL the RC commands are resolved against.
   * @param options       optional "key=value" extra; may carry an existing
   *                      webdriver session id to attach to.
   */
  private HttpResponse startNewSession(String browserString, String baseUrl, String options) {
    SessionId sessionId = null;
    if (options.startsWith("webdriver.remote.sessionid")) {
      // We may have a hit
      List<String> split = Splitter.on("=")
        .omitEmptyStrings()
        .trimResults()
        .limit(2)
        .splitToList(options);
      if (!"webdriver.remote.sessionid".equals(split.get(0))) {
        LOG.warning("Unable to find existing webdriver session. Wrong parameter name: " + options);
        return sendError(
          HTTP_OK,
          "Unable to find existing webdriver session. Wrong parameter name: " + options);
      }
      if (split.size() != 2) {
        LOG.warning("Attempted to find webdriver id, but none specified. Bailing");
        return sendError(HTTP_OK, "Unable to find existing webdriver session. No ID specified");
      }
      sessionId = new SessionId(split.get(1));
    }

    if (sessionId == null) {
      // Let's see if the user chose "webdriver" or something specific.
      Capabilities caps;
      switch (browserString) {
        case "*webdriver":
          caps = new ImmutableCapabilities();
          break;

        case "*chrome":
        case "*firefox":
        case "*firefoxproxy":
        case "*firefoxchrome":
        case "*pifirefox":
          caps = new FirefoxOptions();
          break;

        case "*iehta":
        case "*iexplore":
        case "*iexploreproxy":
        case "*piiexplore":
          caps = new InternetExplorerOptions();
          break;

        case "*googlechrome":
          caps = new ChromeOptions();
          break;

        case "*MicrosoftEdge":
          caps = new EdgeOptions();
          break;

        case "*opera":
        case "*operablink":
          caps = new OperaOptions();
          break;

        case "*safari":
        case "*safariproxy":
          caps = new SafariOptions();
          break;

        default:
          return sendError(HTTP_OK, "Unable to match browser string: " + browserString);
      }

      try (NewSessionPayload payload = NewSessionPayload.create(caps)) {
        ActiveSession session = pipeline.createNewSession(payload);
        sessions.put(session);
        sessionId = session.getId();
      } catch (Exception e) {
        LOG.log(WARNING, "Unable to start session", e);
        return sendError(
          HTTP_OK,
          "Unable to start session. Cause can be found in logs. Message is: " + e.getMessage());
      }
    }

    ActiveSession session = sessions.get(sessionId);
    if (session == null) {
      LOG.warning("Attempt to use non-existent session: " + sessionId);
      return sendError(HTTP_OK, "Attempt to use non-existent session: " + sessionId);
    }

    PROCESSORS.put(sessionId, new WebDriverCommandProcessor(baseUrl, session.getWrappedDriver()));

    return sendResponse(sessionId.toString());
  }

  /** Wraps a successful result in RC's "OK[,result]" wire format. */
  private HttpResponse sendResponse(String result) {
    return new HttpResponse()
      .setStatus(HTTP_OK)
      .setHeader("", "")
      .setContent(utf8String("OK".concat(result == null ? "" : "," + result)));
  }

  /** Wraps a failure in RC's "ERROR[: result]" wire format with the given status. */
  private HttpResponse sendError(int statusCode, String result) {
    return new HttpResponse()
      .setStatus(statusCode)
      .setHeader("", "")
      .setContent(utf8String("ERROR".concat(result == null ? "" : ": " + result)));
  }

  /** Collects positional parameters "1".."5" until the first missing one. */
  private String[] deserializeArgs(Optional<Map<String, List<String>>> params, HttpRequest req) {
    // 5 was picked as the maximum length used by the `start` command
    List<String> args = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
      String value = getValue(String.valueOf(i + 1), params, req);
      if (value != null) {
        args.add(value);
      } else {
        break;
      }
    }
    return args.toArray(new String[0]);
  }

  /** Reads a parameter from the form body, falling back to the query string. */
  private String getValue(String key, Optional<Map<String, List<String>>> params, HttpRequest request) {
    return params.map(data -> {
      List<String> values = data.getOrDefault(key, new ArrayList<>());
      if (values.isEmpty()) {
        return request.getQueryParameter(key);
      }
      return values.get(0);
    }).orElseGet(() -> request.getQueryParameter(key));
  }
}
// ========================================================================
// Copyright (c) 2006-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================

package org.eclipse.jetty.ajp;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.Collection;

import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.http.HttpException;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConnection;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;

/**
 * Connection implementation of the Ajp13 protocol. <p/> XXX Refactor to remove
 * duplication of HttpConnection
 */
public class Ajp13Connection extends HttpConnection
{
    /**
     * Wires up an AJP13 parser/generator pair for the given endpoint and
     * cross-links them with the request object.
     */
    public Ajp13Connection(Connector connector, EndPoint endPoint, Server server)
    {
        super(connector, endPoint, server,
                new Ajp13Parser(connector.getRequestBuffers(), endPoint),
                new Ajp13Generator(connector.getResponseBuffers(), endPoint),
                new Ajp13Request()
        );

        ((Ajp13Parser)_parser).setEventHandler(new RequestHandler());
        ((Ajp13Parser)_parser).setGenerator((Ajp13Generator)_generator);
        ((Ajp13Request)_request).setConnection(this);
    }

    /**
     * AJP carries the SSL flag from the fronting web server, so confidentiality
     * is whatever the forwarded request claims.
     */
    @Override
    public boolean isConfidential(Request request)
    {
        return ((Ajp13Request) request).isSslSecure();
    }

    @Override
    public boolean isIntegral(Request request)
    {
        return ((Ajp13Request) request).isSslSecure();
    }

    /** Lazily creates the AJP-specific servlet input stream. */
    @Override
    public ServletInputStream getInputStream()
    {
        if (_in == null)
            _in = new Ajp13Parser.Input((Ajp13Parser) _parser, _connector.getMaxIdleTime());
        return _in;
    }

    /**
     * Parser callbacks that translate AJP13 forward-request packets into state
     * on the enclosing connection's {@link Request}.
     */
    private class RequestHandler implements Ajp13Parser.EventHandler
    {
        public void startForwardRequest() throws IOException
        {
            // Reset per-request state for the next forwarded request.
            _uri.clear();
            ((Ajp13Request) _request).setSslSecure(false);
            _request.setTimeStamp(System.currentTimeMillis());
            _request.setUri(_uri);
        }

        public void parsedAuthorizationType(Buffer authType) throws IOException
        {
            //TODO JASPI this doesn't appear to make sense yet... how does ajp auth fit into jetty auth?
            // _request.setAuthType(authType.toString());
        }

        public void parsedRemoteUser(Buffer remoteUser) throws IOException
        {
            ((Ajp13Request)_request).setRemoteUser(remoteUser.toString());
        }

        public void parsedServletPath(Buffer servletPath) throws IOException
        {
            _request.setServletPath(servletPath.toString());
        }

        public void parsedContextPath(Buffer context) throws IOException
        {
            _request.setContextPath(context.toString());
        }

        public void parsedSslCert(Buffer sslCert) throws IOException
        {
            try
            {
                // Decode the forwarded PEM certificate chain into X509 objects.
                CertificateFactory cf = CertificateFactory.getInstance("X.509");
                ByteArrayInputStream bis = new ByteArrayInputStream(sslCert.toString().getBytes());

                Collection<? extends java.security.cert.Certificate> certCollection = cf.generateCertificates(bis);
                X509Certificate[] certificates = new X509Certificate[certCollection.size()];

                int i=0;
                // Typed loop instead of the previous raw Object iteration.
                for (java.security.cert.Certificate cert : certCollection)
                {
                    certificates[i++] = (X509Certificate) cert;
                }

                _request.setAttribute("javax.servlet.request.X509Certificate", certificates);
            }
            catch (Exception e)
            {
                // Fall back to exposing the raw cert text when parsing fails.
                org.eclipse.jetty.util.log.Log.warn(e.toString());
                org.eclipse.jetty.util.log.Log.ignore(e);
                if (sslCert!=null)
                    _request.setAttribute("javax.servlet.request.X509Certificate", sslCert.toString());
            }
        }

        public void parsedSslCipher(Buffer sslCipher) throws IOException
        {
            _request.setAttribute("javax.servlet.request.cipher_suite", sslCipher.toString());
        }

        public void parsedSslSession(Buffer sslSession) throws IOException
        {
            _request.setAttribute("javax.servlet.request.ssl_session", sslSession.toString());
        }

        public void parsedSslKeySize(int keySize) throws IOException
        {
            // Integer.valueOf uses the cache and avoids the deprecated
            // new Integer(int) constructor.
            _request.setAttribute("javax.servlet.request.key_size", Integer.valueOf(keySize));
        }

        public void parsedMethod(Buffer method) throws IOException
        {
            if (method == null)
                throw new HttpException(HttpServletResponse.SC_BAD_REQUEST);
            _request.setMethod(method.toString());
        }

        public void parsedUri(Buffer uri) throws IOException
        {
            _uri.parse(uri.toString());
        }

        public void parsedProtocol(Buffer protocol) throws IOException
        {
            if (protocol != null && protocol.length()>0)
            {
                _request.setProtocol(protocol.toString());
            }
        }

        public void parsedRemoteAddr(Buffer addr) throws IOException
        {
            if (addr != null && addr.length()>0)
            {
                _request.setRemoteAddr(addr.toString());
            }
        }

        public void parsedRemoteHost(Buffer name) throws IOException
        {
            if (name != null && name.length()>0)
            {
                _request.setRemoteHost(name.toString());
            }
        }

        public void parsedServerName(Buffer name) throws IOException
        {
            if (name != null && name.length()>0)
            {
                _request.setServerName(name.toString());
            }
        }

        public void parsedServerPort(int port) throws IOException
        {
            _request.setServerPort(port);
        }

        public void parsedSslSecure(boolean secure) throws IOException
        {
            ((Ajp13Request) _request).setSslSecure(secure);
        }

        public void parsedQueryString(Buffer value) throws IOException
        {
            // The query string arrives separately; re-parse the URI with it appended.
            String u = _uri + "?" + value;
            _uri.parse(u);
        }

        public void parsedHeader(Buffer name, Buffer value) throws IOException
        {
            _requestFields.add(name, value);
        }

        public void parsedRequestAttribute(String key, Buffer value) throws IOException
        {
            _request.setAttribute(key, value.toString());
        }

        public void parsedRequestAttribute(String key, int value) throws IOException
        {
            _request.setAttribute(key, Integer.toString(value));
        }

        public void headerComplete() throws IOException
        {
            handleRequest();
        }

        public void messageComplete(long contextLength) throws IOException
        {
        }

        public void content(Buffer ref) throws IOException
        {
        }

    }

}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.workdocs.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/UpdateUser" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateUserRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. * </p> */ private String authenticationToken; /** * <p> * The ID of the user. * </p> */ private String userId; /** * <p> * The given name of the user. * </p> */ private String givenName; /** * <p> * The surname of the user. * </p> */ private String surname; /** * <p> * The type of the user. * </p> */ private String type; /** * <p> * The amount of storage for the user. * </p> */ private StorageRuleType storageRule; /** * <p> * The time zone ID of the user. * </p> */ private String timeZoneId; /** * <p> * The locale of the user. * </p> */ private String locale; /** * <p> * Boolean value to determine whether the user is granted Poweruser privileges. * </p> */ private String grantPoweruserPrivileges; /** * <p> * Amazon WorkDocs authentication token. 
Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. * </p> * * @param authenticationToken * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. */ public void setAuthenticationToken(String authenticationToken) { this.authenticationToken = authenticationToken; } /** * <p> * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. * </p> * * @return Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. */ public String getAuthenticationToken() { return this.authenticationToken; } /** * <p> * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. * </p> * * @param authenticationToken * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in * accessing the API using AWS credentials. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withAuthenticationToken(String authenticationToken) { setAuthenticationToken(authenticationToken); return this; } /** * <p> * The ID of the user. * </p> * * @param userId * The ID of the user. */ public void setUserId(String userId) { this.userId = userId; } /** * <p> * The ID of the user. * </p> * * @return The ID of the user. */ public String getUserId() { return this.userId; } /** * <p> * The ID of the user. * </p> * * @param userId * The ID of the user. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withUserId(String userId) { setUserId(userId); return this; } /** * <p> * The given name of the user. 
* </p> * * @param givenName * The given name of the user. */ public void setGivenName(String givenName) { this.givenName = givenName; } /** * <p> * The given name of the user. * </p> * * @return The given name of the user. */ public String getGivenName() { return this.givenName; } /** * <p> * The given name of the user. * </p> * * @param givenName * The given name of the user. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withGivenName(String givenName) { setGivenName(givenName); return this; } /** * <p> * The surname of the user. * </p> * * @param surname * The surname of the user. */ public void setSurname(String surname) { this.surname = surname; } /** * <p> * The surname of the user. * </p> * * @return The surname of the user. */ public String getSurname() { return this.surname; } /** * <p> * The surname of the user. * </p> * * @param surname * The surname of the user. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withSurname(String surname) { setSurname(surname); return this; } /** * <p> * The type of the user. * </p> * * @param type * The type of the user. * @see UserType */ public void setType(String type) { this.type = type; } /** * <p> * The type of the user. * </p> * * @return The type of the user. * @see UserType */ public String getType() { return this.type; } /** * <p> * The type of the user. * </p> * * @param type * The type of the user. * @return Returns a reference to this object so that method calls can be chained together. * @see UserType */ public UpdateUserRequest withType(String type) { setType(type); return this; } /** * <p> * The type of the user. * </p> * * @param type * The type of the user. * @see UserType */ public void setType(UserType type) { withType(type); } /** * <p> * The type of the user. * </p> * * @param type * The type of the user. 
* @return Returns a reference to this object so that method calls can be chained together. * @see UserType */ public UpdateUserRequest withType(UserType type) { this.type = type.toString(); return this; } /** * <p> * The amount of storage for the user. * </p> * * @param storageRule * The amount of storage for the user. */ public void setStorageRule(StorageRuleType storageRule) { this.storageRule = storageRule; } /** * <p> * The amount of storage for the user. * </p> * * @return The amount of storage for the user. */ public StorageRuleType getStorageRule() { return this.storageRule; } /** * <p> * The amount of storage for the user. * </p> * * @param storageRule * The amount of storage for the user. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withStorageRule(StorageRuleType storageRule) { setStorageRule(storageRule); return this; } /** * <p> * The time zone ID of the user. * </p> * * @param timeZoneId * The time zone ID of the user. */ public void setTimeZoneId(String timeZoneId) { this.timeZoneId = timeZoneId; } /** * <p> * The time zone ID of the user. * </p> * * @return The time zone ID of the user. */ public String getTimeZoneId() { return this.timeZoneId; } /** * <p> * The time zone ID of the user. * </p> * * @param timeZoneId * The time zone ID of the user. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateUserRequest withTimeZoneId(String timeZoneId) { setTimeZoneId(timeZoneId); return this; } /** * <p> * The locale of the user. * </p> * * @param locale * The locale of the user. * @see LocaleType */ public void setLocale(String locale) { this.locale = locale; } /** * <p> * The locale of the user. * </p> * * @return The locale of the user. * @see LocaleType */ public String getLocale() { return this.locale; } /** * <p> * The locale of the user. * </p> * * @param locale * The locale of the user. 
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see LocaleType
     */
    public UpdateUserRequest withLocale(String locale) {
        setLocale(locale);
        return this;
    }

    /**
     * <p>
     * The locale of the user.
     * </p>
     *
     * @param locale
     *        The locale of the user.
     * @see LocaleType
     */
    public void setLocale(LocaleType locale) {
        // Delegates to the fluent enum overload; see the null-handling note there.
        withLocale(locale);
    }

    /**
     * <p>
     * The locale of the user.
     * </p>
     *
     * @param locale
     *        The locale of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see LocaleType
     */
    public UpdateUserRequest withLocale(LocaleType locale) {
        // NOTE(review): throws NullPointerException when locale is null, unlike the
        // String overload which stores null as-is. Standard generated-SDK asymmetry.
        this.locale = locale.toString();
        return this;
    }

    /**
     * <p>
     * Boolean value to determine whether the user is granted Poweruser privileges.
     * </p>
     *
     * @param grantPoweruserPrivileges
     *        Boolean value to determine whether the user is granted Poweruser privileges.
     * @see BooleanEnumType
     */
    public void setGrantPoweruserPrivileges(String grantPoweruserPrivileges) {
        this.grantPoweruserPrivileges = grantPoweruserPrivileges;
    }

    /**
     * <p>
     * Boolean value to determine whether the user is granted Poweruser privileges.
     * </p>
     *
     * @return Boolean value to determine whether the user is granted Poweruser privileges.
     * @see BooleanEnumType
     */
    public String getGrantPoweruserPrivileges() {
        return this.grantPoweruserPrivileges;
    }

    /**
     * <p>
     * Boolean value to determine whether the user is granted Poweruser privileges.
     * </p>
     *
     * @param grantPoweruserPrivileges
     *        Boolean value to determine whether the user is granted Poweruser privileges.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BooleanEnumType
     */
    public UpdateUserRequest withGrantPoweruserPrivileges(String grantPoweruserPrivileges) {
        setGrantPoweruserPrivileges(grantPoweruserPrivileges);
        return this;
    }

    /**
     * <p>
     * Boolean value to determine whether the user is granted Poweruser privileges.
     * </p>
     *
     * @param grantPoweruserPrivileges
     *        Boolean value to determine whether the user is granted Poweruser privileges.
     * @see BooleanEnumType
     */
    public void setGrantPoweruserPrivileges(BooleanEnumType grantPoweruserPrivileges) {
        // Delegates to the fluent enum overload; see the null-handling note there.
        withGrantPoweruserPrivileges(grantPoweruserPrivileges);
    }

    /**
     * <p>
     * Boolean value to determine whether the user is granted Poweruser privileges.
     * </p>
     *
     * @param grantPoweruserPrivileges
     *        Boolean value to determine whether the user is granted Poweruser privileges.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BooleanEnumType
     */
    public UpdateUserRequest withGrantPoweruserPrivileges(BooleanEnumType grantPoweruserPrivileges) {
        // NOTE(review): throws NullPointerException when the enum argument is null,
        // unlike the String overload. Standard generated-SDK asymmetry.
        this.grantPoweruserPrivileges = grantPoweruserPrivileges.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        // The authentication token is sensitive and is never printed verbatim.
        if (getAuthenticationToken() != null)
            sb.append("AuthenticationToken: ").append("***Sensitive Data Redacted***").append(",");
        if (getUserId() != null)
            sb.append("UserId: ").append(getUserId()).append(",");
        if (getGivenName() != null)
            sb.append("GivenName: ").append(getGivenName()).append(",");
        if (getSurname() != null)
            sb.append("Surname: ").append(getSurname()).append(",");
        if (getType() != null)
            sb.append("Type: ").append(getType()).append(",");
        if (getStorageRule() != null)
            sb.append("StorageRule: ").append(getStorageRule()).append(",");
        if (getTimeZoneId() != null)
            sb.append("TimeZoneId: ").append(getTimeZoneId()).append(",");
        if (getLocale() != null)
            sb.append("Locale: ").append(getLocale()).append(",");
        if (getGrantPoweruserPrivileges() != null)
            sb.append("GrantPoweruserPrivileges: ").append(getGrantPoweruserPrivileges());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof UpdateUserRequest == false)
            return false;
        UpdateUserRequest other = (UpdateUserRequest) obj;
        // For each field, the XOR catches the "exactly one side is null" case,
        // then the equals() call compares the two non-null values.
        if (other.getAuthenticationToken() == null ^ this.getAuthenticationToken() == null)
            return false;
        if (other.getAuthenticationToken() != null && other.getAuthenticationToken().equals(this.getAuthenticationToken()) == false)
            return false;
        if (other.getUserId() == null ^ this.getUserId() == null)
            return false;
        if (other.getUserId() != null && other.getUserId().equals(this.getUserId()) == false)
            return false;
        if (other.getGivenName() == null ^ this.getGivenName() == null)
            return false;
        if (other.getGivenName() != null && other.getGivenName().equals(this.getGivenName()) == false)
            return false;
        if (other.getSurname() == null ^ this.getSurname() == null)
            return false;
        if (other.getSurname() != null && other.getSurname().equals(this.getSurname()) == false)
            return false;
        if (other.getType() == null ^ this.getType() == null)
            return false;
        if (other.getType() != null && other.getType().equals(this.getType()) == false)
            return false;
        if (other.getStorageRule() == null ^ this.getStorageRule() == null)
            return false;
        if (other.getStorageRule() != null && other.getStorageRule().equals(this.getStorageRule()) == false)
            return false;
        if (other.getTimeZoneId() == null ^ this.getTimeZoneId() == null)
            return false;
        if (other.getTimeZoneId() != null && other.getTimeZoneId().equals(this.getTimeZoneId()) == false)
            return false;
        if (other.getLocale() == null ^ this.getLocale() == null)
            return false;
        if (other.getLocale() != null && other.getLocale().equals(this.getLocale()) == false)
            return false;
        if (other.getGrantPoweruserPrivileges() == null ^ this.getGrantPoweruserPrivileges() == null)
            return false;
        if (other.getGrantPoweruserPrivileges() != null && other.getGrantPoweruserPrivileges().equals(this.getGrantPoweruserPrivileges()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Hash over the same fields compared in equals(), keeping the two consistent.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getAuthenticationToken() == null) ? 0 : getAuthenticationToken().hashCode());
        hashCode = prime * hashCode + ((getUserId() == null) ? 0 : getUserId().hashCode());
        hashCode = prime * hashCode + ((getGivenName() == null) ? 0 : getGivenName().hashCode());
        hashCode = prime * hashCode + ((getSurname() == null) ? 0 : getSurname().hashCode());
        hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode());
        hashCode = prime * hashCode + ((getStorageRule() == null) ? 0 : getStorageRule().hashCode());
        hashCode = prime * hashCode + ((getTimeZoneId() == null) ? 0 : getTimeZoneId().hashCode());
        hashCode = prime * hashCode + ((getLocale() == null) ? 0 : getLocale().hashCode());
        hashCode = prime * hashCode + ((getGrantPoweruserPrivileges() == null) ? 0 : getGrantPoweruserPrivileges().hashCode());
        return hashCode;
    }

    @Override
    public UpdateUserRequest clone() {
        return (UpdateUserRequest) super.clone();
    }

}
/* See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * Esri Inc. licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.esri.gpt.server.csw.provider;
import com.esri.gpt.framework.context.BaseServlet;
import com.esri.gpt.framework.context.RequestContext;
import com.esri.gpt.framework.security.identity.NotAuthorizedException;
import com.esri.gpt.framework.util.Val;
import com.esri.gpt.server.csw.provider.components.IOriginalXmlProvider;
import com.esri.gpt.server.csw.provider.components.IProviderFactory;
import com.esri.gpt.server.csw.provider.components.OperationContext;
import com.esri.gpt.server.csw.provider.components.OperationResponse;
import com.esri.gpt.server.csw.provider.components.OwsException;
import com.esri.gpt.server.csw.provider.components.RequestHandler;
import com.esri.gpt.server.csw.provider.local.ProviderFactory;

import java.io.UnsupportedEncodingException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * CSW provider servlet.
 * <p>
 * Serves OGC CSW requests (GET key-value pairs or POSTed XML) and, as a side
 * channel, returns the original full XML of a document when the request carries
 * a {@code getxml} query parameter.
 */
@SuppressWarnings("serial")
public class CswServlet extends BaseServlet {

  /** class variables ========================================================= */

  /** The Logger. */
  // NOTE(review): could be declared final; it is assigned only once here.
  private static Logger LOGGER = Logger.getLogger(CswServlet.class.getName());

  /** instance variables ====================================================== */
  // Whether CSW Transaction operations are allowed (default true; see init()).
  private boolean allowTransactions = true;
  // Sub-context path and resource-file prefix passed through to the provider factory.
  private String cswSubContextPath;
  private String resourceFilePrefix;

  /** methods ================================================================= */

  /**
   * Initializes the servlet.
   * <br/>Reads the "cswSubContextPath" and "resourceFilePrefix".
   * init params from the servlet configuration.
   * @param config the servlet configuration
   * @throws ServletException if an initialization exception occurs
   */
  @Override
  public void init(ServletConfig config) throws ServletException {
    super.init(config);
    this.cswSubContextPath = config.getInitParameter("cswSubContextPath");
    this.resourceFilePrefix = config.getInitParameter("resourceFilePrefix");
    // Transactions stay enabled unless the init param is explicitly "false"
    // (any other value, including absent, leaves them on).
    String s = Val.chkStr(config.getInitParameter("allowTransactions"));
    this.allowTransactions = !s.equalsIgnoreCase("false");
  }

  /**
   * Executes a request.
   * <p>
   * Dispatches either to the full-XML retrieval path (when a {@code getxml}
   * parameter is present in the query string) or to normal CSW processing.
   * @param request the HTTP servlet request
   * @param response the HTTP servlet response
   * @param context the request context
   * @throws Exception if a processing exception occurs
   */
  protected void execute(HttpServletRequest request,
                         HttpServletResponse response,
                         RequestContext context)
    throws Exception {

    // check for a request to return a full xml
    String sGetXmlUuid = "";
    // NOTE(review): despite the "LC" suffix, the query string is NOT lower-cased
    // here, so the "getxml=" substring match below is case-sensitive.
    String sParamsLC = Val.chkStr(request.getQueryString());
    if (sParamsLC.indexOf("getxml=") != -1) {
      sGetXmlUuid = Val.chkStr(request.getParameter("getxml"));

      // try to decode uuids that have been mistakenly double encoded by an external client
      // ("%7B" is an encoded "{", the typical start of a braced UUID).
      if (sGetXmlUuid.startsWith("%7B")) {
        try {
          String s = java.net.URLDecoder.decode(sGetXmlUuid,"UTF-8");
          sGetXmlUuid = Val.chkStr(s);
        } catch (UnsupportedEncodingException ue) {}
        // ^ deliberately ignored: UTF-8 is always supported by the JVM, so this
        // catch can never fire in practice; decoding is best-effort anyway.
      }
    }

    // return the full xml if requested
    if (sGetXmlUuid.length() > 0) {
      LOGGER.finer("Retrieving document: "+sGetXmlUuid);
      String xml = "";
      try {
        xml = readFullXml(request,response,context,sGetXmlUuid);
      } catch (NotAuthorizedException nae) {
        // authorization failures must propagate to the container
        throw nae;
      } catch (Throwable t) {
        // any other failure is logged and an empty body is written
        LOGGER.warning("\nError retrieving document: "+sGetXmlUuid+"\n "+t.toString());
      }
      this.writeXmlResponse(response,Val.chkStr(xml));

    // execute a normal CSW request
    } else {
      executeCSW(request,response,context);
    }
  }

  /**
   * Executes a CSW request.
   * <p>
   * A non-empty request body is handled as a POSTed XML request; otherwise the
   * request is treated as a GET (key-value pair) request. Exceptions are
   * translated into an OWS ExceptionReport rather than propagated (except
   * authorization failures, which {@link #handleException} rethrows).
   * @param request the HTTP servlet request
   * @param response the HTTP servlet response
   * @param context the request context
   * @throws Exception if a processing exception occurs
   */
  protected void executeCSW(HttpServletRequest request,
                            HttpServletResponse response,
                            RequestContext context)
    throws Exception {

    // process the request
    LOGGER.fine("Executing CSW provider request....");
    String cswResponse = "";
    String mimeType = "application/xml";
    RequestHandler handler = null;
    OperationResponse opResponse = null;
    try {
      String cswRequest = readInputCharacters(request);
      LOGGER.finer("cswRequest:\n"+cswRequest);
      handler = this.makeRequestHandler(request,response,context);
      if (cswRequest.length() > 0) {
        opResponse = handler.handleXML(cswRequest);
      } else {
        opResponse = handler.handleGet(request);
      }
      if (opResponse != null) {
        cswResponse = Val.chkStr(opResponse.getResponseXml());
        // honor an explicitly requested text/xml output format
        String fmt = Val.chkStr(opResponse.getOutputFormat());
        if (fmt.equalsIgnoreCase("text/xml")) {
          mimeType = "text/xml";
        }
      }
    } catch (Exception e) {
      // the operation context (if any) gives the exception report its language/config
      OperationContext opContext = null;
      if (handler != null) {
        opContext = handler.getOperationContext();
      }
      cswResponse = handleException(opContext,e);
    }

    // write the response (nothing is written when the report/response is empty)
    LOGGER.finer("cswResponse:\n"+cswResponse);
    if (cswResponse.length() > 0) {
      writeCharacterResponse(response,cswResponse,"UTF-8",mimeType+"; charset=UTF-8");
    }
  }

  /**
   * Creation an ExceptionReport response when an exception is encountered.
   * @param e the exception
   * @return the exception report string
   * @throws throws Exception if an authorization related exception occurs
   * @deprecated replaced by {@link #handleException(OperationContext,Exception)}
   */
  protected String handleException(Exception e) throws Exception {
    return this.handleException(null,e);
  }

  /**
   * Creation an ExceptionReport response when an exception is encountered.
   * <p>
   * {@link NotAuthorizedException} is rethrown; an {@link OwsException} is
   * reported as-is; anything else is wrapped in a new OwsException and logged
   * at WARNING with its stack trace.
   * @param context the operation context
   * @param e the exception
   * @return the exception report string
   * @throws throws Exception if an authorization related exception occurs
   */
  protected String handleException(OperationContext context, Exception e) throws Exception {
    if (e instanceof NotAuthorizedException) {
      throw e;
    } else if (e instanceof OwsException) {
      OwsException ows = (OwsException)e;
      LOGGER.finer("Invalid CSW request: "+e.getMessage());
      return ows.getReport(context);
    } else {
      OwsException ows = new OwsException(e);
      LOGGER.log(Level.WARNING,e.toString(),e);
      return ows.getReport(context);
    }
  }

  /**
   * Makes a handler for the CSW request.
   * <p>
   * Propagates this servlet's transaction policy onto the handler's service
   * properties.
   * @param request the HTTP servlet request
   * @param response the HTTP servlet response
   * @param context the request context
   * @return the request handler (may be null if the factory returns none)
   */
  protected RequestHandler makeRequestHandler(HttpServletRequest request,
                                              HttpServletResponse response,
                                              RequestContext context) {
    IProviderFactory factory = new ProviderFactory();
    RequestHandler handler = factory.makeRequestHandler(
        request,context,this.cswSubContextPath,this.resourceFilePrefix);
    if (handler != null) {
      handler.getOperationContext().getServiceProperties().setAllowTransactions(this.allowTransactions);
    }
    return handler;
  }

  /**
   * Reads the full XML associated with a document UUID.
   * @param request the HTTP servlet request
   * @param response the HTTP servlet response
   * @param context the request context
   * @param id the document id
   * @return the document XML
   * @throws Exception if an exception occurs (an OwsException with code
   *         InvalidParameterValue when the provider does not support getxml)
   */
  protected String readFullXml(HttpServletRequest request,
                               HttpServletResponse response,
                               RequestContext context,
                               String id)
    throws Exception {
    RequestHandler handler = this.makeRequestHandler(request,response,context);
    OperationContext opContext = handler.getOperationContext();
    IProviderFactory factory = opContext.getProviderFactory();
    IOriginalXmlProvider oxp = factory.makeOriginalXmlProvider(opContext);
    if (oxp == null) {
      String msg = "The getxml parameter is not supported.";
      String locator = "getxml";
      throw new OwsException(OwsException.OWSCODE_InvalidParameterValue,locator,msg);
    } else {
      String xml = oxp.provideOriginalXml(opContext,id);
      return xml;
    }
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is not the original file distributed by the Apache Software Foundation
 * It has been modified by the Hipparchus project
 */

package org.hipparchus.linear;

import java.util.Iterator;

import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathRuntimeException;
import org.hipparchus.linear.RealVector.Entry;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link RealVector}.
 * <p>
 * The vector under test ({@link RealVectorTestImpl}) implements only the
 * abstract subset of the {@code RealVector} API, so most inherited test cases
 * are overridden here to assert that they throw an "unsupported operation"
 * style exception instead of running the inherited assertions.
 */
public class RealVectorTest extends RealVectorAbstractTest{

    /** Creates the minimal test vector implementation over the given data. */
    @Override
    public RealVector create(final double[] data) {
        return new RealVectorTestImpl(data);
    }

    // --- Operations not implemented by RealVectorTestImpl: each inherited test
    // --- must fail with an unsupported-operation exception.

    @Test
    @Override
    public void testAppendVector() {
        checkUnsupported(() -> super.testAppendVector());
    }

    @Test
    @Override
    public void testAppendScalar() {
        checkUnsupported(() -> super.testAppendScalar());
    }

    @Test
    @Override
    public void testGetSubVector() {
        checkUnsupported(() -> super.testGetSubVector());
    }

    @Test
    @Override
    public void testGetSubVectorInvalidIndex1() {
        checkUnsupported(() -> super.testGetSubVectorInvalidIndex1());
    }

    @Test
    @Override
    public void testGetSubVectorInvalidIndex2() {
        checkUnsupported(() -> super.testGetSubVectorInvalidIndex2());
    }

    @Test
    @Override
    public void testGetSubVectorInvalidIndex3() {
        checkUnsupported(() -> super.testGetSubVectorInvalidIndex3());
    }

    @Test
    @Override
    public void testGetSubVectorInvalidIndex4() {
        checkUnsupported(() -> super.testGetSubVectorInvalidIndex4());
    }

    @Test
    @Override
    public void testSetSubVectorSameType() {
        checkUnsupported(() -> super.testSetSubVectorSameType());
    }

    @Test
    @Override
    public void testSetSubVectorMixedType() {
        checkUnsupported(() -> super.testSetSubVectorMixedType());
    }

    @Test
    @Override
    public void testSetSubVectorInvalidIndex1() {
        checkUnsupported(() -> super.testSetSubVectorInvalidIndex1());
    }

    @Test
    @Override
    public void testSetSubVectorInvalidIndex2() {
        checkUnsupported(() -> super.testSetSubVectorInvalidIndex2());
    }

    @Test
    @Override
    public void testSetSubVectorInvalidIndex3() {
        checkUnsupported(() -> super.testSetSubVectorInvalidIndex3());
    }

    @Test
    @Override
    public void testIsNaN() {
        checkUnsupported(() -> super.testIsNaN());
    }

    @Test
    @Override
    public void testIsInfinite() {
        checkUnsupported(() -> super.testIsInfinite());
    }

    @Test
    @Override
    public void testEbeMultiplySameType() {
        checkUnsupported(() -> super.testEbeMultiplySameType());
    }

    @Test
    @Override
    public void testEbeMultiplyMixedTypes() {
        checkUnsupported(() -> super.testEbeMultiplyMixedTypes());
    }

    @Test
    @Override
    public void testEbeMultiplyDimensionMismatch() {
        checkUnsupported(() -> super.testEbeMultiplyDimensionMismatch());
    }

    @Test
    @Override
    public void testEbeDivideSameType() {
        checkUnsupported(() -> super.testEbeDivideSameType());
    }

    @Test
    @Override
    public void testEbeDivideMixedTypes() {
        checkUnsupported(() -> super.testEbeDivideMixedTypes());
    }

    @Test
    @Override
    public void testEbeDivideDimensionMismatch() {
        checkUnsupported(() -> super.testEbeDivideDimensionMismatch());
    }

    /**
     * Checks that the sparse iterator visits exactly the entries whose value
     * differs from the vector's preferred (default) entry value.
     */
    @Test
    public void testSparseIterator() {
        /*
         * For non-default values, use x + 1, x + 2, etc... to make sure that
         * these values are really different from x.
         */
        final double x = getPreferredEntryValue();
        final double[] data = {
            x, x + 1d, x, x, x + 2d, x + 3d, x + 4d, x, x, x, x + 5d, x + 6d, x
        };

        RealVector v = create(data);
        Entry e;
        int i = 0;
        final double[] nonDefault = {
            x + 1d, x + 2d, x + 3d, x + 4d, x + 5d, x + 6d
        };
        for (Iterator<Entry> it = v.sparseIterator(); it.hasNext(); i++) {
            e = it.next();
            Assert.assertEquals(nonDefault[i], e.getValue(), 0);
        }
        // NOTE(review): there is no assertion that i == nonDefault.length after
        // the loop, so an iterator that terminates early would still pass.
        double [] onlyOne = {x, x + 1d, x};
        v = create(onlyOne);
        for(Iterator<Entry> it = v.sparseIterator(); it.hasNext(); ) {
            e = it.next();
            Assert.assertEquals(onlyOne[1], e.getValue(), 0);
        }
        // NOTE(review): similarly, no assertion that exactly one entry was visited.
    }

    @Test
    @Override
    public void testSerial() {
        checkUnsupported(() -> super.testSerial());
    }

    @Test
    @Override
    public void testEquals() {
        checkUnsupported(() -> super.testEquals());
    }

    /** Zero-argument, void callback used to defer execution of a super test. */
    interface Thunk {
        void call();
    }

    /**
     * Runs {@code t} and asserts it throws either a MathRuntimeException with
     * the UNSUPPORTED_OPERATION specifier or a plain
     * UnsupportedOperationException; fails if it completes normally.
     */
    private void checkUnsupported(final Thunk t) {
        try {
            t.call();
            Assert.fail("an exception should have been thrown");
        } catch (MathRuntimeException mre) {
            Assert.assertEquals(LocalizedCoreFormats.UNSUPPORTED_OPERATION, mre.getSpecifier());
        } catch (UnsupportedOperationException uoe) {
            // expected
        }
    }

}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.grid.e2e.node;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.grid.common.GridRole;
import org.openqa.grid.e2e.utils.GridTestHelper;
import org.openqa.grid.e2e.utils.RegistryTestHelper;
import org.openqa.grid.internal.ExternalSessionKey;
import org.openqa.grid.internal.GridRegistry;
import org.openqa.grid.internal.TestSession;
import org.openqa.grid.internal.exception.NewSessionException;
import org.openqa.grid.internal.utils.SelfRegisteringRemote;
import org.openqa.grid.web.Hub;
import org.openqa.grid.web.servlet.handler.RequestHandler;
import org.openqa.grid.web.servlet.handler.RequestType;
import org.openqa.grid.web.servlet.handler.SeleniumBasedRequest;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.remote.BrowserType;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.server.SeleniumServer;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

/**
 * End-to-end check that the default proxy injects the node-registered browser
 * binary location into a new session's requested capabilities, unless the
 * client supplied its own binary (which must win).
 */
public class DefaultProxyFindsFirefoxLocationsTest {

  // Binary locations registered on the node per browser version.
  private static final String LOCATION_FF_7 = "/home/ff7";
  private static final String LOCATION_FF_3 = "c:\\program files\\ff3";
  private static final String LOCATION_CHROME_27 = "/home/chrome27";
  private static final String LOCATION_CHROME_29 = "c:\\program files\\Chrome29.exe";
  private Hub hub;
  private GridRegistry registry;
  private SelfRegisteringRemote remote;

  /**
   * Starts a hub and registers a single node advertising firefox 7/3/20 and
   * chrome 27/29/30, each with a binary location capability (versions 20 and
   * 30 carry a placeholder binary that the client request is expected to
   * override).
   */
  @Before
  public void prepare() throws Exception {

    hub = GridTestHelper.getHub();
    registry = hub.getRegistry();
    registry.setThrowOnCapabilityNotPresent(false);

    remote = GridTestHelper.getRemoteWithoutCapabilities(hub.getUrl(), GridRole.NODE);
    remote.setMaxConcurrent(100);

    DesiredCapabilities caps = null;

    // firefox
    caps = DesiredCapabilities.firefox();
    caps.setCapability(FirefoxDriver.BINARY, LOCATION_FF_7);
    caps.setVersion("7");
    remote.addBrowser(caps, 1);
    caps = DesiredCapabilities.firefox();
    caps.setCapability(FirefoxDriver.BINARY, LOCATION_FF_3);
    caps.setVersion("3");
    remote.addBrowser(caps, 1);
    caps = DesiredCapabilities.firefox();
    caps.setCapability(FirefoxDriver.BINARY, "should be overwritten");
    caps.setVersion("20");
    remote.addBrowser(caps, 1);

    // chrome
    caps = DesiredCapabilities.chrome();
    caps.setCapability("chrome_binary", LOCATION_CHROME_27);
    caps.setVersion("27");
    remote.addBrowser(caps, 1);
    caps = DesiredCapabilities.chrome();
    caps.setCapability("chrome_binary", LOCATION_CHROME_29);
    caps.setVersion("29");
    remote.addBrowser(caps, 2);
    caps = DesiredCapabilities.chrome();
    caps.setCapability("chrome_binary", "should be overwritten");
    caps.setVersion("30");
    remote.addBrowser(caps, 1);

    remote.setRemoteServer(new SeleniumServer(remote.getConfiguration()));
    remote.startRemoteServer();
    remote.sendRegistrationRequest();
    RegistryTestHelper.waitForNode(registry, 1);
  }

  /**
   * For each browser/version, requests a new session and verifies the binary
   * the proxy placed in the requested capabilities: firefox via the
   * FirefoxDriver.BINARY capability, chrome via the "binary" entry of the
   * ChromeOptions capability map; client-supplied binaries must be preserved.
   */
  @Test(timeout = 5000)
  public void testBrowserLocations() throws MalformedURLException {
    Map<String, Object> req_caps = null;
    RequestHandler newSessionRequest = null;

    // firefox
    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.FIREFOX);
    req_caps.put(CapabilityType.VERSION, "7");
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    assertEquals(LOCATION_FF_7, newSessionRequest.getSession().getRequestedCapabilities()
        .get(FirefoxDriver.BINARY));

    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.FIREFOX);
    req_caps.put(CapabilityType.VERSION, "3");
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    assertEquals(LOCATION_FF_3, newSessionRequest.getSession().getRequestedCapabilities()
        .get(FirefoxDriver.BINARY));

    // a client-specified firefox binary wins over the node's placeholder
    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.FIREFOX);
    req_caps.put(CapabilityType.VERSION, "20");
    req_caps.put(FirefoxDriver.BINARY, "custom");
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    assertEquals("custom", newSessionRequest.getSession().getRequestedCapabilities()
        .get(FirefoxDriver.BINARY));

    // chrome
    // NOTE(review): these casts are unchecked (no @SuppressWarnings); the
    // capability value is known to be a Map only by construction.
    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.CHROME);
    req_caps.put(CapabilityType.VERSION, "27");
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    Map<String, Object> json = (Map<String, Object>)
        newSessionRequest.getSession().getRequestedCapabilities().get(ChromeOptions.CAPABILITY);
    assertEquals(LOCATION_CHROME_27, json.get("binary"));

    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.CHROME);
    req_caps.put(CapabilityType.VERSION, "29");
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    json = (Map<String, Object>)
        newSessionRequest.getSession().getRequestedCapabilities().get(ChromeOptions.CAPABILITY);
    assertEquals(LOCATION_CHROME_29, json.get("binary"));

    // existing ChromeOptions entries are kept while the binary is injected
    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.CHROME);
    req_caps.put(CapabilityType.VERSION, "29");
    Map<String, Object> options = new HashMap<>();
    options.put("test1", "test2");
    req_caps.put(ChromeOptions.CAPABILITY, options);
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    json = (Map<String, Object>)
        newSessionRequest.getSession().getRequestedCapabilities().get(ChromeOptions.CAPABILITY);
    assertEquals(LOCATION_CHROME_29, json.get("binary"));
    assertEquals("test2", json.get("test1"));

    // a client-specified chrome binary wins over the node's placeholder
    req_caps = new HashMap<>();
    req_caps.put(CapabilityType.BROWSER_NAME, BrowserType.CHROME);
    req_caps.put(CapabilityType.VERSION, "30");
    options = new HashMap<>();
    options.put("test11", "test22");
    options.put("binary", "custom");
    req_caps.put(ChromeOptions.CAPABILITY, options);
    newSessionRequest = new MockedRequestHandler(getNewRequest(req_caps));
    newSessionRequest.process();
    json = (Map<String, Object>)
        newSessionRequest.getSession().getRequestedCapabilities().get(ChromeOptions.CAPABILITY);
    assertEquals("custom", json.get("binary"));
    assertEquals("test22", json.get("test11"));
  }

  /** Stops the node's remote server and the hub started in prepare(). */
  @After
  public void teardown() throws Exception {
    remote.stopRemoteServer();
    hub.stop();
  }

  /**
   * Builds a SeleniumBasedRequest over a mocked HttpServletRequest whose
   * extraction hooks are stubbed out; only the desired capabilities matter.
   */
  private SeleniumBasedRequest getNewRequest(Map<String, Object> desiredCapability) {
    HttpServletRequest httpreq = mock(HttpServletRequest.class);

    return new SeleniumBasedRequest(httpreq, registry, RequestType.START_SESSION, desiredCapability) {

      public ExternalSessionKey extractSession() {
        return null;
      }

      public RequestType extractRequestType() {
        return null;
      }

      public Map<String, Object> extractDesiredCapability() {
        return getDesiredCapabilities();
      }
    };
  }

  /** Request handler that never forwards anything to a real node. */
  class MockedRequestHandler extends RequestHandler {

    public MockedRequestHandler(SeleniumBasedRequest request) {
      super(request,null, request.getRegistry());
    }

    public void setSession(TestSession session) {
      super.setSession(session);
    }

    @Override
    protected void forwardRequest(TestSession session, RequestHandler handler)
        throws IOException {}

    @Override
    public void forwardNewSessionRequestAndUpdateRegistry(TestSession session)
        throws NewSessionException {}
  }
}
package org.apache.lucene.util.automaton;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.*;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.UnicodeUtil;

/**
 * Builds a minimal, deterministic {@link Automaton} that accepts a set of
 * strings. The algorithm requires sorted input data, but is very fast
 * (nearly linear with the input size).
 *
 * @see #build(Collection)
 * @see BasicAutomata#makeStringUnion(Collection)
 */
final class DaciukMihovAutomatonBuilder {

  /**
   * DFSA state with <code>char</code> labels on transitions.
   */
  private final static class State {

    /** An empty set of labels. */
    private final static int[] NO_LABELS = new int[0];

    /** An empty set of states. */
    private final static State[] NO_STATES = new State[0];

    /**
     * Labels of outgoing transitions. Indexed identically to {@link #states}.
     * Labels must be sorted lexicographically.
     */
    int[] labels = NO_LABELS;

    /**
     * States reachable from outgoing transitions. Indexed identically to
     * {@link #labels}.
     */
    State[] states = NO_STATES;

    /**
     * <code>true</code> if this state corresponds to the end of at least one
     * input sequence.
     */
    boolean is_final;

    /**
     * Returns the target state of a transition leaving this state and labeled
     * with <code>label</code>. If no such transition exists, returns
     * <code>null</code>.
     */
    State getState(int label) {
      final int index = Arrays.binarySearch(labels, label);
      return index >= 0 ? states[index] : null;
    }

    /**
     * Two states are equal if:
     * <ul>
     * <li>they have an identical number of outgoing transitions, labeled with
     * the same labels</li>
     * <li>corresponding outgoing transitions lead to the same states (to states
     * with an identical right-language).
     * </ul>
     * <p>
     * The unchecked cast is safe because this class is private and instances
     * are only ever compared against other {@code State}s (via the registry).
     */
    @Override
    public boolean equals(Object obj) {
      final State other = (State) obj;
      return is_final == other.is_final
          && Arrays.equals(this.labels, other.labels)
          && referenceEquals(this.states, other.states);
    }

    /**
     * Compute the hash code of the <i>current</i> status of this state.
     */
    @Override
    public int hashCode() {
      int hash = is_final ? 1 : 0;

      hash ^= hash * 31 + this.labels.length;
      for (int c : this.labels)
        hash ^= hash * 31 + c;

      /*
       * Compare the right-language of this state using reference-identity of
       * outgoing states. This is possible because states are interned (stored
       * in registry) and traversed in post-order, so any outgoing transitions
       * are already interned.
       */
      for (State s : this.states) {
        hash ^= System.identityHashCode(s);
      }

      return hash;
    }

    /**
     * Return <code>true</code> if this state has any children (outgoing
     * transitions).
     */
    boolean hasChildren() {
      return labels.length > 0;
    }

    /**
     * Create a new outgoing transition labeled <code>label</code> and return
     * the newly created target state for this transition.
     */
    State newState(int label) {
      assert Arrays.binarySearch(labels, label) < 0 :
        "State already has transition labeled: " + label;

      labels = Arrays.copyOf(labels, labels.length + 1);
      states = Arrays.copyOf(states, states.length + 1);

      labels[labels.length - 1] = label;
      return states[states.length - 1] = new State();
    }

    /**
     * Return the most recent transitions's target state.
     */
    State lastChild() {
      assert hasChildren() : "No outgoing transitions.";
      return states[states.length - 1];
    }

    /**
     * Return the associated state if the most recent transition is labeled with
     * <code>label</code>.
     */
    State lastChild(int label) {
      final int index = labels.length - 1;
      State s = null;
      if (index >= 0 && labels[index] == label) {
        s = states[index];
      }
      assert s == getState(label);
      return s;
    }

    /**
     * Replace the last added outgoing transition's target state with the given
     * state.
     */
    void replaceLastChild(State state) {
      assert hasChildren() : "No outgoing transitions.";
      states[states.length - 1] = state;
    }

    /**
     * Compare two lists of objects for reference-equality.
     */
    private static boolean referenceEquals(Object[] a1, Object[] a2) {
      if (a1.length != a2.length) {
        return false;
      }

      for (int i = 0; i < a1.length; i++) {
        if (a1[i] != a2[i]) {
          return false;
        }
      }

      return true;
    }
  }

  /**
   * A "registry" for state interning.
   */
  private HashMap<State,State> stateRegistry = new HashMap<State,State>();

  /**
   * Root automaton state.
   */
  private State root = new State();

  /**
   * Previous sequence added to the automaton in {@link #add(CharsRef)}.
   */
  private CharsRef previous;

  /**
   * A comparator used for enforcing sorted UTF8 order, used in assertions only.
   */
  @SuppressWarnings("deprecation")
  private static final Comparator<CharsRef> comparator =
    CharsRef.getUTF16SortedAsUTF8Comparator();

  /**
   * Add another character sequence to this automaton. The sequence must be
   * lexicographically larger or equal compared to any previous sequences added
   * to this automaton (the input must be sorted).
   */
  public void add(CharsRef current) {
    assert stateRegistry != null : "Automaton already built.";
    assert previous == null || comparator.compare(previous, current) <= 0 :
      "Input must be in sorted UTF-8 order: " + previous + " >= " + current;
    assert setPrevious(current);

    // Descend in the automaton (find matching prefix).
    int pos = 0;
    final int max = current.length();
    State next, state = root;
    while (pos < max) {
      // Decode the code point once per position and reuse it for both the
      // transition lookup and the position advance (surrogate pairs take
      // two chars). Previously codePointAt() was evaluated twice per step.
      final int codePoint = Character.codePointAt(current, pos);
      next = state.lastChild(codePoint);
      if (next == null) {
        break;
      }
      state = next;
      pos += Character.charCount(codePoint);
    }

    if (state.hasChildren()) replaceOrRegister(state);

    addSuffix(state, current, pos);
  }

  /**
   * Finalize the automaton and return the root state. No more strings can be
   * added to the builder after this call.
   *
   * @return Root automaton state.
   */
  public State complete() {
    if (this.stateRegistry == null) throw new IllegalStateException();

    if (root.hasChildren()) replaceOrRegister(root);

    stateRegistry = null;
    return root;
  }

  /**
   * Internal recursive traversal for conversion.
   * Memoizes converted states in <code>visited</code> so shared suffixes in
   * the minimal DFSA map to shared states in the output automaton.
   */
  private static org.apache.lucene.util.automaton.State convert(State s,
      IdentityHashMap<State,org.apache.lucene.util.automaton.State> visited) {
    org.apache.lucene.util.automaton.State converted = visited.get(s);
    if (converted != null) return converted;

    converted = new org.apache.lucene.util.automaton.State();
    converted.setAccept(s.is_final);

    visited.put(s, converted);
    int i = 0;
    int[] labels = s.labels;
    for (DaciukMihovAutomatonBuilder.State target : s.states) {
      converted.addTransition(
        new Transition(labels[i++], convert(target, visited)));
    }

    return converted;
  }

  /**
   * Build a minimal, deterministic automaton from a sorted list of {@link BytesRef} representing
   * strings in UTF-8. These strings must be binary-sorted.
   */
  public static Automaton build(Collection<BytesRef> input) {
    final DaciukMihovAutomatonBuilder builder = new DaciukMihovAutomatonBuilder();

    CharsRef scratch = new CharsRef();
    for (BytesRef b : input) {
      UnicodeUtil.UTF8toUTF16(b, scratch);
      builder.add(scratch);
    }

    Automaton a = new Automaton();
    a.initial = convert(
      builder.complete(),
      new IdentityHashMap<State,org.apache.lucene.util.automaton.State>());
    a.deterministic = true;
    return a;
  }

  /**
   * Copy <code>current</code> into an internal buffer.
   */
  private boolean setPrevious(CharsRef current) {
    // don't need to copy, once we fix https://issues.apache.org/jira/browse/LUCENE-3277
    // still, called only from assert
    previous = CharsRef.deepCopyOf(current);
    return true;
  }

  /**
   * Replace last child of <code>state</code> with an already registered state
   * or stateRegistry the last child state.
   */
  private void replaceOrRegister(State state) {
    final State child = state.lastChild();

    if (child.hasChildren()) replaceOrRegister(child);

    final State registered = stateRegistry.get(child);
    if (registered != null) {
      state.replaceLastChild(registered);
    } else {
      stateRegistry.put(child, child);
    }
  }

  /**
   * Add a suffix of <code>current</code> starting at <code>fromIndex</code>
   * (inclusive) to state <code>state</code>.
   */
  private void addSuffix(State state, CharSequence current, int fromIndex) {
    final int len = current.length();
    while (fromIndex < len) {
      int cp = Character.codePointAt(current, fromIndex);
      state = state.newState(cp);
      fromIndex += Character.charCount(cp);
    }
    state.is_final = true;
  }
}
package cz.creeper.customitemlibrary; import com.flowpowered.math.vector.Vector3i; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import cz.creeper.customitemlibrary.data.CustomItemLibraryKeys; import cz.creeper.customitemlibrary.data.mutable.CustomFeatureData; import cz.creeper.customitemlibrary.feature.CustomFeature; import cz.creeper.customitemlibrary.feature.CustomFeatureDefinition; import cz.creeper.customitemlibrary.feature.CustomFeatureRegistry; import cz.creeper.customitemlibrary.feature.CustomFeatureRegistryMap; import cz.creeper.customitemlibrary.feature.DurabilityRegistry; import cz.creeper.customitemlibrary.feature.block.CustomBlock; import cz.creeper.customitemlibrary.feature.block.CustomBlockDefinition; import cz.creeper.customitemlibrary.feature.block.simple.SimpleCustomBlock; import cz.creeper.customitemlibrary.feature.block.simple .SimpleCustomBlockDefinition; import cz.creeper.customitemlibrary.feature.block.simple .SimpleCustomBlockRegistry; import cz.creeper.customitemlibrary.feature.inventory.CustomInventory; import cz.creeper.customitemlibrary.feature.inventory.CustomInventoryDefinition; import cz.creeper.customitemlibrary.feature.inventory.simple .SimpleCustomInventoryDefinition; import cz.creeper.customitemlibrary.feature.inventory.simple .SimpleCustomInventoryRegistry; import cz.creeper.customitemlibrary.feature.item.CustomItem; import cz.creeper.customitemlibrary.feature.item.CustomItemDefinition; import cz.creeper.customitemlibrary.feature.item.material .CustomMaterialDefinition; import cz.creeper.customitemlibrary.feature.item.material .CustomMaterialRegistry; import cz.creeper.customitemlibrary.feature.item.tool.CustomToolDefinition; import cz.creeper.customitemlibrary.feature.item.tool.CustomToolRegistry; import cz.creeper.customitemlibrary.util.Block; import lombok.ToString; import lombok.val; import org.spongepowered.api.Sponge; import org.spongepowered.api.asset.Asset; import 
org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.entity.living.ArmorStand; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.Order; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.world.chunk.LoadChunkEvent; import org.spongepowered.api.event.world.chunk.PopulateChunkEvent; import org.spongepowered.api.plugin.PluginContainer; import org.spongepowered.api.scheduler.Task; import org.spongepowered.api.util.AABB; import org.spongepowered.api.util.Identifiable; import org.spongepowered.api.world.Chunk; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Iterator; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.Spliterator; import java.util.Spliterators; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @ToString public class CustomItemServiceImpl implements CustomItemService { public static final String DIRECTORY_NAME_REGISTRIES = "registries"; public static final String DIRECTORY_NAME_RESOURCEPACK = "resourcepack"; public static final String FILE_NAME_PACK = "pack.mcmeta"; private final CustomFeatureRegistryMap registryMap = new CustomFeatureRegistryMap(); private final Map<String, Map<String, CustomItemDefinition<? extends CustomItem>>> pluginIdsToTypeIdsToItemDefinitions = Maps.newHashMap(); private final Map<String, Map<String, CustomBlockDefinition<? extends CustomBlock>>> pluginIdsToTypeIdsToBlockDefinitions = Maps.newHashMap(); private final Map<String, Map<String, CustomInventoryDefinition<? extends CustomInventory>>> pluginIdsToTypeIdsToInventoryDefinitions = Maps.newHashMap(); private final Map<Block, Optional<UUID>> blockToArmorStand = Maps.newHashMap(); private final Map<Block, CustomBlock<? 
extends CustomBlockDefinition>> loadedBlocks = Maps.newHashMap(); private Task customBlockUpdateTask; public CustomItemServiceImpl() { registryMap.put(CustomToolDefinition.class, CustomToolRegistry.getInstance()); registryMap.put(CustomMaterialDefinition.class, CustomMaterialRegistry.getInstance()); registryMap.put(SimpleCustomBlockDefinition.class, SimpleCustomBlockRegistry.getInstance()); registryMap.put(SimpleCustomInventoryDefinition.class, SimpleCustomInventoryRegistry.getInstance()); registryMap.values().forEach(registry -> Sponge.getEventManager().registerListeners( CustomItemLibrary.getInstance(), registry )); Sponge.getEventManager().registerListeners(CustomItemLibrary.getInstance(), this); } @SuppressWarnings("unchecked") @Override public <I extends CustomFeature<T>, T extends CustomFeatureDefinition<I>> void register(T definition) { Preconditions.checkArgument(!"minecraft".equals(definition.getPluginContainer().getId()), "The plugin id must not be 'minecraft'."); Optional<CustomFeatureRegistry<I, T>> registry = registryMap.get(definition); if(!registry.isPresent()) throw new IllegalArgumentException("Invalid definition type."); if(definition instanceof CustomItemDefinition) { val typeIdsToDefinitions = getTypeIdsToItemDefinitions(definition.getPluginContainer()); if (typeIdsToDefinitions.containsKey(definition.getTypeId())) throw new IllegalStateException("A custom feature definition with ID \"" + definition.getTypeId() + "\" is already registered!"); typeIdsToDefinitions.put(definition.getTypeId(), (CustomItemDefinition<? 
extends CustomItem>) definition); } else if(definition instanceof CustomBlockDefinition) { val typeIdsToDefinitions = getTypeIdsToBlockDefinitions(definition.getPluginContainer()); if (typeIdsToDefinitions.containsKey(definition.getTypeId())) throw new IllegalStateException("A custom feature definition with ID \"" + definition.getTypeId() + "\" is already registered!"); typeIdsToDefinitions.put(definition.getTypeId(), (CustomBlockDefinition<? extends CustomBlock>) definition); } else if(definition instanceof CustomInventoryDefinition) { val typeIdsToDefinitions = getTypeIdsToInventoryDefinitions(definition.getPluginContainer()); if (typeIdsToDefinitions.containsKey(definition.getTypeId())) throw new IllegalStateException("A custom feature definition with ID \"" + definition.getTypeId() + "\" is already registered!"); typeIdsToDefinitions.put(definition.getTypeId(), (CustomInventoryDefinition<? extends CustomBlock>) definition); } else { throw new IllegalArgumentException("Invalid custom definition type. 
It must extend either CustomItemDefinition or CustomBlockDefinition."); } registry.get().register(definition); } @Override public void loadRegistry() { Path directory = getDirectoryRegistries(); DurabilityRegistry.getInstance().load(directory); } @Override public void saveRegistry() { Path directory = getDirectoryRegistries(); DurabilityRegistry.getInstance().save(directory); } public void prepare() { registerLoadedBlocks(); submitUpdateTask(); // It doesn't work with a method reference //noinspection Convert2MethodRef registryMap.values().forEach(registry -> registry.prepare()); } public Path generateResourcePack() { Path directory = getDirectoryResourcePack(); // Copy over the pack.mcmeta file Path packFile = directory.resolve(FILE_NAME_PACK); try { Files.createDirectories(directory); if (Files.exists(packFile)) Files.delete(packFile); Asset pack = Sponge.getAssetManager().getAsset(CustomItemLibrary.getInstance(), FILE_NAME_PACK) .orElseThrow(() -> new IllegalStateException("Could not access the 'pack.mcmeta' asset.")); pack.copyToFile(packFile); } catch(IOException e) { CustomItemLibrary.getInstance().getLogger() .warn("Could not create the 'pack.mcmeta' file."); e.printStackTrace(); } getDefinitions().forEach(customFeatureDefinition -> customFeatureDefinition.generateResourcePackFiles(directory)); registryMap.values().forEach(registry -> registry.generateResourcePackFiles(directory)); DurabilityRegistry.getInstance().generateResourcePack(directory); return directory; } @Override public Set<CustomItemDefinition<? extends CustomItem>> getItemDefinitions() { return pluginIdsToTypeIdsToItemDefinitions.values().stream() .flatMap(typeIdsToDefinitions -> typeIdsToDefinitions.values().stream()) .collect(Collectors.toSet()); } @Override public Set<CustomBlockDefinition<? 
extends CustomBlock>> getBlockDefinitions() { return pluginIdsToTypeIdsToBlockDefinitions.values().stream() .flatMap(typeIdsToDefinitions -> typeIdsToDefinitions.values().stream()) .collect(Collectors.toSet()); } @Override public Set<CustomInventoryDefinition<? extends CustomInventory>> getInventoryDefinitions() { return pluginIdsToTypeIdsToInventoryDefinitions.values().stream() .flatMap(typeIdsToDefinitions -> typeIdsToDefinitions.values().stream()) .collect(Collectors.toSet()); } @Override public Optional<CustomBlockDefinition<? extends CustomBlock>> getBlockDefinition(Block block) { return getArmorStandAt(block).flatMap(armorStand -> { String pluginId = armorStand.get(CustomItemLibraryKeys.CUSTOM_FEATURE_PLUGIN_ID).get(); String typeId = armorStand.get(CustomItemLibraryKeys.CUSTOM_FEATURE_TYPE_ID).get(); return getBlockDefinition(pluginId, typeId) .filter(CustomBlockDefinition.class::isInstance) .map(CustomBlockDefinition.class::cast); }); } public void setArmorStandAt(Block block, ArmorStand armorStand) { blockToArmorStand.put(block, Optional.of(armorStand.getUniqueId())); } public Optional<? extends CustomBlock<? extends CustomBlockDefinition>> getBlock(ArmorStand armorStand) { return getBlockDefinition(armorStand).flatMap(definition -> { Block block = Block.of(armorStand.getLocation()); blockToArmorStand.put(block, Optional.of(armorStand.getUniqueId())); return definition.wrapIfPossible(block); }); } public Optional<CustomBlockDefinition<? 
extends CustomBlock>> getBlockDefinition(ArmorStand armorStand) { return armorStand.get(CustomFeatureData.class).flatMap(data -> getBlockDefinition(data.customFeaturePluginId().get(), data.customFeatureTypeId().get())); } public Optional<ArmorStand> getArmorStandAt(Block block) { return blockToArmorStand.computeIfAbsent(block, k -> findArmorStandAt(k).map(Identifiable::getUniqueId)) .flatMap(id -> block.getWorld().flatMap(extent -> extent.getEntity(id)).map(ArmorStand.class::cast)); } public Optional<ArmorStand> findArmorStandAt(Block block) { Set<ArmorStand> armorStands = findArmorStandsAt(block); if (armorStands.isEmpty()) return Optional.empty(); Iterator<ArmorStand> armorStandIterator = armorStands.iterator(); ArmorStand armorStand = armorStandIterator.next(); while (armorStandIterator.hasNext()) { armorStandIterator.next().remove(); } return Optional.of(armorStand); } public Set<ArmorStand> findArmorStandsAt(Block block) { Chunk chunk = block.getChunk() .orElseThrow(() -> new IllegalStateException("Could not access the chunk of this block.")); Vector3i blockPosition = block.getPosition(); AABB aabb = new AABB(blockPosition, blockPosition.add(Vector3i.ONE)); Set<Entity> entities = chunk.getIntersectingEntities(aabb, CustomItemServiceImpl::isCustomBlockArmorStand); Iterator<Entity> entityIterator = entities.iterator(); return StreamSupport.stream(Spliterators.spliteratorUnknownSize(entityIterator, Spliterator.ORDERED), false) .map(ArmorStand.class::cast) .collect(Collectors.toSet()); } public static boolean isCustomBlockArmorStand(Entity entity) { return entity instanceof ArmorStand && entity.get(CustomFeatureData.class).isPresent(); } public boolean removeArmorStandsAt(Block block) { findArmorStandsAt(block).forEach(Entity::remove); return blockToArmorStand.remove(block) != null; } @Override public Optional<CustomItemDefinition<? extends CustomItem>> getItemDefinition(String pluginId, String typeId) { Map<String, CustomItemDefinition<? 
extends CustomItem>> typeIdsToDefinitions = pluginIdsToTypeIdsToItemDefinitions.get(pluginId); if(typeIdsToDefinitions == null) return Optional.empty(); return Optional.ofNullable(typeIdsToDefinitions.get(typeId)); } @Override public Optional<CustomBlockDefinition<? extends CustomBlock>> getBlockDefinition(String pluginId, String typeId) { Map<String, CustomBlockDefinition<? extends CustomBlock>> typeIdsToDefinitions = pluginIdsToTypeIdsToBlockDefinitions.get(pluginId); if(typeIdsToDefinitions == null) return Optional.empty(); return Optional.ofNullable(typeIdsToDefinitions.get(typeId)); } @Override public Optional<CustomInventoryDefinition<? extends CustomInventory>> getInventoryDefinition(String pluginId, String typeId) { Map<String, CustomInventoryDefinition<? extends CustomInventory>> typeIdsToDefinitions = pluginIdsToTypeIdsToInventoryDefinitions.get(pluginId); if(typeIdsToDefinitions == null) return Optional.empty(); return Optional.ofNullable(typeIdsToDefinitions.get(typeId)); } private Map<String, CustomItemDefinition<? extends CustomItem>> getTypeIdsToItemDefinitions(PluginContainer pluginContainer) { return getTypeIdsToItemDefinitions(pluginContainer.getId()); } private Map<String, CustomItemDefinition<? extends CustomItem>> getTypeIdsToItemDefinitions(String pluginId) { return pluginIdsToTypeIdsToItemDefinitions.computeIfAbsent(pluginId, k -> Maps.newHashMap()); } private Map<String, CustomBlockDefinition<? extends CustomBlock>> getTypeIdsToBlockDefinitions(PluginContainer pluginContainer) { return getTypeIdsToBlockDefinitions(pluginContainer.getId()); } private Map<String, CustomBlockDefinition<? extends CustomBlock>> getTypeIdsToBlockDefinitions(String pluginId) { return pluginIdsToTypeIdsToBlockDefinitions.computeIfAbsent(pluginId, k -> Maps.newHashMap()); } private Map<String, CustomInventoryDefinition<? 
extends CustomInventory>> getTypeIdsToInventoryDefinitions(PluginContainer pluginContainer) { return getTypeIdsToInventoryDefinitions(pluginContainer.getId()); } private Map<String, CustomInventoryDefinition<? extends CustomInventory>> getTypeIdsToInventoryDefinitions(String pluginId) { return pluginIdsToTypeIdsToInventoryDefinitions.computeIfAbsent(pluginId, k -> Maps.newHashMap()); } public <I extends CustomFeature<T>, T extends CustomFeatureDefinition<I>> Optional<CustomFeatureRegistry<I, T>> getRegistry(T definition) { return registryMap.get(definition); } @Listener(order = Order.BEFORE_POST) public void onChangeBlockBreak(ChangeBlockEvent event) { event.getTransactions().stream() .filter(Transaction::isValid) .forEach(blockSnapshotTransaction -> { BlockSnapshot original = blockSnapshotTransaction.getOriginal(); original.getLocation().map(Block::of).ifPresent(this::removeArmorStandsAt); }); } private void registerLoadedBlocks() { Sponge.getServer().getWorlds().stream() .flatMap(world -> StreamSupport.stream(world.getLoadedChunks().spliterator(), false)) .forEach(this::registerBlocksInChunk); } private void registerBlocksInChunk(Chunk chunk) { CustomItemServiceImpl service = CustomItemLibrary.getInstance().getService(); chunk.getEntities().stream() .filter(ArmorStand.class::isInstance) .map(ArmorStand.class::cast) .map(service::getBlock) .filter(Optional::isPresent) .map(Optional::get) .filter(SimpleCustomBlock.class::isInstance) .map(SimpleCustomBlock.class::cast) .forEach(customBlock -> loadedBlocks.put(customBlock.getBlock(), customBlock)); } public Optional<CustomBlock<? extends CustomBlockDefinition>> registerBlockAsLoaded(CustomBlock<? 
extends CustomBlockDefinition> block) { return Optional.ofNullable(loadedBlocks.put(block.getBlock(), block)); } @Listener public void onLoadChunk(LoadChunkEvent event) { registerBlocksInChunk(event.getTargetChunk()); } @Listener public void onPostPopulateChunk(PopulateChunkEvent.Post event) { registerBlocksInChunk(event.getTargetChunk()); } private void submitUpdateTask() { if(customBlockUpdateTask != null) customBlockUpdateTask.cancel(); customBlockUpdateTask = Sponge.getScheduler().createTaskBuilder() .name("Custom block update task") .intervalTicks(1) .execute(this::update) .submit(CustomItemLibrary.getInstance()); } private void update() { Iterator<CustomBlock<? extends CustomBlockDefinition>> iterator = loadedBlocks.values().iterator(); while(iterator.hasNext()) { CustomBlock<? extends CustomBlockDefinition> customBlock = iterator.next(); if(customBlock.isAccessible()) { customBlock.update(); } else { iterator.remove(); } } } public Optional<CustomBlock<? extends CustomBlockDefinition>> unregisterBlockAsLoaded(Block block) { return Optional.ofNullable(loadedBlocks.remove(block)); } public static Path getDirectoryRegistries() { return CustomItemLibrary.getInstance().getConfigPath() .resolveSibling(DIRECTORY_NAME_REGISTRIES); } public static Path getDirectoryResourcePack() { return CustomItemLibrary.getInstance().getConfigPath() .resolveSibling(DIRECTORY_NAME_RESOURCEPACK); } }
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.assertions.bpmn; import org.camunda.bpm.engine.runtime.ProcessInstance; import org.camunda.bpm.engine.test.Deployment; import org.camunda.bpm.engine.test.ProcessEngineRule; import org.camunda.bpm.engine.test.assertions.helpers.Failure; import org.camunda.bpm.engine.test.assertions.helpers.ProcessAssertTestCase; import org.junit.Rule; import org.junit.Test; import static org.camunda.bpm.engine.test.assertions.ProcessEngineTests.*; /** * @author Martin Schimak (martin.schimak@plexiti.com) */ public class ProcessInstanceAssertHasPassedTest extends ProcessAssertTestCase { @Rule public ProcessEngineRule processEngineRule = new ProcessEngineRule(); @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_OnlyActivity_RunningInstance_Success() { // Given ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // Then assertThat(processInstance).hasPassed("UserTask_1"); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_OnlyActivity_RunningInstance_Failure() { 
// When final ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // Then expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("UserTask_1"); } }); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_ParallelActivities_RunningInstance_Success() { // Given ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // Then assertThat(processInstance).hasPassed("UserTask_1"); // And assertThat(processInstance).hasPassed("UserTask_2"); // And assertThat(processInstance).hasPassed("UserTask_1", "UserTask_2"); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_ParallelActivities_RunningInstance_Failure() { // Given final ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // Then expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("UserTask_3"); } }); expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("UserTask_4"); } }); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_SeveralActivities_RunningInstance_Success() { // Given ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_3").singleResult()); // Then 
assertThat(processInstance).hasPassed("UserTask_1"); // And assertThat(processInstance).hasPassed("UserTask_2"); // And assertThat(processInstance).hasPassed("UserTask_3"); // And assertThat(processInstance).hasPassed("UserTask_1", "UserTask_2", "UserTask_3"); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_SeveralActivities_RunningInstance_Failure() { // Given final ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_3").singleResult()); // Then expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("UserTask_4"); } }); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_SeveralActivities_HistoricInstance_Success() { // Given ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_3").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_4").singleResult()); // Then assertThat(processInstance).hasPassed("UserTask_1"); // And assertThat(processInstance).hasPassed("UserTask_2"); // And assertThat(processInstance).hasPassed("UserTask_3"); // And assertThat(processInstance).hasPassed("UserTask_4"); // And assertThat(processInstance).hasPassed("UserTask_1", "UserTask_2", "UserTask_3", "UserTask_4"); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-hasPassed.bpmn" }) public void testHasPassed_SeveralActivities_HistoricInstance_Failure() { // Given final ProcessInstance processInstance = 
runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-hasPassed" ); // When complete(taskQuery().singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_2").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_3").singleResult()); // And complete(taskQuery().taskDefinitionKey("UserTask_4").singleResult()); // Then expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("UserTask_5"); } }); } @Test @Deployment(resources = {"bpmn/ProcessInstanceAssert-isWaitingAt.bpmn" }) public void testHasPassed_Null_Error() { // When final ProcessInstance processInstance = runtimeService().startProcessInstanceByKey( "ProcessInstanceAssert-isWaitingAt" ); // Then expect(new Failure() { @Override public void when() { String[] passed = null; assertThat(processInstance).hasPassed(passed); } }); expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed("ok", null); } }); expect(new Failure() { @Override public void when() { assertThat(processInstance).hasPassed(null, "ok"); } }); expect(new Failure() { @Override public void when() { String[] args = new String[]{}; assertThat(processInstance).hasPassed(args); } }); } }
/* * Copyright (c) 2016, Salesforce.com, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of Salesforce.com nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/
package com.salesforce.dva.argus.service.tsdb;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.salesforce.dva.argus.AbstractTest;
import com.salesforce.dva.argus.entity.Metric;
import com.salesforce.dva.argus.service.MetricService;
import com.salesforce.dva.argus.service.TSDBService;
import com.salesforce.dva.argus.service.metric.transform.TransformFactory;

/**
 * Tests query federation: splitting a MetricQuery into sub-queries (by time
 * range or by TSDB read endpoint) and joining the per-sub-query results back
 * into a single result set.
 */
public class MetricFederationTest extends AbstractTest {

    // Jackson mapper configured with the Metric (de)serializers; used by the
    // metric-string parsing helper (defined later in this class, outside this view).
    private ObjectMapper _mapper;

    @Before
    public void initialize() {
        _mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();

        module.addSerializer(Metric.class, new MetricTransform.Serializer());
        module.addDeserializer(ResultSet.class, new MetricTransform.MetricListDeserializer());
        _mapper.registerModule(module);
    }

    /**
     * A 2-day query federated by time should fork into day-sized sub-queries.
     */
    @Test
    public void testTimeFederationFork() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-2d:scope:metric{tagk=tagv}:avg:15m-avg");

        QueryFederation queryFederation = new TimeQueryFederation();
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);
        /* Since the difference for large range values of timestamp is not exactly 2d */
        // 3 sub-queries rather than 2 because the boundaries don't align exactly.
        assertTrue(mapQuerySubQueries.get(queries.get(0)).size() == 3);
    }

    /**
     * Endpoint federation with a SUM downsampler and a tag: one sub-query per
     * endpoint; series with the same tags (machineHost3) are merged on join.
     */
    @Test
    public void testEndPointFederationForkJoinSumDownsamplerWithTag() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:sum:15m-sum");
        List<String> readEndPoints = new ArrayList<String>();

        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        // Canned OpenTSDB-style responses: endpoint 1 returns machineHost1 and
        // machineHost3; endpoint 2 returns machineHost2 and machineHost3.
        String content1 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost1\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4.940423168E9}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost3\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":4.940423168E9}}]";
        String content2 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost2\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4.940423168E9}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost3\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":4.940423168E9}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);

        // One sub-query per read endpoint.
        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());

        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));

        // NOTE(review): getMetricsFromMetricString is defined elsewhere in this
        // class — presumably parses the JSON via _mapper; verify below this view.
        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));

        Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);

        assertEquals(1, queryMetricsMap.size());
        assertEquals(3, queryMetricsMap.get(queries.get(0)).size()); // Three time series
        assertEquals("{host=machineHost1}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        assertEquals("{1477386300=4.940423168E9}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
        assertEquals("{host=machineHost2}", queryMetricsMap.get(queries.get(0)).get(1).getTags().toString());
        assertEquals("{1477386300=4.940423168E9}", queryMetricsMap.get(queries.get(0)).get(1).getDatapoints().toString());
        // machineHost3 appeared on both endpoints: its datapoints are merged.
        assertEquals("{host=machineHost3}", queryMetricsMap.get(queries.get(0)).get(2).getTags().toString());
        assertEquals("{1477386500=4.940423168E9, 1477386600=4.940423168E9}",
                queryMetricsMap.get(queries.get(0)).get(2).getDatapoints().toString());
    }

    /**
     * Endpoint federation with a SUM downsampler and no distinguishing tags:
     * all series collapse into one on join, summing overlapping datapoints.
     */
    @Test
    public void testEndPointFederationForkJoinSumDownsamplerWithNoTag() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:sum:15m-sum");
        List<String> readEndPoints = new ArrayList<String>();

        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        // No host tag this time, so the join cannot keep series apart.
        String content1 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":3}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":6}}]";
        String content2 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":7}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);

        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());

        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));

        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));

        Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);

        assertEquals(1, queryMetricsMap.size());
        assertEquals(1, queryMetricsMap.get(queries.get(0)).size()); // One time series, since no tag specified
        assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        // 1477386300 occurs on both endpoints (3 + 4 = 7.0); the others pass through.
        assertEquals("{1477386300=7.0, 1477386500=6.0, 1477386600=7.0}",
                queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
    }

    /**
     * Endpoint federation with a MIN downsampler.
     * NOTE(review): this method continues beyond the visible portion of the file.
     */
    @Test
    public void testEndPointFederationForkJoinMinDownsampler() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:sum:15m-min");
        List<String> readEndPoints = new ArrayList<String>();

        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        String content1 =
"[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":3}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":6}}]";
        // Overlap at timestamp 1477386300: endpoint 1 reports 3, endpoint 2 reports 4.
        String content2 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":7}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);
        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());
        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));
        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));
        Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
        assertEquals(1, queryMetricsMap.size());
        assertEquals(1, queryMetricsMap.get(queries.get(0)).size()); // One time series, since no tag specified
        assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        // min(3, 4) = 3.0 at the overlapping timestamp.
        assertEquals("{1477386300=3.0, 1477386500=6.0, 1477386600=7.0}",
                queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
    }

    // Max downsampler: the overlapping timestamp keeps the larger of the two values.
    @Test
    public void testEndPointFederationForkJoinMaxDownsampler() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:sum:15m-max");
        List<String> readEndPoints = new ArrayList<String>();
        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        String content1 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":3}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":6}}]";
        String content2 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":7}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);
        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());
        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));
        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));
        Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
        assertEquals(1, queryMetricsMap.size());
        assertEquals(1, queryMetricsMap.get(queries.get(0)).size()); // One time series, since no tag specified
        assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        // max(3, 4) = 4.0 at the overlapping timestamp.
        assertEquals("{1477386300=4.0, 1477386500=6.0, 1477386600=7.0}",
                queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
    }

    // Count downsampler with host tags: per-host series stay separate, and
    // machineHost3's counts from both endpoints land in one series.
    @Test
    public void testEndPointFederationForkJoinCountDownsampler() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:count:15m-count");
        List<String> readEndPoints = new ArrayList<String>();
        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        String content1 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost1\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":1}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost3\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":1}}]";
        String content2 =
"[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost2\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":1}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"host\":\"machineHost3\",\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":1}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);
        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());
        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));
        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));
        Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
        assertEquals(1, queryMetricsMap.size());
        assertEquals(3, queryMetricsMap.get(queries.get(0)).size()); // Three time series
        assertEquals("{host=machineHost1}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        assertEquals("{1477386300=1.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
        assertEquals("{host=machineHost2}", queryMetricsMap.get(queries.get(0)).get(1).getTags().toString());
        assertEquals("{1477386300=1.0}", queryMetricsMap.get(queries.get(0)).get(1).getDatapoints().toString());
        assertEquals("{host=machineHost3}", queryMetricsMap.get(queries.get(0)).get(2).getTags().toString());
        // machineHost3 counts from both endpoints merged into one series.
        assertEquals("{1477386500=1.0, 1477386600=1.0}", queryMetricsMap.get(queries.get(0)).get(2).getDatapoints().toString());
    }

    // Count downsampler with no grouping tag: one merged series; the overlapping
    // timestamp 1477386300 (values 3 and 4) comes out as 7.0 per the assert below.
    @Test
    public
    void testEndPointFederationForkJoinCountDownsamplerWithNoTag() {
        MetricService metricService = system.getServiceFactory().getMetricService();
        List<MetricQuery> queries = metricService.getQueries("-1h:scope:metric{tagk=tagv}:count:15m-count");
        List<String> readEndPoints = new ArrayList<String>();
        readEndPoints.add("http://localhost:4477");
        readEndPoints.add("http://localhost:4488");
        String content1 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":3}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386500\":6}}]";
        String content2 = "[{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E000000000001000000000003000000000002000000000002\"],\"dps\":{\"1477386300\":4}},{\"metric\":\"mem.heap.used-__-argus.jvm\",\"tags\":{\"meta\":\"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOiJieXRlcyJ9\"},\"aggregateTags\":[],\"tsuids\":[\"00000000000E00000000000100000000000300000000000200000000000A\"],\"dps\":{\"1477386600\":7}}]";
        QueryFederation queryFederation = new EndPointQueryFederation(readEndPoints);
        Map<MetricQuery, List<MetricQuery>> mapQuerySubQueries = queryFederation.federateQueries(queries);
        assertEquals(2, mapQuerySubQueries.get(queries.get(0)).size());
        Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<MetricQuery, List<Metric>>();
        List<MetricQuery> subQueries = mapQuerySubQueries.get(queries.get(0));
        subQueryMetricsMap.put(subQueries.get(0), getMetricsFromMetricString(content1));
        subQueryMetricsMap.put(subQueries.get(1), getMetricsFromMetricString(content2));
        Map<MetricQuery,
        List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
        assertEquals(1, queryMetricsMap.size());
        assertEquals(1, queryMetricsMap.get(queries.get(0)).size()); // One time series, since no tag specified
        assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
        assertEquals("{1477386300=7.0, 1477386500=6.0, 1477386600=7.0}",
                queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
    }

    // Deserializes a JSON ResultSet payload into its list of Metrics; fails the
    // test immediately if the payload cannot be parsed.
    private List<Metric> getMetricsFromMetricString(String content){
        List<Metric> metrics = null;
        try {
            ResultSet set = _mapper.readValue(content, new TypeReference<ResultSet>() { });
            metrics = set.getMetrics();
        } catch (IOException e) {
            fail("Failed to deserialize metrics. Reason: " + e.getMessage());
        }
        return metrics;
    }
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.kstream.internals; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.kstream.JoinWindows; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KStreamBuilder; import org.apache.kafka.streams.processor.internals.ProcessorRecordContext; import org.apache.kafka.test.KStreamTestDriver; import org.apache.kafka.test.MockProcessorContext; import org.apache.kafka.test.MockProcessorSupplier; import org.apache.kafka.test.MockValueJoiner; import org.apache.kafka.test.TestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import static org.junit.Assert.assertEquals; public class KStreamKStreamJoinTest { final private String topic1 = "topic1"; final private String topic2 = "topic2"; final private Serde<Integer> intSerde = Serdes.Integer(); final private Serde<String> stringSerde = Serdes.String(); private KStreamTestDriver driver = null; private File stateDir = null; @After public 
void tearDown() {
        if (driver != null) {
            driver.close();
        }
        driver = null;
    }

    @Before
    public void setUp() throws IOException {
        stateDir = TestUtils.tempDirectory("kafka-test");
    }

    // Inner join: a result is emitted only when the key has a counterpart in
    // the other stream's window.
    @Test
    public void testJoin() throws Exception {
        KStreamBuilder builder = new KStreamBuilder();

        final int[] expectedKeys = new int[]{0, 1, 2, 3};

        KStream<Integer, String> stream1;
        KStream<Integer, String> stream2;
        KStream<Integer, String> joined;
        MockProcessorSupplier<Integer, String> processor;

        processor = new MockProcessorSupplier<>();
        stream1 = builder.stream(intSerde, stringSerde, topic1);
        stream2 = builder.stream(intSerde, stringSerde, topic2);
        joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde);
        joined.process(processor);

        Collection<Set<String>> copartitionGroups = builder.copartitionGroups();

        assertEquals(1, copartitionGroups.size());
        assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());

        driver = new KStreamTestDriver(builder, stateDir);
        driver.setTime(0L);

        // push two items to the primary stream. the other window is empty
        // w1 = {}
        // w2 = {}
        // --> w1 = { 0:X0, 1:X1 }
        // w2 = {}
        for (int i = 0; i < 2; i++) {
            driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();

        // push two items to the other stream. this should produce two items.
        // w1 = { 0:X0, 1:X1 }
        // w2 = {}
        // --> w1 = { 0:X0, 1:X1 }
        // w2 = { 0:Y0, 1:Y1 }
        for (int i = 0; i < 2; i++) {
            driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");

        // push all four items to the primary stream. this should produce two items.
        // w1 = { 0:X0, 1:X1 }
        // w2 = { 0:Y0, 1:Y1 }
        // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 }
        // w2 = { 0:Y0, 1:Y1 }
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "X" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");

        // push all items to the other stream. this should produce six items.
        // w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 }
        // w2 = { 0:Y0, 1:Y1 }
        // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 }
        // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "0:X0+YY0", "1:X1+YY1", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        // push all four items to the primary stream. this should produce six items.
        // w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 }
        // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
        // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 }
        // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0", "0:XX0+YY0", "1:XX1+Y1", "1:XX1+YY1", "2:XX2+YY2", "3:XX3+YY3");

        // push two items to the other stream. this should produce six items.
// w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 } // w2 = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 } // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3, 0:YYY0, 1:YYY1 } for (int i = 0; i < 2; i++) { driver.process(topic2, expectedKeys[i], "YYY" + expectedKeys[i]); } processor.checkAndClearProcessResult("0:X0+YYY0", "0:X0+YYY0", "0:XX0+YYY0", "1:X1+YYY1", "1:X1+YYY1", "1:XX1+YYY1"); } @Test public void testOuterJoin() throws Exception { KStreamBuilder builder = new KStreamBuilder(); final int[] expectedKeys = new int[]{0, 1, 2, 3}; KStream<Integer, String> stream1; KStream<Integer, String> stream2; KStream<Integer, String> joined; MockProcessorSupplier<Integer, String> processor; processor = new MockProcessorSupplier<>(); stream1 = builder.stream(intSerde, stringSerde, topic1); stream2 = builder.stream(intSerde, stringSerde, topic2); joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde); joined.process(processor); Collection<Set<String>> copartitionGroups = builder.copartitionGroups(); assertEquals(1, copartitionGroups.size()); assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next()); driver = new KStreamTestDriver(builder, stateDir); driver.setTime(0L); // push two items to the primary stream. the other window is empty.this should produce two items // w1 = {} // w2 = {} // --> w1 = { 0:X0, 1:X1 } // w2 = {} for (int i = 0; i < 2; i++) { driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]); } processor.checkAndClearProcessResult("0:X0+null", "1:X1+null"); // push two items to the other stream. this should produce two items. 
// w1 = { 0:X0, 1:X1 } // w2 = {} // --> w1 = { 0:X0, 1:X1 } // w2 = { 0:Y0, 1:Y1 } for (int i = 0; i < 2; i++) { driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]); } processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1"); // push all four items to the primary stream. this should produce four items. // w1 = { 0:X0, 1:X1 } // w2 = { 0:Y0, 1:Y1 } // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 } // w2 = { 0:Y0, 1:Y1 } for (int expectedKey : expectedKeys) { driver.process(topic1, expectedKey, "X" + expectedKey); } processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1", "2:X2+null", "3:X3+null"); // push all items to the other stream. this should produce six items. // w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 } // w2 = { 0:Y0, 1:Y1 } // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 } // w2 = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 } for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0", "0:X0+YY0", "1:X1+YY1", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3"); // push all four items to the primary stream. this should produce six items. // w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3 } // w2 = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 } // w2 = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 } for (int expectedKey : expectedKeys) { driver.process(topic1, expectedKey, "XX" + expectedKey); } processor.checkAndClearProcessResult("0:XX0+Y0", "0:XX0+YY0", "1:XX1+Y1", "1:XX1+YY1", "2:XX2+YY2", "3:XX3+YY3"); // push two items to the other stream. this should produce six item. 
// w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 }
        // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
        // --> w1 = { 0:X0, 1:X1, 0:X0, 1:X1, 2:X2, 3:X3, 0:XX0, 1:XX1, 2:XX2, 3:XX3 }
        // w2 = { 0:Y0, 1:Y1, 0:YY0, 1:YY1, 2:YY2, 3:YY3, 0:YYY0, 1:YYY1 }
        for (int i = 0; i < 2; i++) {
            driver.process(topic2, expectedKeys[i], "YYY" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult("0:X0+YYY0", "0:X0+YYY0", "0:XX0+YYY0", "1:X1+YYY1", "1:X1+YYY1", "1:XX1+YYY1");
    }

    // Symmetric join window of 100: as record time advances past the window,
    // older records on the opposite side stop matching one by one; moving the
    // record time back re-admits them.
    @Test
    public void testWindowing() throws Exception {
        long time = 0L;

        KStreamBuilder builder = new KStreamBuilder();

        final int[] expectedKeys = new int[]{0, 1, 2, 3};

        KStream<Integer, String> stream1;
        KStream<Integer, String> stream2;
        KStream<Integer, String> joined;
        MockProcessorSupplier<Integer, String> processor;

        processor = new MockProcessorSupplier<>();
        stream1 = builder.stream(intSerde, stringSerde, topic1);
        stream2 = builder.stream(intSerde, stringSerde, topic2);
        joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde);
        joined.process(processor);

        Collection<Set<String>> copartitionGroups = builder.copartitionGroups();

        assertEquals(1, copartitionGroups.size());
        assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());

        driver = new KStreamTestDriver(builder, stateDir);

        // push two items to the primary stream. the other window is empty. this should produce no items.
        // w1 = {}
        // w2 = {}
        // --> w1 = { 0:X0, 1:X1 }
        // w2 = {}
        setRecordContext(time, topic1);
        for (int i = 0; i < 2; i++) {
            driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();

        // push two items to the other stream. this should produce two items.
        // w1 = { 0:X0, 1:X1 }
        // w2 = {}
        // --> w1 = { 0:X0, 1:X1 }
        // w2 = { 0:Y0, 1:Y1 }
        setRecordContext(time, topic2);
        for (int i = 0; i < 2; i++) {
            driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");

        // clear logically
        time = 1000L;
        setRecordContext(time, topic1);
        // X records land at times 1000..1003 (one key per millisecond).
        for (int i = 0; i < expectedKeys.length; i++) {
            setRecordContext(time + i, topic1);
            driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();

        // gradually expires items in w1
        // w1 = { 0:X0, 1:X1, 2:X2, 3:X3 }
        time = 1000 + 100L;
        setRecordContext(time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult();

        // go back to the time before expiration
        time = 1000L - 100L - 1L;
        setRecordContext(time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult();

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        // clear (logically)
        time = 2000L;
        // Y records land at times 2000..2003 (one key per millisecond).
        for (int i = 0; i < expectedKeys.length; i++) {
            setRecordContext(time + i, topic2);
            driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();

        // gradually expires items in w2
        // w2 = { 0:Y0, 1:Y1, 2:Y2, 3:Y3 }
        time = 2000L + 100L;
        setRecordContext(time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("2:XX2+Y2", "3:XX3+Y3");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("3:XX3+Y3");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult();

        // go back to the time before expiration
        time =
2000L - 100L - 1L;
        setRecordContext(time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult();

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2");

        setRecordContext(++time, topic1);
        for (int expectedKey : expectedKeys) {
            driver.process(topic1, expectedKey, "XX" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");
    }

    // Asymmetric window of(0).after(100): per the asserts below, a topic2
    // record at time t2 joins a topic1 record at time t1 only when
    // t1 <= t2 <= t1 + 100.
    @Test
    public void testAsymetricWindowingAfter() throws Exception {
        long time = 1000L;

        KStreamBuilder builder = new KStreamBuilder();

        final int[] expectedKeys = new int[]{0, 1, 2, 3};

        KStream<Integer, String> stream1;
        KStream<Integer, String> stream2;
        KStream<Integer, String> joined;
        MockProcessorSupplier<Integer, String> processor;

        processor = new MockProcessorSupplier<>();
        stream1 = builder.stream(intSerde, stringSerde, topic1);
        stream2 = builder.stream(intSerde, stringSerde, topic2);
        joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).after(100), intSerde, stringSerde, stringSerde);
        joined.process(processor);

        Collection<Set<String>> copartitionGroups = builder.copartitionGroups();

        assertEquals(1, copartitionGroups.size());
        assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());

        driver = new KStreamTestDriver(builder, stateDir);

        // X records land at times 1000..1003 (one key per millisecond).
        for (int i = 0; i < expectedKeys.length; i++) {
            setRecordContext(time + i, topic1);
            driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();

        // At time 999 nothing is in range; each +1ms admits one more key.
        time = 1000L - 1L;
        setRecordContext(time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult();

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        // From time 1100 upward each +1ms drops the oldest key out of range.
        time = 1000 + 100L;
        setRecordContext(time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
            driver.process(topic2, expectedKey, "YY" + expectedKey);
        }
        processor.checkAndClearProcessResult("3:X3+YY3");

        setRecordContext(++time, topic2);
        for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult(); } @Test public void testAsymetricWindowingBefore() throws Exception { long time = 1000L; KStreamBuilder builder = new KStreamBuilder(); final int[] expectedKeys = new int[]{0, 1, 2, 3}; KStream<Integer, String> stream1; KStream<Integer, String> stream2; KStream<Integer, String> joined; MockProcessorSupplier<Integer, String> processor; processor = new MockProcessorSupplier<>(); stream1 = builder.stream(intSerde, stringSerde, topic1); stream2 = builder.stream(intSerde, stringSerde, topic2); joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).before(100), intSerde, stringSerde, stringSerde); joined.process(processor); Collection<Set<String>> copartitionGroups = builder.copartitionGroups(); assertEquals(1, copartitionGroups.size()); assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next()); driver = new KStreamTestDriver(builder, stateDir); for (int i = 0; i < expectedKeys.length; i++) { setRecordContext(time + i, topic1); driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]); } processor.checkAndClearProcessResult(); time = 1000L - 100L - 1L; setRecordContext(time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult(); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2"); 
setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3"); time = 1000L; setRecordContext(time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult("3:X3+YY3"); setRecordContext(++time, topic2); for (int expectedKey : expectedKeys) { driver.process(topic2, expectedKey, "YY" + expectedKey); } processor.checkAndClearProcessResult(); } private void setRecordContext(final long time, final String topic) { ((MockProcessorContext) driver.context()).setRecordContext(new ProcessorRecordContext(time, 0, 0, topic)); } }
/**
 * JBoss, Home of Professional Open Source
 * Copyright Red Hat, Inc., and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.aerogear.unifiedpush.message.sender;

import com.notnoop.apns.APNS;
import com.notnoop.apns.ApnsDelegateAdapter;
import com.notnoop.apns.ApnsNotification;
import com.notnoop.apns.ApnsService;
import com.notnoop.apns.ApnsServiceBuilder;
import com.notnoop.apns.DeliveryError;
import com.notnoop.apns.EnhancedApnsNotification;
import com.notnoop.apns.PayloadBuilder;
import com.notnoop.apns.internal.Utilities;
import com.notnoop.exceptions.ApnsDeliveryErrorException;
import org.jboss.aerogear.unifiedpush.api.Variant;
import org.jboss.aerogear.unifiedpush.api.iOSVariant;
import org.jboss.aerogear.unifiedpush.message.Message;
import org.jboss.aerogear.unifiedpush.message.UnifiedPushMessage;
import org.jboss.aerogear.unifiedpush.message.apns.APNs;
import org.jboss.aerogear.unifiedpush.service.ClientInstallationService;
import org.jboss.aerogear.unifiedpush.utils.AeroGearLogger;

import javax.inject.Inject;
import java.io.ByteArrayInputStream;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import static org.jboss.aerogear.unifiedpush.message.util.ConfigurationUtils.tryGetProperty;
import static org.jboss.aerogear.unifiedpush.message.util.ConfigurationUtils.tryGetIntegerProperty;

/**
 * {@code PushNotificationSender} for Apple's Push Notification service (APNs),
 * bound to {@link iOSVariant} via the {@code @SenderType} annotation. Builds the
 * APNs JSON payload from a {@link UnifiedPushMessage} and submits it through the
 * java-apns library.
 */
@SenderType(iOSVariant.class)
public class APNsPushNotificationSender implements PushNotificationSender {

    // System property keys that let tests (or special deployments) redirect the
    // push and feedback gateways away from Apple's servers.
    public static final String CUSTOM_AEROGEAR_APNS_PUSH_HOST = "custom.aerogear.apns.push.host";
    public static final String CUSTOM_AEROGEAR_APNS_PUSH_PORT = "custom.aerogear.apns.push.port";
    private static final String CUSTOM_AEROGEAR_APNS_FEEDBACK_HOST = "custom.aerogear.apns.feedback.host";
    private static final String CUSTOM_AEROGEAR_APNS_FEEDBACK_PORT = "custom.aerogear.apns.feedback.port";

    // Resolved once at class-load time; each is null when the property is not set.
    private static final String customAerogearApnsPushHost = tryGetProperty(CUSTOM_AEROGEAR_APNS_PUSH_HOST);
    private static final Integer customAerogearApnsPushPort = tryGetIntegerProperty(CUSTOM_AEROGEAR_APNS_PUSH_PORT);
    private static final String customAerogearApnsFeedbackHost = tryGetProperty(CUSTOM_AEROGEAR_APNS_FEEDBACK_HOST);
    private static final Integer customAerogearApnsFeedbackPort = tryGetIntegerProperty(CUSTOM_AEROGEAR_APNS_FEEDBACK_PORT);

    private final AeroGearLogger logger = AeroGearLogger.getInstance(APNsPushNotificationSender.class);

    // Used to prune installations whose device tokens APNs reports as inactive or invalid.
    @Inject
    private ClientInstallationService clientInstallationService;

    /**
     * Sends APNs notifications ({@link UnifiedPushMessage}) to all devices, that are represented by
     * the {@link Collection} of tokens for the given {@link iOSVariant}.
*/ public void sendPushMessage(final Variant variant, final Collection<String> tokens, final UnifiedPushMessage pushMessage, final NotificationSenderCallback callback) { // no need to send empty list if (tokens.isEmpty()) { return; } final iOSVariant iOSVariant = (iOSVariant) variant; Message message = pushMessage.getMessage(); APNs apns = message.getApns(); PayloadBuilder builder = APNS.newPayload() // adding recognized key values .alertBody(message.getAlert()) // alert dialog, in iOS or Safari .badge(message.getBadge()) // little badge icon update; .sound(message.getSound()) // sound to be played by app .alertTitle(apns.getTitle()) // The title of the notification in Safari and Apple Watch .alertAction(apns.getAction()) // The label of the action button, if the user sets the notifications to appear as alerts in Safari. .urlArgs(apns.getUrlArgs()) .category(apns.getActionCategory()) // iOS8: User Action category .localizedTitleKey(apns.getLocalizedTitleKey()); //iOS8 : Localized Title Key //this kind of check should belong in java-apns if(apns.getLocalizedTitleArguments() != null) { builder .localizedArguments(apns.getLocalizedTitleArguments()); //iOS8 : Localized Title Arguments; } // apply the 'content-available:1' value: if (apns.isContentAvailable()) { // content-available is for 'silent' notifications and Newsstand builder = builder.instantDeliveryOrSilentNotification(); } builder = builder.customFields(message.getUserData()); // adding other (submitted) fields // we are done with adding values here, before building let's check if the msg is too long if (builder.isTooLong()) { // invoke the error callback and return, as it is pointless to send something out callback.onError("Nothing sent to APNs since the payload is too large"); return; } // all good, let's build the JSON payload for APNs final String apnsMessage = builder.build(); ApnsService service = buildApnsService(iOSVariant, callback); if (service != null) { try { logger.fine("Sending transformed APNs 
payload: " + apnsMessage); // send: service.start(); Date expireDate = createFutureDateBasedOnTTL(pushMessage.getConfig().getTimeToLive()); service.push(tokens, apnsMessage, expireDate); logger.info("Message to APNs has been submitted"); // after sending, let's ask for the inactive tokens: final Set<String> inactiveTokens = service.getInactiveDevices().keySet(); // transform the tokens to be all lower-case: final Set<String> transformedTokens = lowerCaseAllTokens(inactiveTokens); // trigger asynchronous deletion: if (! transformedTokens.isEmpty()) { logger.info("Deleting '" + inactiveTokens.size() + "' invalid iOS installations"); clientInstallationService.removeInstallationsForVariantByDeviceTokens(iOSVariant.getVariantID(), transformedTokens); } callback.onSuccess(); } catch (Exception e) { callback.onError("Error sending payload to APNs server: " + e.getMessage()); } finally { // tear down and release resources: service.stop(); } } else { callback.onError("No certificate was found. Could not send messages to APNs"); } } /** * Helper method that creates a future {@link Date}, based on the given ttl/time-to-live value. * If no TTL was provided, we use the max date from the APNs library */ private Date createFutureDateBasedOnTTL(int ttl) { // no TTL was specified on the payload, we use the MAX Default from the APNs library: if (ttl == -1) { return new Date(System.currentTimeMillis() + EnhancedApnsNotification.MAXIMUM_EXPIRY * 1000L); } else { // apply the given TTL to the current time return new Date(System.currentTimeMillis() + ttl); } } /** * The Java-APNs lib returns the tokens in UPPERCASE format, however, the iOS Devices submit the token in * LOWER CASE format. 
This helper method performs a transformation */ private Set<String> lowerCaseAllTokens(Set<String> inactiveTokens) { final Set<String> lowerCaseTokens = new HashSet<String>(); for (String token : inactiveTokens) { lowerCaseTokens.add(token.toLowerCase()); } return lowerCaseTokens; } /** * Returns the ApnsService, based on the required profile (production VS sandbox/test). * Null is returned if there is no "configuration" for the request stage */ private ApnsService buildApnsService(final iOSVariant iOSVariant, final NotificationSenderCallback notificationSenderCallback) { // this check should not be needed, but you never know: if (iOSVariant.getCertificate() != null && iOSVariant.getPassphrase() != null) { final ApnsServiceBuilder builder = APNS.newService(); // using the APNS Delegate callback to log success/failure for each token: builder.withDelegate(new ApnsDelegateAdapter() { @Override public void messageSent(ApnsNotification message, boolean resent) { // Invoked for EVERY devicetoken: logger.finest("Sending APNs message to: " + message.getDeviceToken()); } @Override public void messageSendFailed(ApnsNotification message, Throwable e) { if (e.getClass().isAssignableFrom(ApnsDeliveryErrorException.class)) { ApnsDeliveryErrorException deliveryError = (ApnsDeliveryErrorException) e; if (DeliveryError.INVALID_TOKEN.equals(deliveryError.getDeliveryError())) { final String invalidToken = Utilities.encodeHex(message.getDeviceToken()).toLowerCase(); logger.info("Removing invalid token: " + invalidToken); clientInstallationService.removeInstallationForVariantByDeviceToken(iOSVariant.getVariantID(), invalidToken); } else { // for now, we just log the other cases logger.severe("Error sending payload to APNs server", e); } } } }); // add the certificate: try { ByteArrayInputStream stream = new ByteArrayInputStream(iOSVariant.getCertificate()); builder.withCert(stream, iOSVariant.getPassphrase()); // release the stream stream.close(); } catch (Exception e) { 
logger.severe("Error reading certificate", e); // indicating an incomplete service return null; } configureDestinations(iOSVariant, builder); // create the service return builder.build(); } // null if, why ever, there was no cert/passphrase return null; } /** * Configure the Gateway to the Apns servers. * Default gateway and port can be override with respectively : * - custom.aerogear.apns.push.host * - custom.aerogear.apns.push.port * * Feedback gateway and port can be override with respectively : * - custom.aerogear.apns.feedback.host * - custom.aerogear.apns.feedback.port * @param iOSVariant * @param builder */ private void configureDestinations(iOSVariant iOSVariant, ApnsServiceBuilder builder) { // pick the destination, based on submitted profile: builder.withAppleDestination(iOSVariant.isProduction()); //Is the gateway host&port provided by a system property, for tests ? if(customAerogearApnsPushHost != null){ int port = Utilities.SANDBOX_GATEWAY_PORT; if(customAerogearApnsPushPort != null) { port = customAerogearApnsPushPort; } builder.withGatewayDestination(customAerogearApnsPushHost, port); } //Is the feedback gateway provided by a system property, for tests ? if(customAerogearApnsFeedbackHost != null){ int port = Utilities.SANDBOX_FEEDBACK_PORT; if(customAerogearApnsFeedbackPort != null) { port = customAerogearApnsFeedbackPort; } builder.withFeedbackDestination(customAerogearApnsFeedbackHost, port); } } }
/* * Copyright (C) 2004-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.util.cache; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import org.jivesoftware.openfire.XMPPServer; import org.jivesoftware.openfire.XMPPServerListener; import org.jivesoftware.openfire.cluster.ClusterEventListener; import org.jivesoftware.openfire.cluster.ClusterManager; import org.jivesoftware.openfire.cluster.ClusterNodeInfo; import org.jivesoftware.openfire.container.Plugin; import org.jivesoftware.openfire.container.PluginClassLoader; import org.jivesoftware.openfire.container.PluginManager; import org.jivesoftware.util.InitializationException; import org.jivesoftware.util.JiveConstants; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.util.PropertyEventDispatcher; import org.jivesoftware.util.PropertyEventListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.packet.JID; /** * Creates Cache objects. The returned caches will either be local or clustered * depending on the clustering enabled setting and a user's license. 
* * <p>When clustered caching is turned on, cache usage statistics for all caches * that have been created are periodically published to the clustered cache * named "opt-$cacheStats".</p> * */ @SuppressWarnings("rawtypes") public class CacheFactory { private static final Logger log = LoggerFactory.getLogger(CacheFactory.class); public static String LOCAL_CACHE_PROPERTY_NAME = "cache.clustering.local.class"; public static String CLUSTERED_CACHE_PROPERTY_NAME = "cache.clustering.clustered.class"; private static boolean clusteringStarted = false; private static boolean clusteringStarting = false; /** * Storage for all caches that get created. */ private static Map<String, Cache> caches = new ConcurrentHashMap<>(); private static List<String> localOnly = Collections.synchronizedList(new ArrayList<String>()); private static String localCacheFactoryClass; private static String clusteredCacheFactoryClass; private static CacheFactoryStrategy cacheFactoryStrategy = new DefaultLocalCacheStrategy(); private static CacheFactoryStrategy localCacheFactoryStrategy; private static CacheFactoryStrategy clusteredCacheFactoryStrategy; private static Thread statsThread; public static final int DEFAULT_MAX_CACHE_SIZE = 1024 * 256; public static final long DEFAULT_MAX_CACHE_LIFETIME = 6 * JiveConstants.HOUR; /** * This map contains property names which were used to store cache configuration data * in local xml properties in previous versions. */ private static final Map<String, String> cacheNames = new HashMap<>(); /** * Default properties to use for local caches. Default properties can be overridden * by setting the corresponding system properties. 
*/ private static final Map<String, Long> cacheProps = new HashMap<>(); private static final String PROPERTY_PREFIX_CACHE = "cache."; private static final String PROPERTY_SUFFIX_MAX_LIFE_TIME = ".maxLifetime"; private static final String PROPERTY_SUFFIX_SIZE = ".size"; private static final String PROPERTY_SUFFIX_TYPE = ".type"; private static final String PROPERTY_SUFFIX_MIN = ".min"; static { localCacheFactoryClass = JiveGlobals.getProperty(LOCAL_CACHE_PROPERTY_NAME, "org.jivesoftware.util.cache.DefaultLocalCacheStrategy"); clusteredCacheFactoryClass = JiveGlobals.getProperty(CLUSTERED_CACHE_PROPERTY_NAME, "org.jivesoftware.openfire.plugin.util.cache.ClusteredCacheFactory"); cacheNames.put("Favicon Hits", "faviconHits"); cacheNames.put("Favicon Misses", "faviconMisses"); cacheNames.put("Group", "group"); cacheNames.put("Group Metadata Cache", "groupMeta"); cacheNames.put("Javascript Cache", "javascript"); cacheNames.put("Last Activity Cache", "lastActivity"); cacheNames.put("Multicast Service", "multicast"); cacheNames.put("Offline Message Size", "offlinemessage"); cacheNames.put("Offline Presence Cache", "offlinePresence"); cacheNames.put("Privacy Lists", "listsCache"); cacheNames.put("Remote Users Existence", "remoteUsersCache"); cacheNames.put("Roster", "username2roster"); cacheNames.put("RosterItems", "username2rosterItems"); cacheNames.put("User", "userCache"); cacheNames.put("Locked Out Accounts", "lockOutCache"); cacheNames.put("VCard", "vcardCache"); cacheNames.put("File Transfer Cache", "fileTransfer"); cacheNames.put("File Transfer", "transferProxy"); cacheNames.put("POP3 Authentication", "pop3"); cacheNames.put("LDAP Authentication", "ldap"); cacheNames.put("Routing Servers Cache", "routeServer"); cacheNames.put("Routing Components Cache", "routeComponent"); cacheNames.put("Routing Users Cache", "routeUser"); cacheNames.put("Routing AnonymousUsers Cache", "routeAnonymousUser"); cacheNames.put("Routing User Sessions", "routeUserSessions"); 
cacheNames.put("Components Sessions", "componentsSessions"); cacheNames.put("Connection Managers Sessions", "connManagerSessions"); cacheNames.put("Incoming Server Sessions", "incServerSessions"); cacheNames.put("Sessions by Hostname", "sessionsHostname"); cacheNames.put("Secret Keys Cache", "secretKeys"); cacheNames.put("Validated Domains", "validatedDomains"); cacheNames.put("Directed Presences", "directedPresences"); cacheNames.put("Disco Server Features", "serverFeatures"); cacheNames.put("Disco Server Items", "serverItems"); cacheNames.put("Remote Server Configurations", "serversConfigurations"); cacheNames.put("Entity Capabilities", "entityCapabilities"); cacheNames.put("Entity Capabilities Users", "entityCapabilitiesUsers"); cacheNames.put("PEPServiceManager", "pepServiceManager"); cacheNames.put("Published Items", "publishedItems"); cacheNames.put("JID Node-parts", "jidNodeprep"); cacheNames.put("JID Domain-parts", "jidDomainprep"); cacheNames.put("JID Resource-parts", "jidResourceprep"); cacheProps.put(PROPERTY_PREFIX_CACHE + "fileTransfer" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "fileTransfer" + PROPERTY_SUFFIX_MAX_LIFE_TIME, 1000 * 60 * 10L); cacheProps.put(PROPERTY_PREFIX_CACHE + "multicast" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "multicast" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.DAY); cacheProps.put(PROPERTY_PREFIX_CACHE + "offlinemessage" + PROPERTY_SUFFIX_SIZE, 100 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "offlinemessage" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.HOUR * 12); cacheProps.put(PROPERTY_PREFIX_CACHE + "pop3" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "pop3" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.HOUR); cacheProps.put(PROPERTY_PREFIX_CACHE + "transferProxy" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "transferProxy" + PROPERTY_SUFFIX_MAX_LIFE_TIME, 1000 * 60 * 10L); 
cacheProps.put(PROPERTY_PREFIX_CACHE + "group" + PROPERTY_SUFFIX_SIZE, 1024 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "group" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 15); cacheProps.put(PROPERTY_PREFIX_CACHE + "lockOutCache" + PROPERTY_SUFFIX_SIZE, 1024 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "lockOutCache" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 15); cacheProps.put(PROPERTY_PREFIX_CACHE + "groupMeta" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "groupMeta" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 15); cacheProps.put(PROPERTY_PREFIX_CACHE + "username2roster" + PROPERTY_SUFFIX_SIZE, 1024 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "username2roster" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 30); cacheProps.put(PROPERTY_PREFIX_CACHE + "username2rosterItems" + PROPERTY_SUFFIX_SIZE, 1024 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "username2rosterItems" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 10); cacheProps.put(PROPERTY_PREFIX_CACHE + "javascript" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "javascript" + PROPERTY_SUFFIX_MAX_LIFE_TIME, 3600 * 24 * 10L); cacheProps.put(PROPERTY_PREFIX_CACHE + "ldap" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "ldap" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.HOUR * 2); cacheProps.put(PROPERTY_PREFIX_CACHE + "listsCache" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "offlinePresence" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "lastActivity" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "userCache" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "userCache" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 30); cacheProps.put(PROPERTY_PREFIX_CACHE + "remoteUsersCache" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); 
cacheProps.put(PROPERTY_PREFIX_CACHE + "remoteUsersCache" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 30); cacheProps.put(PROPERTY_PREFIX_CACHE + "vcardCache" + PROPERTY_SUFFIX_SIZE, 512 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "faviconHits" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "faviconMisses" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeServer" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeServer" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeComponent" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeComponent" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeUser" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeUser" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeAnonymousUser" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeAnonymousUser" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeUserSessions" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "routeUserSessions" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "componentsSessions" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "componentsSessions" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "connManagerSessions" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "connManagerSessions" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "incServerSessions" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "incServerSessions" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "sessionsHostname" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + 
"sessionsHostname" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "secretKeys" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "secretKeys" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "validatedDomains" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "validatedDomains" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "directedPresences" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "directedPresences" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serverFeatures" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serverFeatures" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serverItems" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serverItems" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serversConfigurations" + PROPERTY_SUFFIX_SIZE, 128 * 1024L); cacheProps.put(PROPERTY_PREFIX_CACHE + "serversConfigurations" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 30); cacheProps.put(PROPERTY_PREFIX_CACHE + "entityCapabilities" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "entityCapabilities" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.DAY * 2); cacheProps.put(PROPERTY_PREFIX_CACHE + "entityCapabilitiesUsers" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "entityCapabilitiesUsers" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.DAY * 2); cacheProps.put(PROPERTY_PREFIX_CACHE + "pluginCacheInfo" + PROPERTY_SUFFIX_SIZE, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "pluginCacheInfo" + PROPERTY_SUFFIX_MAX_LIFE_TIME, -1L); cacheProps.put(PROPERTY_PREFIX_CACHE + "pepServiceManager" + PROPERTY_SUFFIX_SIZE, 1024L * 1024 * 10); cacheProps.put(PROPERTY_PREFIX_CACHE + "pepServiceManager" + PROPERTY_SUFFIX_MAX_LIFE_TIME, 
JiveConstants.MINUTE * 30); cacheProps.put(PROPERTY_PREFIX_CACHE + "publishedItems" + PROPERTY_SUFFIX_SIZE, 1024L * 1024 * 10); cacheProps.put(PROPERTY_PREFIX_CACHE + "publishedItems" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JiveConstants.MINUTE * 15); // The JID-based classes (wrappers for Caffeine caches) take their default values from whatever is hardcoded in the JID implementation. cacheProps.put(PROPERTY_PREFIX_CACHE + "jidNodeprep" + PROPERTY_SUFFIX_SIZE, JID.NODEPREP_CACHE.policy().eviction().get().getMaximum() ); cacheProps.put(PROPERTY_PREFIX_CACHE + "jidNodeprep" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JID.NODEPREP_CACHE.policy().expireAfterWrite().get().getExpiresAfter( TimeUnit.MILLISECONDS ) ); cacheProps.put(PROPERTY_PREFIX_CACHE + "jidDomainprep" + PROPERTY_SUFFIX_SIZE, JID.DOMAINPREP_CACHE.policy().eviction().get().getMaximum() ); cacheProps.put(PROPERTY_PREFIX_CACHE + "jidDomainprep" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JID.DOMAINPREP_CACHE.policy().expireAfterWrite().get().getExpiresAfter( TimeUnit.MILLISECONDS ) ); cacheProps.put(PROPERTY_PREFIX_CACHE + "jidResourceprep" + PROPERTY_SUFFIX_SIZE, JID.RESOURCEPREP_CACHE.policy().eviction().get().getMaximum() ); cacheProps.put(PROPERTY_PREFIX_CACHE + "jidResourceprep" + PROPERTY_SUFFIX_MAX_LIFE_TIME, JID.RESOURCEPREP_CACHE.policy().expireAfterWrite().get().getExpiresAfter( TimeUnit.MILLISECONDS ) ); PropertyEventDispatcher.addListener( new PropertyEventListener() { @Override public void propertySet( String property, Map<String, Object> params ) { final Cache cache = getCacheByProperty( property ); if ( cache == null ) { return; } if (property.endsWith(PROPERTY_SUFFIX_SIZE)) { final long size = getMaxCacheSize( cache.getName() ); cache.setMaxCacheSize( size ); } if (property.endsWith(PROPERTY_SUFFIX_MAX_LIFE_TIME)) { final long lifetime = getMaxCacheLifetime( cache.getName() ); cache.setMaxLifetime( lifetime ); } // Note that changes to 'min' and 'type' cannot be applied runtime - a restart is required for those. 
        }

        @Override
        public void propertyDeleted( String property, Map<String, Object> params ) {
            // A deleted property is handled the same way as a changed one: re-read the cache config.
            propertySet( property, params );
        }

        @Override
        public void xmlPropertySet( String property, Map<String, Object> params ) {
            propertySet( property, params );
        }

        @Override
        public void xmlPropertyDeleted( String property, Map<String, Object> params ) {
            propertySet( property, params );
        }
    } );
}

// Utility class: not meant to be instantiated.
private CacheFactory() {
}

/**
 * If a local property is found for the supplied name which specifies a value for cache size, it is returned.
 * Otherwise, the defaultSize argument is returned.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return either the property value or the default value.
 */
public static long getMaxCacheSize(String cacheName) {
    return getCacheProperty(cacheName, PROPERTY_SUFFIX_SIZE, DEFAULT_MAX_CACHE_SIZE);
}

/**
 * Sets a local property which overrides the maximum cache size for the
 * supplied cache name.
 *
 * @param cacheName the name of the cache to store a value for.
 * @param size the maximum cache size.
 */
public static void setMaxSizeProperty(String cacheName, long size) {
    // Property names never contain spaces; strip them from the cache name first.
    cacheName = cacheName.replaceAll(" ", "");
    // Only persist when the value actually changed, to avoid redundant property-change events.
    if ( !Long.toString(size).equals(JiveGlobals.getProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_SIZE))) {
        JiveGlobals.setProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_SIZE, Long.toString(size));
    }
}

/**
 * Returns true when an explicit max-size property exists (and parses as a long) for the named cache.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return true if a usable size property is set for the cache.
 */
public static boolean hasMaxSizeFromProperty(String cacheName) {
    return hasCacheProperty(cacheName, PROPERTY_SUFFIX_SIZE);
}

/**
 * If a local property is found for the supplied name which specifies a value for cache entry lifetime, it
 * is returned. Otherwise, the defaultLifetime argument is returned.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return either the property value or the default value.
 */
public static long getMaxCacheLifetime(String cacheName) {
    return getCacheProperty(cacheName, PROPERTY_SUFFIX_MAX_LIFE_TIME, DEFAULT_MAX_CACHE_LIFETIME);
}

/**
 * Sets a local property which overrides the maximum cache entry lifetime
 * for the supplied cache name.
 *
 * @param cacheName the name of the cache to store a value for.
 * @param lifetime the maximum cache entry lifetime.
 */
public static void setMaxLifetimeProperty(String cacheName, long lifetime) {
    cacheName = cacheName.replaceAll(" ", "");
    if ( !Long.toString(lifetime).equals(JiveGlobals.getProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_MAX_LIFE_TIME))) {
        JiveGlobals.setProperty((PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_MAX_LIFE_TIME), Long.toString(lifetime));
    }
}

/**
 * Returns true when an explicit max-lifetime property exists (and parses as a long) for the named cache.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return true if a usable lifetime property is set for the cache.
 */
public static boolean hasMaxLifetimeFromProperty(String cacheName) {
    return hasCacheProperty(cacheName, PROPERTY_SUFFIX_MAX_LIFE_TIME);
}

/**
 * Stores a local property that records the cache 'type' for the supplied cache name.
 * Writes only when the value differs from what is already stored.
 *
 * @param cacheName the name of the cache to store a value for.
 * @param type the cache type value to persist.
 */
public static void setCacheTypeProperty(String cacheName, String type) {
    cacheName = cacheName.replaceAll(" ", "");
    if ( !type.equals(JiveGlobals.getProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_TYPE))) {
        JiveGlobals.setProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_TYPE, type);
    }
}

/**
 * Returns the locally stored cache 'type' property for the supplied cache name, or null when unset.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return the stored type value, or null.
 */
public static String getCacheTypeProperty(String cacheName) {
    cacheName = cacheName.replaceAll(" ", "");
    return JiveGlobals.getProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_TYPE);
}

/**
 * Stores a local property that records the minimum cache size for the supplied cache name.
 * Writes only when the value differs from what is already stored.
 *
 * @param cacheName the name of the cache to store a value for.
 * @param size the minimum cache size.
 */
public static void setMinCacheSize(String cacheName, long size) {
    cacheName = cacheName.replaceAll(" ", "");
    if ( !Long.toString(size).equals(JiveGlobals.getProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_MIN))) {
        JiveGlobals.setProperty(PROPERTY_PREFIX_CACHE + cacheName + PROPERTY_SUFFIX_MIN, Long.toString(size));
    }
}

/**
 * Returns the configured minimum size for the named cache, defaulting to 0 when no property is set.
 *
 * @param cacheName the name of the cache to look up a corresponding property for.
 * @return the minimum cache size property value, or 0.
 */
public static long getMinCacheSize(String cacheName) {
    return getCacheProperty(cacheName, PROPERTY_SUFFIX_MIN, 0);
}

/**
 * Resolves a cache-related property name (e.g. "cache.foo.size") back to the Cache it configures.
 * Tries the short-name alias table first, then falls back to matching the space-stripped cache name.
 *
 * @param property the full property name.
 * @return the matching cache, or null when the property does not identify a known cache.
 */
private static Cache getCacheByProperty( String property ) {
    if ( !property.startsWith(PROPERTY_PREFIX_CACHE)) {
        return null;
    }

    // Extract the cache name identifier from the property name.
    final String name = property.substring(PROPERTY_PREFIX_CACHE.length(), property.lastIndexOf("."));

    // See if property is using the short name variant.
    for ( final Map.Entry<String, String> entry : cacheNames.entrySet() ) {
        if ( name.equals( entry.getValue() ) ) {
            return caches.get( entry.getKey() );
        }
    }

    // If not a short name, then try for a normalized name.
    for ( final Map.Entry<String, Cache> entry : caches.entrySet() ) {
        if ( entry.getKey().replaceAll(" ", "").equals( name ) ) {
            return entry.getValue();
        }
    }

    return null;
}

/**
 * Looks up a numeric cache property, first under the space-stripped cache name, then under the
 * short-name alias; falls back to the cacheProps default table and finally the supplied default.
 *
 * @param cacheName the (possibly space-containing) cache name.
 * @param suffix the property suffix (size, lifetime, min).
 * @param defaultValue value returned when no property or default is found.
 * @return the resolved property value.
 */
private static long getCacheProperty(String cacheName, String suffix, long defaultValue) {
    // First check if user is overwriting default value using a system property for the cache name
    String propName = PROPERTY_PREFIX_CACHE + cacheName.replaceAll(" ", "") + suffix;
    String sizeProp = JiveGlobals.getProperty(propName);
    if (sizeProp == null && cacheNames.containsKey(cacheName)) {
        // No system property was found for the cache name so try now with short name
        propName = PROPERTY_PREFIX_CACHE + cacheNames.get(cacheName) + suffix;
        sizeProp = JiveGlobals.getProperty(propName);
    }
    if (sizeProp != null) {
        try {
            return Long.parseLong(sizeProp);
        }
        catch (NumberFormatException nfe) {
            // Malformed value: warn and fall through to the defaults below.
            log.warn("Unable to parse " + propName + " using default value.");
        }
    }
    // Check if there is a default size value for this cache
    Long defaultSize = cacheProps.get(propName);
    return defaultSize == null ? defaultValue : defaultSize;
}

/**
 * Returns true when a parseable numeric property exists for the cache name (directly or via its
 * short-name alias) with the given suffix. Mirrors the lookup order of getCacheProperty.
 *
 * @param cacheName the (possibly space-containing) cache name.
 * @param suffix the property suffix.
 * @return true if a usable property value exists.
 */
private static boolean hasCacheProperty(String cacheName, String suffix) {
    // First check if user is overwriting default value using a system property for the cache name
    String propName = PROPERTY_PREFIX_CACHE + cacheName.replaceAll(" ", "") + suffix;
    String sizeProp = JiveGlobals.getProperty(propName);
    if (sizeProp == null && cacheNames.containsKey(cacheName)) {
        // No system property was found for the cache name so try now with short name
        propName = PROPERTY_PREFIX_CACHE + cacheNames.get(cacheName) + suffix;
        sizeProp = JiveGlobals.getProperty(propName);
    }
    if (sizeProp != null) {
        try {
            Long.parseLong(sizeProp);
            return true;
        }
        catch (NumberFormatException nfe) {
            log.warn("Unable to parse " + propName + " using default value.");
        }
    }
    return false;
}

/**
 * Returns an array of all caches in the system.
 * @return an array of all caches in the system.
 */
public static Cache[] getAllCaches() {
    List<Cache> values = new ArrayList<>();
    for (Cache cache : caches.values()) {
        values.add(cache);
    }
    return values.toArray(new Cache[values.size()]);
}

/**
 * Returns the named cache, creating it as necessary.
 *
 * @param name the name of the cache to create.
 * @param <T> the type cache being created
 * @return the named cache, creating it as necessary.
 */
@SuppressWarnings("unchecked")
public static synchronized <T extends Cache> T createCache(String name) {
    T cache = (T) caches.get(name);
    if (cache != null) {
        // Already created; caches are cached themselves, keyed by name.
        return cache;
    }
    cache = (T) cacheFactoryStrategy.createCache(name);

    log.info("Created cache [" + cacheFactoryStrategy.getClass().getName() + "] for " + name);

    return wrapCache(cache, name);
}

/**
 * Returns the named local cache, creating it as necessary.
 * Local caches are never migrated to a clustered implementation (see localOnly).
 *
 * @param name the name of the cache to create.
 * @param <T> the type cache being created
 * @return the named cache, creating it as necessary.
 */
@SuppressWarnings("unchecked")
public static synchronized <T extends Cache> T createLocalCache(String name) {
    T cache = (T) caches.get(name);
    if (cache != null) {
        return cache;
    }
    cache = (T) localCacheFactoryStrategy.createCache(name);
    localOnly.add(name);

    log.info("Created local-only cache [" + localCacheFactoryClass + "] for " + name);

    return wrapCache(cache, name);
}

/**
 * Destroys the cache for the cache name specified.
 *
 * @param name the name of the cache to destroy.
 */
public static synchronized void destroyCache(String name) {
    Cache cache = caches.remove(name);
    if (cache != null) {
        if (localOnly.contains(name)) {
            localOnly.remove(name);
            localCacheFactoryStrategy.destroyCache(cache);
        } else {
            cacheFactoryStrategy.destroyCache(cache);
        }
    }
}

/**
 * @deprecated in favour of {@link Cache#getLock}. Will be removed in Openfire 5.0.0.
 *
 * <p>Returns an existing {@link java.util.concurrent.locks.Lock} on the specified key or creates a new one
 * if none was found. This operation is thread safe. Successive calls with the same key may or may not
 * return the same {@link java.util.concurrent.locks.Lock}. However, different threads asking for the
 * same Lock at the same time will get the same Lock object.<p>
 *
 * The supplied cache may or may not be used depending whether the server is running on cluster mode
 * or not. When not running as part of a cluster then the lock will be unrelated to the cache and will
 * only be visible in this JVM.
 *
 * @param key the object that defines the visibility or scope of the lock.
 * @param cache the cache used for holding the lock.
 * @return an existing lock on the specified key or creates a new one if none was found.
 */
@Deprecated
public static synchronized Lock getLock(Object key, Cache cache) {
    if (localOnly.contains(cache.getName())) {
        return localCacheFactoryStrategy.getLock(key, cache);
    } else {
        return cacheFactoryStrategy.getLock(key, cache);
    }
}

/**
 * Wraps the newly created cache in a CacheWrapper (or the component-specific wrapper for the
 * routing-components cache) and registers it in the caches map under the given name.
 *
 * @param cache the freshly created cache.
 * @param name the cache name to register it under.
 * @param <T> the cache type.
 * @return the wrapped, registered cache.
 */
@SuppressWarnings("unchecked")
private static <T extends Cache> T wrapCache(T cache, String name) {
    if ("Routing Components Cache".equals(name)) {
        cache = (T) new ComponentCacheWrapper(cache);
    } else {
        cache = (T) new CacheWrapper(cache);
    }
    cache.setName(name);

    caches.put(name, cache);
    return cache;
}

/**
 * Returns true if clustering is installed and can be used by this JVM
 * to join a cluster. A false value could mean that either clustering
 * support is not available or the license does not allow to have more
 * than 1 cluster node.
 *
 * @return true if clustering is installed and can be used by
 * this JVM to join a cluster.
 */
public static boolean isClusteringAvailable() {
    if (clusteredCacheFactoryStrategy == null) {
        try {
            // Lazily instantiate the clustered strategy from the clustering plugin's class loader.
            clusteredCacheFactoryStrategy = (CacheFactoryStrategy) Class.forName(
                    clusteredCacheFactoryClass, true,
                    getClusteredCacheStrategyClassLoader()).newInstance();
        } catch (NoClassDefFoundError | Exception e) {
            // Expected when no clustering plugin is installed; availability simply stays false.
            log.warn("Clustered cache factory strategy " + clusteredCacheFactoryClass + " not found");
        }
    }
    return (clusteredCacheFactoryStrategy != null);
}

/**
 * Returns true is clustering is currently being started. Once the cluster
 * is started or failed to be started this value will be false.
 *
 * @return true is clustering is currently being started.
 */
public static boolean isClusteringStarting() {
    return clusteringStarting;
}

/**
 * Returns true if this node is currently a member of a cluster. The last step of application
 * initialization is to join a cluster, so this method returns false during most of application startup.
 *
 * @return true if this node is currently a member of a cluster.
 */
public static boolean isClusteringStarted() {
    return clusteringStarted;
}

/**
 * Returns a byte[] that uniquely identifies this member within the cluster or {@code null}
 * when not in a cluster.
 *
 * @return a byte[] that uniquely identifies this member within the cluster or null when not in a cluster.
 */
public static byte[] getClusterMemberID() {
    return cacheFactoryStrategy.getClusterMemberID();
}

/**
 * Empties every registered cache. The caches themselves remain registered.
 */
public synchronized static void clearCaches() {
    for (String cacheName : caches.keySet()) {
        Cache cache = caches.get(cacheName);
        cache.clear();
    }
}

/**
 * Empties only the caches whose names are in the supplied list.
 *
 * @param cacheName names of the caches to clear.
 */
public synchronized static void clearCaches( String... cacheName ) {
    caches.values().parallelStream()
        .filter(cache -> Arrays.asList(cacheName).contains(cache.getName()))
        .forEach(Map::clear);
}

/**
 * Returns a byte[] that uniquely identifies this senior cluster member or {@code null}
 * when not in a cluster.
 *
 * @return a byte[] that uniquely identifies this senior cluster member or null when not in a cluster.
 */
public static byte[] getSeniorClusterMemberID() {
    return cacheFactoryStrategy.getSeniorClusterMemberID();
}

/**
 * Returns true if this member is the senior member in the cluster. If clustering
 * is not enabled, this method will also return true. This test is useful for
 * tasks that should only be run on a single member in a cluster.
 *
 * @return true if this cluster member is the senior or if clustering is not enabled.
 */
public static boolean isSeniorClusterMember() {
    return cacheFactoryStrategy.isSeniorClusterMember();
}

/**
 * Returns basic information about the current members of the cluster or an empty
 * collection if not running in a cluster.
 *
 * @return information about the current members of the cluster or an empty
 * collection if not running in a cluster.
 */
public static Collection<ClusterNodeInfo> getClusterNodesInfo() {
    return cacheFactoryStrategy.getClusterNodesInfo();
}

/**
 * Returns the maximum number of cluster members allowed. A value of 0 will
 * be returned when clustering is not allowed.
 *
 * @return the maximum number of cluster members allowed or 0 if clustering is not allowed.
 */
public static int getMaxClusterNodes() {
    return cacheFactoryStrategy.getMaxClusterNodes();
}

/**
 * Gets the pseudo-synchronized time from the cluster. While the cluster members may
 * have varying system times, this method is expected to return a timestamp that is
 * synchronized (or nearly so; best effort) across the cluster.
 *
 * @return Synchronized time for all cluster members
 */
public static long getClusterTime() {
    // use try/catch here for backward compatibility with older plugin(s)
    try {
        return cacheFactoryStrategy.getClusterTime();
    } catch (AbstractMethodError ame) {
        log.warn("Cluster time not available; check for update to hazelcast/clustering plugin");
        return localCacheFactoryStrategy.getClusterTime();
    }
}

/**
 * Invokes a task on other cluster members in an asynchronous fashion. The task will not be
 * executed on the local cluster member. If clustering is not enabled, this method
 * will do nothing.
 *
 * @param task the task to be invoked on all other cluster members.
 */
public static void doClusterTask(final ClusterTask<?> task) {
    cacheFactoryStrategy.doClusterTask(task);
}

/**
 * Invokes a task on a given cluster member in an asynchronous fashion. If clustering is not enabled,
 * this method will do nothing.
 *
 * @param task the task to be invoked on the specified cluster member.
 * @param nodeID the byte array that identifies the target cluster member.
 * @throws IllegalStateException if requested node was not found or not running in a cluster.
 */
public static void doClusterTask(final ClusterTask<?> task, byte[] nodeID) {
    cacheFactoryStrategy.doClusterTask(task, nodeID);
}

/**
 * Invokes a task on other cluster members synchronously and returns the result as a Collection
 * (method will not return until the task has been executed on each cluster member).
 * The task will not be executed on the local cluster member. If clustering is not enabled,
 * this method will return an empty collection.
 *
 * @param task the ClusterTask object to be invoked on all other cluster members.
 * @param includeLocalMember true to run the task on the local member, false otherwise
 * @param <T> the return type of the cluster task
 * @return collection with the result of the execution.
 */
public static <T> Collection<T> doSynchronousClusterTask(ClusterTask<T> task, boolean includeLocalMember) {
    return cacheFactoryStrategy.doSynchronousClusterTask(task, includeLocalMember);
}

/**
 * Invokes a task on a given cluster member synchronously and returns the result of
 * the remote operation. If clustering is not enabled, this method will return null.
 *
 * @param task the ClusterTask object to be invoked on a given cluster member.
 * @param nodeID the byte array that identifies the target cluster member.
 * @param <T> the return type of the cluster task
 * @return result of remote operation or null if operation failed or operation returned null.
 * @throws IllegalStateException if requested node was not found or not running in a cluster.
 */
public static <T> T doSynchronousClusterTask(ClusterTask<T> task, byte[] nodeID) {
    return cacheFactoryStrategy.doSynchronousClusterTask(task, nodeID);
}

/**
 * Returns the node info for the given cluster node
 * @param nodeID The target cluster node
 * @return The info for the cluster node or null if not found
 */
public static ClusterNodeInfo getClusterNodeInfo(byte[] nodeID) {
    return cacheFactoryStrategy.getClusterNodeInfo(nodeID);
}

/**
 * Returns the name of the plugin backing the active cache factory strategy.
 *
 * @return the plugin name reported by the current strategy.
 */
public static String getPluginName() {
    return cacheFactoryStrategy.getPluginName();
}

/**
 * Bootstraps the local (non-clustered) cache strategy and registers the JID prep caches.
 *
 * @throws InitializationException when the local strategy cannot be instantiated.
 */
public static synchronized void initialize() throws InitializationException {
    try {
        localCacheFactoryStrategy = (CacheFactoryStrategy) Class.forName(localCacheFactoryClass).newInstance();
        cacheFactoryStrategy = localCacheFactoryStrategy;

        // Update the JID-internal caches, if they're configured differently than their default.
        JID.NODEPREP_CACHE.policy().eviction().get().setMaximum( getMaxCacheSize( "jidNodeprep" ) );
        JID.NODEPREP_CACHE.policy().expireAfterWrite().get().setExpiresAfter( getMaxCacheLifetime( "jidNodeprep" ), TimeUnit.MILLISECONDS );
        JID.DOMAINPREP_CACHE.policy().eviction().get().setMaximum( getMaxCacheSize( "jidDomainprep" ) );
        JID.DOMAINPREP_CACHE.policy().expireAfterWrite().get().setExpiresAfter( getMaxCacheLifetime( "jidDomainprep" ), TimeUnit.MILLISECONDS );
        JID.RESOURCEPREP_CACHE.policy().eviction().get().setMaximum( getMaxCacheSize( "jidResourceprep" ) );
        JID.RESOURCEPREP_CACHE.policy().expireAfterWrite().get().setExpiresAfter( getMaxCacheLifetime( "jidResourceprep" ), TimeUnit.MILLISECONDS );

        // Mock cache creation for the JID-internal classes, by wrapping them in a compatibility layer.
        caches.put("JID Node-parts", CaffeineCache.of( JID.NODEPREP_CACHE, "JID Node-parts" ));
        caches.put("JID Domain-parts", CaffeineCache.of( JID.DOMAINPREP_CACHE, "JID Domain-parts" ));
        caches.put("JID Resource-parts", CaffeineCache.of( JID.RESOURCEPREP_CACHE, "JID Resource-parts" ));
    } catch (Exception e) {
        log.error("Failed to instantiate local cache factory strategy: " + localCacheFactoryClass, e);
        throw new InitializationException(e);
    }
}

/**
 * Locates the class loader of the installed clustering plugin (hazelcast, clustering, or enterprise,
 * in that order). Falls back to the context class loader when none is installed.
 *
 * @return a class loader able to load the clustered cache strategy class.
 */
private static ClassLoader getClusteredCacheStrategyClassLoader() {
    PluginManager pluginManager = XMPPServer.getInstance().getPluginManager();
    Plugin plugin = pluginManager.getPlugin("hazelcast");
    if (plugin == null) {
        plugin = pluginManager.getPlugin("clustering");
        if (plugin == null) {
            plugin = pluginManager.getPlugin("enterprise");
        }
    }
    PluginClassLoader pluginLoader = pluginManager.getPluginClassloader(plugin);
    if (pluginLoader != null) {
        if (log.isDebugEnabled()) {
            StringBuffer pluginLoaderDetails = new StringBuffer("Clustering plugin class loader: ");
            pluginLoaderDetails.append(pluginLoader.getClass().getName());
            for (URL url : pluginLoader.getURLs()) {
                pluginLoaderDetails.append("\n\t").append(url.toExternalForm());
            }
            log.debug(pluginLoaderDetails.toString());
        }
        return pluginLoader;
    }
    else {
        log.warn("CacheFactory - Unable to find a Plugin that provides clustering support.");
        return Thread.currentThread().getContextClassLoader();
    }
}

/**
 * Attempts to join the cluster (when clustering is available) and, on success, starts a daemon
 * thread that periodically publishes this node's cache statistics to the cluster.
 */
public static void startClustering() {
    if (isClusteringAvailable()) {
        clusteringStarting = true;
        clusteringStarted = clusteredCacheFactoryStrategy.startCluster();
        clusteringStarting = false;
    }
    if (clusteringStarted) {
        if (statsThread == null) {
            // Start a timing thread with 1 second of accuracy.
            statsThread = new Thread("Cache Stats") {
                private volatile boolean destroyed = false;

                @Override
                public void run() {
                    // Stop publishing when the server shuts down...
                    XMPPServer.getInstance().addServerListener(new XMPPServerListener() {
                        @Override
                        public void serverStarted() {}

                        @Override
                        public void serverStopping() {
                            destroyed = true;
                        }
                    });
                    // ...or when this node leaves the cluster.
                    ClusterManager.addListener(new ClusterEventListener() {
                        @Override
                        public void joinedCluster() {}

                        @Override
                        public void joinedCluster(byte[] nodeID) {}

                        @Override
                        public void leftCluster() {
                            destroyed = true;
                            ClusterManager.removeListener(this);
                        }

                        @Override
                        public void leftCluster(byte[] nodeID) {}

                        @Override
                        public void markedAsSeniorClusterMember() {}
                    });

                    // Run the timer indefinitely.
                    while (!destroyed && ClusterManager.isClusteringEnabled()) {
                        // Publish cache stats for this cluster node (assuming clustering is
                        // enabled and there are stats to publish).
                        try {
                            cacheFactoryStrategy.updateCacheStats(caches);
                        }
                        catch (Exception e) {
                            log.error(e.getMessage(), e);
                        }
                        try {
                            // Sleep 10 seconds.
                            sleep(10000);
                        }
                        catch (InterruptedException ie) {
                            // Ignore.
                        }
                    }
                    statsThread = null;
                    log.debug("Cache stats thread terminated.");
                }
            };
            statsThread.setDaemon(true);
            statsThread.start();
        }
    }
}

/**
 * Leaves the cluster and reverts to the local cache factory strategy.
 */
public static void stopClustering() {
    // Stop the cluster
    clusteredCacheFactoryStrategy.stopCluster();
    clusteredCacheFactoryStrategy = null;
    // Set the strategy to local
    cacheFactoryStrategy = localCacheFactoryStrategy;
}

/**
 * Notification message indicating that this JVM has joined a cluster.
 */
@SuppressWarnings("unchecked")
public static synchronized void joinedCluster() {
    cacheFactoryStrategy = clusteredCacheFactoryStrategy;

    // Loop through local caches and switch them to clustered cache (copy content)
    Arrays.stream(getAllCaches())
        .filter(CacheFactory::isClusterableCache)
        .forEach(cache -> {
            final CacheWrapper cacheWrapper = ((CacheWrapper) cache);
            final Cache clusteredCache = cacheFactoryStrategy.createCache(cacheWrapper.getName());
            clusteredCache.putAll(cache);
            cacheWrapper.setWrappedCache(clusteredCache);
        });

    clusteringStarting = false;
    clusteringStarted = true;
    log.info("Clustering started; cache migration complete");
}

/**
 * Notification message indicating that this JVM has left the cluster.
 */
@SuppressWarnings("unchecked")
public static synchronized void leftCluster() {
    clusteringStarted = false;
    cacheFactoryStrategy = localCacheFactoryStrategy;

    // Loop through clustered caches and change them to local caches (copy content)
    Arrays.stream(getAllCaches())
        .filter(CacheFactory::isClusterableCache)
        .forEach(cache -> {
            final CacheWrapper cacheWrapper = ((CacheWrapper) cache);
            final Cache standaloneCache = cacheFactoryStrategy.createCache(cacheWrapper.getName());
            standaloneCache.putAll(cache);
            cacheWrapper.setWrappedCache(standaloneCache);
        });

    log.info("Clustering stopped; cache migration complete");
}

/**
 * Indicates if the supplied Cache is "clusterable". This is used to determine if a cache should be migrated
 * between a {@link DefaultCache} and a clustered cache when the node joins/leaves the cluster.
 * <p>
 * A cache is considered 'clusterable' if;
 * <ul>
 *     <li>the cache is not a 'local' cache - which apply to the local node only so do not need to be clustered, and</li>
 *     <li>the cache is actually a {@link CacheWrapper} which wraps the underlying default or clustered cache</li>
 * </ul>
 *
 * @param cache the cache to check
 * @return {@code true} if the cache can be converted to/from a clustered cache, otherwise {@code false}
 */
private static boolean isClusterableCache(final Cache cache) {
    return cache instanceof CacheWrapper && !localOnly.contains(cache.getName());
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.7.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 */
// NOTE(review): this struct is Thrift-generated; any hand edits will be lost on regeneration.
// Change the .thrift IDL and regenerate instead.
package backtype.storm.generated;

import org.apache.commons.lang.builder.HashCodeBuilder;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Thrift struct: a fully-qualified Java class name plus the constructor arguments used to instantiate it.
public class JavaObject implements org.apache.thrift7.TBase<JavaObject, JavaObject._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("JavaObject");

  private static final org.apache.thrift7.protocol.TField FULL_CLASS_NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("full_class_name", org.apache.thrift7.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift7.protocol.TField ARGS_LIST_FIELD_DESC = new org.apache.thrift7.protocol.TField("args_list", org.apache.thrift7.protocol.TType.LIST, (short)2);

  private String full_class_name; // required
  private List<JavaObjectArg> args_list; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
    FULL_CLASS_NAME((short)1, "full_class_name"),
    ARGS_LIST((short)2, "args_list");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // FULL_CLASS_NAME
          return FULL_CLASS_NAME;
        case 2: // ARGS_LIST
          return ARGS_LIST;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments

  public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.FULL_CLASS_NAME, new org.apache.thrift7.meta_data.FieldMetaData("full_class_name", org.apache.thrift7.TFieldRequirementType.REQUIRED,
        new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
    tmpMap.put(_Fields.ARGS_LIST, new org.apache.thrift7.meta_data.FieldMetaData("args_list", org.apache.thrift7.TFieldRequirementType.REQUIRED,
        new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
            new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, JavaObjectArg.class))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(JavaObject.class, metaDataMap);
  }

  public JavaObject() {
  }

  public JavaObject(
    String full_class_name,
    List<JavaObjectArg> args_list)
  {
    this();
    this.full_class_name = full_class_name;
    this.args_list = args_list;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public JavaObject(JavaObject other) {
    if (other.is_set_full_class_name()) {
      this.full_class_name = other.full_class_name;
    }
    if (other.is_set_args_list()) {
      // Elements are copied individually so the two structs share no mutable state.
      List<JavaObjectArg> __this__args_list = new ArrayList<JavaObjectArg>();
      for (JavaObjectArg other_element : other.args_list) {
        __this__args_list.add(new JavaObjectArg(other_element));
      }
      this.args_list = __this__args_list;
    }
  }

  public JavaObject deepCopy() {
    return new JavaObject(this);
  }

  @Override
  public void clear() {
    this.full_class_name = null;
    this.args_list = null;
  }

  public String get_full_class_name() {
    return this.full_class_name;
  }

  public void set_full_class_name(String full_class_name) {
    this.full_class_name = full_class_name;
  }

  public void unset_full_class_name() {
    this.full_class_name = null;
  }

  /** Returns true if field full_class_name is set (has been assigned a value) and false otherwise */
  public boolean is_set_full_class_name() {
    return this.full_class_name != null;
  }

  public void set_full_class_name_isSet(boolean value) {
    if (!value) {
      this.full_class_name = null;
    }
  }

  public int get_args_list_size() {
    return (this.args_list == null) ? 0 : this.args_list.size();
  }

  public java.util.Iterator<JavaObjectArg> get_args_list_iterator() {
    return (this.args_list == null) ? null : this.args_list.iterator();
  }

  public void add_to_args_list(JavaObjectArg elem) {
    if (this.args_list == null) {
      this.args_list = new ArrayList<JavaObjectArg>();
    }
    this.args_list.add(elem);
  }

  public List<JavaObjectArg> get_args_list() {
    return this.args_list;
  }

  public void set_args_list(List<JavaObjectArg> args_list) {
    this.args_list = args_list;
  }

  public void unset_args_list() {
    this.args_list = null;
  }

  /** Returns true if field args_list is set (has been assigned a value) and false otherwise */
  public boolean is_set_args_list() {
    return this.args_list != null;
  }

  public void set_args_list_isSet(boolean value) {
    if (!value) {
      this.args_list = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case FULL_CLASS_NAME:
      if (value == null) {
        unset_full_class_name();
      } else {
        set_full_class_name((String)value);
      }
      break;

    case ARGS_LIST:
      if (value == null) {
        unset_args_list();
      } else {
        set_args_list((List<JavaObjectArg>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case FULL_CLASS_NAME:
      return get_full_class_name();

    case ARGS_LIST:
      return get_args_list();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case FULL_CLASS_NAME:
      return is_set_full_class_name();
    case ARGS_LIST:
      return is_set_args_list();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof JavaObject)
      return this.equals((JavaObject)that);
    return false;
  }

  public boolean equals(JavaObject that) {
    if (that == null)
      return false;

    boolean this_present_full_class_name = true && this.is_set_full_class_name();
    boolean that_present_full_class_name = true && that.is_set_full_class_name();
    if (this_present_full_class_name || that_present_full_class_name) {
      if (!(this_present_full_class_name && that_present_full_class_name))
        return false;
      if (!this.full_class_name.equals(that.full_class_name))
        return false;
    }

    boolean this_present_args_list = true && this.is_set_args_list();
    boolean that_present_args_list = true && that.is_set_args_list();
    if (this_present_args_list || that_present_args_list) {
      if (!(this_present_args_list && that_present_args_list))
        return false;
      if (!this.args_list.equals(that.args_list))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_full_class_name = true && (is_set_full_class_name());
    builder.append(present_full_class_name);
    if (present_full_class_name)
      builder.append(full_class_name);

    boolean present_args_list = true && (is_set_args_list());
    builder.append(present_args_list);
    if (present_args_list)
      builder.append(args_list);

    return builder.toHashCode();
  }

  public int compareTo(JavaObject other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    JavaObject typedOther = (JavaObject)other;

    lastComparison = Boolean.valueOf(is_set_full_class_name()).compareTo(typedOther.is_set_full_class_name());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_full_class_name()) {
      lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.full_class_name, typedOther.full_class_name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(is_set_args_list()).compareTo(typedOther.is_set_args_list());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_args_list()) {
      lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.args_list, typedOther.args_list);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
    org.apache.thrift7.protocol.TField field;
    iprot.readStructBegin();
    while (true)
    {
      field = iprot.readFieldBegin();
      if (field.type == org.apache.thrift7.protocol.TType.STOP) {
        break;
      }
      switch (field.id) {
        case 1: // FULL_CLASS_NAME
          if (field.type == org.apache.thrift7.protocol.TType.STRING) {
            this.full_class_name = iprot.readString();
          } else {
            org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
          }
          break;
        case 2: // ARGS_LIST
          if (field.type == org.apache.thrift7.protocol.TType.LIST) {
            {
              org.apache.thrift7.protocol.TList _list0 = iprot.readListBegin();
              this.args_list = new ArrayList<JavaObjectArg>(_list0.size);
              for (int _i1 = 0; _i1 < _list0.size; ++_i1)
              {
                JavaObjectArg _elem2; // required
                _elem2 = new JavaObjectArg();
                _elem2.read(iprot);
                this.args_list.add(_elem2);
              }
              iprot.readListEnd();
            }
          } else {
            org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
          }
          break;
        default:
          org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    validate();
  }

  public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
    validate();

    oprot.writeStructBegin(STRUCT_DESC);
    if (this.full_class_name != null) {
      oprot.writeFieldBegin(FULL_CLASS_NAME_FIELD_DESC);
      oprot.writeString(this.full_class_name);
      oprot.writeFieldEnd();
    }
    if (this.args_list != null) {
      oprot.writeFieldBegin(ARGS_LIST_FIELD_DESC);
      {
        oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.args_list.size()));
        for (JavaObjectArg _iter3 : this.args_list)
        {
          _iter3.write(oprot);
        }
        oprot.writeListEnd();
      }
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("JavaObject(");
    boolean first = true;

    sb.append("full_class_name:");
    if (this.full_class_name == null) {
      sb.append("null");
    } else {
      sb.append(this.full_class_name);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("args_list:");
    if (this.args_list == null) {
      sb.append("null");
    } else {
      sb.append(this.args_list);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift7.TException {
    // check for required fields
    if (!is_set_full_class_name()) {
      throw new org.apache.thrift7.protocol.TProtocolException("Required field 'full_class_name' is unset! Struct:" + toString());
    }

    if (!is_set_args_list()) {
      throw new org.apache.thrift7.protocol.TProtocolException("Required field 'args_list' is unset! Struct:" + toString());
    }

  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    // Java serialization is delegated to Thrift's compact protocol.
    try {
      write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift7.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift7.TException te) {
      throw new java.io.IOException(te);
    }
  }

}
/*
 *
 *
 *    Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.
 *    See the NOTICE file distributed with this work for additional information regarding copyright ownership.
 *    The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use
 *    this file except in compliance with the License.  You may obtain a copy of the License at
 *
 *            http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software distributed under the License is
 *    distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and limitations under the License.
 *
 *
 */

/** ******************* ISSUE LIST **************************
 *
 * 1. In general, should we clone attributes in the constructor to avoid bad uses of input variables later on?
 *
 * 2. How are we going to update the probabilities? Value by value? Or directly with the whole set of probabilities? Or both?
 * Two methods are included: setProbabilities(double[] probabilities) and setProbabilityOfState(int index, double value)
 *
 * 3. Is the method setProbabilityOfState needed?
 *
 * ********************************************************
 */

package eu.amidst.core.distribution;

import eu.amidst.core.exponentialfamily.EF_Multinomial;
import eu.amidst.core.exponentialfamily.MomentParameters;
import eu.amidst.core.utils.Utils;
import eu.amidst.core.variables.Variable;
import eu.amidst.core.variables.stateSpaceTypes.FiniteStateSpace;

import java.util.Arrays;
import java.util.Random;

/**
 * This class extends the abstract class {@link UnivariateDistribution} and defines the univariate multinomial distribution.
 *
 * <p> For a usage example follow this link:
 * <a href="http://amidst.github.io/toolbox/CodeExamples.html#bnmodifyexample"> http://amidst.github.io/toolbox/CodeExamples.html#bnmodifyexample </a> </p>
 */
public class Multinomial extends UnivariateDistribution {

    private static final long serialVersionUID = 8587756877237341367L;

    /** One probability per state of the variable; kept in state-index order. */
    private double[] probabilities;

    /**
     * Creates a new Multinomial distribution for a given variable, initialized to the uniform distribution.
     * @param var1 a discrete {@link Variable} object.
     */
    public Multinomial(Variable var1) {
        this.var = var1;
        int stateCount = var.getNumberOfStates();
        this.probabilities = new double[stateCount];
        // Uniform prior: every state starts with probability 1/K.
        Arrays.fill(this.probabilities, 1.0 / stateCount);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public double[] getParameters() {
        // Hand back a defensive copy; the parameters of a multinomial are exactly its probabilities.
        return Arrays.copyOf(this.probabilities, this.probabilities.length);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getNumberOfParameters() {
        return this.probabilities.length;
    }

    /**
     * Sets the probability values of this Multinomial distribution.
     * @param probabilities1 an array of probability values having the same order as the variable states.
     */
    public void setProbabilities(double[] probabilities1) {
        this.probabilities = probabilities1;
    }

    /**
     * Sets a probability value in a given position in the array of probabilities.
     * @param state the position in which the probability is set.
     * @param prob a probability value.
     */
    public void setProbabilityOfState(int state, double prob) {
        this.probabilities[state] = prob;
    }

    /**
     * Returns the probability value of a given position in the array of probabilities.
     * @param state the position for which the probability is extracted.
     * @return a probability value.
     */
    public double getProbabilityOfState(int state) {
        return this.probabilities[state];
    }

    /**
     * Returns the probability value of a given multinomial state, identified by name.
     * @param name the name of the state.
     * @return a probability value.
     */
    public double getProbabilityOfState(String name) {
        FiniteStateSpace stateSpace = this.var.getStateSpaceType();
        return getProbabilityOfState(stateSpace.getIndexOfState(name));
    }

    /**
     * Returns the set of probabilities for the different states of the variable.
     * @return an array of double corresponding to the probability values.
     */
    public double[] getProbabilities() {
        return probabilities;
    }

    /**
     * Returns the logarithm of the probability for a given variable state.
     * @param value the position of the variable state in the array of probabilities (represented as a
     *              double for generality reasons).
     * @return a double value corresponding to the logarithm of the probability.
     */
    @Override
    public double getLogProbability(double value) {
        return Math.log(this.probabilities[(int) value]);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public double sample(Random rand) {
        // Inverse-CDF sampling: walk the cumulative mass until it exceeds the uniform draw.
        double target = rand.nextDouble();
        double cumulative = 0;
        for (int state = 0; state < probabilities.length; state++) {
            cumulative += probabilities[state];
            if (cumulative > target) {
                return state;
            }
        }
        // Guards against floating-point mass summing to slightly below 1.
        return probabilities.length - 1;
    }

    @Override
    public UnivariateDistribution deepCopy(Variable variable) {
        Multinomial copy = new Multinomial(variable);
        copy.probabilities = this.getProbabilities().clone();
        return copy;
    }

    public String label() {
        return "Multinomial";
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void randomInitialization(Random random) {
        // The +0.2 offset keeps every state bounded away from zero before normalization.
        for (int state = 0; state < probabilities.length; state++) {
            probabilities[state] = random.nextDouble() + 0.2;
        }
        probabilities = Utils.normalize(probabilities);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equalDist(Distribution dist, double threshold) {
        return (dist instanceof Multinomial) && this.equalDist((Multinomial) dist, threshold);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        StringBuilder str = new StringBuilder("[ ");
        double[] probs = this.getProbabilities();
        for (int i = 0; i < probs.length; i++) {
            if (i > 0) {
                str.append(", ");
            }
            str.append(probs[i]);
        }
        str.append(" ]");
        return str.toString();
    }

    /**
     * Tests if a given Multinomial distribution is equal to this Multinomial distribution.
     * @param dist a given Multinomial distribution.
     * @param threshold a threshold.
     * @return true if the two Multinomial distributions are equal, false otherwise.
     */
    public boolean equalDist(Multinomial dist, double threshold) {
        boolean withinThreshold = true;
        for (int state = 0; state < this.probabilities.length; state++) {
            double deviation = Math.abs(this.getProbabilityOfState(state) - dist.getProbabilityOfState(state));
            withinThreshold = withinThreshold && deviation <= threshold;
        }
        return withinThreshold;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public EF_Multinomial toEFUnivariateDistribution() {
        EF_Multinomial efDist = new EF_Multinomial(this.getVariable());
        // The moment parameters of a multinomial are its per-state probabilities.
        MomentParameters moments = efDist.createZeroMomentParameters();
        int stateCount = this.getVariable().getNumberOfStates();
        for (int state = 0; state < stateCount; state++) {
            moments.set(state, this.getProbabilityOfState(state));
        }
        efDist.setMomentParameters(moments);
        return efDist;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.dmn.engine.api;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.assertj.core.api.Fail;
import org.camunda.bpm.dmn.engine.DmnDecisionRuleResult;
import org.camunda.bpm.dmn.engine.DmnDecisionTableResult;
import org.camunda.bpm.dmn.engine.impl.DmnDecisionResultException;
import org.camunda.bpm.dmn.engine.test.DecisionResource;
import org.camunda.bpm.dmn.engine.test.DmnEngineTest;
import org.camunda.bpm.engine.variable.Variables;
import org.camunda.bpm.engine.variable.value.TypedValue;
import org.junit.Test;

/**
 * Tests the result API of evaluated DMN decision tables ({@link DmnDecisionTableResult} /
 * {@link DmnDecisionRuleResult}): empty, single, and multiple matched rules, typed and
 * untyped output entries, and the error codes raised for ambiguous single-result access.
 *
 * The test decision tables contain three rules, each toggled on by a boolean input
 * variable of the same name (see {@link #evaluateWithMatchingRules(String...)}).
 */
public class DmnResultTest extends DmnEngineTest {

  public static final String NO_OUTPUT_VALUE = "noOutputValue";
  public static final String SINGLE_OUTPUT_VALUE = "singleOutputValue";
  public static final String MULTIPLE_OUTPUT_VALUES = "multipleOutputValues";

  public static final String RESULT_TEST_DMN = "DmnResultTest.dmn";
  public static final String RESULT_TEST_WITH_TYPES_DMN = "DmnResultTypedTest.dmn";

  // No rule matches: the result collection is empty and single/first accessors return null.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testNoResult() {
    DmnDecisionTableResult results = evaluateWithMatchingRules();

    assertThat(results).isEmpty();
    assertThat(results.getFirstResult()).isNull();
    assertThat(results.getSingleResult()).isNull();
  }

  // Exactly one matching rule: get(0), getFirstResult() and getSingleResult() all agree.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testSingleResult() {
    DmnDecisionTableResult results = evaluateWithMatchingRules(SINGLE_OUTPUT_VALUE);

    assertThat(results).hasSize(1);
    assertSingleOutputValue(results.get(0));
    assertSingleOutputValue(results.getFirstResult());
    assertSingleOutputValue(results.getSingleResult());
  }

  // Several matching rules: results are ordered by rule, and getSingleResult() must fail
  // with error code DMN-01008 listing the conflicting output values.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testMultipleResults() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(NO_OUTPUT_VALUE, SINGLE_OUTPUT_VALUE, MULTIPLE_OUTPUT_VALUES);

    assertThat(decisionResult).hasSize(3);

    DmnDecisionRuleResult ruleResult = decisionResult.get(0);
    assertNoOutputValue(ruleResult);
    ruleResult = decisionResult.get(1);
    assertSingleOutputValue(ruleResult);
    ruleResult = decisionResult.get(2);
    assertMultipleOutputValues(ruleResult);

    ruleResult = decisionResult.getFirstResult();
    assertNoOutputValue(ruleResult);

    try {
      decisionResult.getSingleResult();
      failBecauseExceptionWasNotThrown(DmnDecisionResultException.class);
    } catch (DmnDecisionResultException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01008")
        .hasMessageContaining("singleValue")
        .hasMessageContaining("multipleValues1")
        .hasMessageContaining("multipleValues2");
    }
  }

  // A matched rule may legitimately produce zero output entries.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testNoOutputValue() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(NO_OUTPUT_VALUE);

    assertThat(decisionResult).hasSize(1);
    assertNoOutputValue(decisionResult.getFirstResult());
  }

  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testSingleOutputValue() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(SINGLE_OUTPUT_VALUE);

    assertThat(decisionResult).hasSize(1);
    assertSingleOutputValue(decisionResult.getFirstResult());
  }

  // An unnamed output column is keyed by null in the entry map.
  @Test
  @DecisionResource
  public void testSingleOutputNoName() {
    DmnDecisionTableResult decisionResult = evaluateDecisionTable();

    assertThat(decisionResult).hasSize(1);
    assertThat(decisionResult.getFirstResult()).hasSize(1);
    assertThat(decisionResult.getFirstResult().getSingleEntry()).isEqualTo("outputValue");
    assertThat(decisionResult.getFirstResult().get(null)).isEqualTo("outputValue");
  }

  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testMultipleOutputValues() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(MULTIPLE_OUTPUT_VALUES);

    assertThat(decisionResult).hasSize(1);
    assertMultipleOutputValues(decisionResult.getFirstResult());
  }

  // collectEntries skips rule results that have no entry for the requested output name.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testCollectOutputValues() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(NO_OUTPUT_VALUE, SINGLE_OUTPUT_VALUE, MULTIPLE_OUTPUT_VALUES);

    assertThat(decisionResult).hasSize(3);

    List<String> entryValues = decisionResult.collectEntries("firstOutput");
    assertThat(entryValues).containsExactly("singleValue", "multipleValues1");

    entryValues = decisionResult.collectEntries("secondOutput");
    assertThat(entryValues).containsExactly("multipleValues2");
  }

  // getResultList exposes one untyped name->value map per matched rule.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testOutputList() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(SINGLE_OUTPUT_VALUE, MULTIPLE_OUTPUT_VALUES);

    List<Map<String, Object>> entryMapList = decisionResult.getResultList();
    assertThat(entryMapList).hasSize(2);

    Map<String, Object> firstResult = entryMapList.get(0);
    assertThat(firstResult).hasSize(1);
    assertThat(firstResult).containsEntry("firstOutput", "singleValue");

    Map<String, Object> secondResult = entryMapList.get(1);
    assertThat(secondResult).hasSize(2);
    assertThat(secondResult).containsEntry("firstOutput", "multipleValues1");
    assertThat(secondResult).containsEntry("secondOutput", "multipleValues2");
  }

  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testValueMap() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(MULTIPLE_OUTPUT_VALUES);

    DmnDecisionRuleResult ruleResult = decisionResult.getSingleResult();
    assertThat(ruleResult).hasSize(2);

    Map<String, Object> entryMap = ruleResult.getEntryMap();
    assertThat(entryMap).hasSize(2);
    assertThat(entryMap).containsEntry("firstOutput", "multipleValues1");
    assertThat(entryMap).containsEntry("secondOutput", "multipleValues2");
  }

  // Without type definitions in the DMN file, entries come back as untyped values.
  @Test
  @DecisionResource(resource = RESULT_TEST_DMN)
  public void testSingleOutputUntypedValue() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(SINGLE_OUTPUT_VALUE);

    assertThat(decisionResult).hasSize(1);

    DmnDecisionRuleResult ruleResult = decisionResult.getFirstResult();

    TypedValue typedEntry = ruleResult.getEntryTyped("firstOutput");
    assertThat(typedEntry).isEqualTo(Variables.untypedValue("singleValue"));

    typedEntry = ruleResult.getEntryTyped("secondOutput");
    assertThat(typedEntry).isNull();

    typedEntry = ruleResult.getFirstEntryTyped();
    assertThat(typedEntry).isEqualTo(Variables.untypedValue("singleValue"));

    typedEntry = ruleResult.getSingleEntryTyped();
    assertThat(typedEntry).isEqualTo(Variables.untypedValue("singleValue"));
  }

  // With type definitions, entries carry the declared type (here: string).
  @Test
  @DecisionResource(resource = RESULT_TEST_WITH_TYPES_DMN)
  public void testSingleOutputTypedValue() {
    DmnDecisionTableResult decisionResult = evaluateWithMatchingRules(SINGLE_OUTPUT_VALUE);

    assertThat(decisionResult).hasSize(1);

    DmnDecisionRuleResult ruleResult = decisionResult.getFirstResult();

    TypedValue typedValue = ruleResult.getEntryTyped("firstOutput");
    assertThat(typedValue).isEqualTo(Variables.stringValue("singleValue"));

    typedValue = ruleResult.getEntryTyped("secondOutput");
    assertThat(typedValue).isNull();

    typedValue = ruleResult.getFirstEntryTyped();
    assertThat(typedValue).isEqualTo(Variables.stringValue("singleValue"));

    typedValue = ruleResult.getSingleEntryTyped();
    assertThat(typedValue).isEqualTo(Variables.stringValue("singleValue"));
  }

  // helper methods

  // Sets one boolean input variable per rule name and evaluates the decision table,
  // so exactly the named rules match.
  protected DmnDecisionTableResult evaluateWithMatchingRules(String... matchingRules) {
    List<String> matchingRulesList = Arrays.asList(matchingRules);
    variables.putValue(NO_OUTPUT_VALUE, matchingRulesList.contains(NO_OUTPUT_VALUE));
    variables.putValue(SINGLE_OUTPUT_VALUE, matchingRulesList.contains(SINGLE_OUTPUT_VALUE));
    variables.putValue(MULTIPLE_OUTPUT_VALUES, matchingRulesList.contains(MULTIPLE_OUTPUT_VALUES));
    return evaluateDecisionTable();
  }

  // Asserts a rule result containing only firstOutput = "singleValue".
  protected void assertSingleOutputValue(DmnDecisionRuleResult decisionRuleResult) {
    assertThat(decisionRuleResult.size()).isEqualTo(1);

    String value = (String) decisionRuleResult.get("firstOutput");
    assertThat(value).isEqualTo("singleValue");

    value = (String) decisionRuleResult.get("secondOutput");
    assertThat(value).isNull();

    value = decisionRuleResult.getFirstEntry();
    assertThat(value).isEqualTo("singleValue");

    value = decisionRuleResult.getSingleEntry();
    assertThat(value).isEqualTo("singleValue");
  }

  // Asserts a rule result with no entries at all; accessors return null rather than throwing.
  protected void assertNoOutputValue(DmnDecisionRuleResult decisionRuleResult) {
    assertThat(decisionRuleResult.size()).isEqualTo(0);

    String value = (String) decisionRuleResult.get("firstOutput");
    assertThat(value).isNull();

    value = (String) decisionRuleResult.get("secondOutput");
    assertThat(value).isNull();

    value = decisionRuleResult.getFirstEntry();
    assertThat(value).isNull();

    value = decisionRuleResult.getSingleEntry();
    assertThat(value).isNull();
  }

  // Asserts a rule result with two entries; getSingleEntry() must fail with DMN-01007.
  protected void assertMultipleOutputValues(DmnDecisionRuleResult decisionRuleResult) {
    assertThat(decisionRuleResult.size()).isEqualTo(2);

    String value = (String) decisionRuleResult.get("firstOutput");
    assertThat(value).isEqualTo("multipleValues1");

    value = (String) decisionRuleResult.get("secondOutput");
    assertThat(value).isEqualTo("multipleValues2");

    value = decisionRuleResult.getFirstEntry();
    assertThat(value).isEqualTo("multipleValues1");

    try {
      decisionRuleResult.getSingleEntry();
      Fail.failBecauseExceptionWasNotThrown(DmnDecisionResultException.class);
    } catch (DmnDecisionResultException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01007")
        .hasMessageContaining("multipleValues1")
        .hasMessageContaining("multipleValues2");
    }
  }

}
package com.forum.fiend.osp;

import java.util.ArrayList;
import java.util.Vector;

import com.google.gson.internal.LinkedTreeMap;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.AsyncTask.Status;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.ListFragment;
import android.util.Log;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.AdapterView;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.Toast;

/**
 * List fragment that shows the contents of a forum section: sub-forums plus
 * announcement, sticky, and regular topics. Also backs the special pseudo-sections
 * ("unread", "participated", "timeline", "favs", "forum_favs", "search").
 * Data is fetched off the UI thread by the {@code downloadCategories} AsyncTask
 * and cached as JSON in SharedPreferences so a revisit renders instantly.
 */
@SuppressLint("NewApi")
public class CategoriesFragment extends ListFragment {

    private String server_address;
    // Forum/section id; "0" means the forum root. May arrive as a "###"-joined path of ids.
    private String subforum_id = "0";
    private String background;
    private String userid;
    private Category clicked_category;
    // NOTE(review): username is assigned from serverPassword in onStart() below —
    // looks suspicious; confirm the Server field semantics upstream.
    private String username;
    //private String hashId = "0";
    // Prefix for SharedPreferences keys (server address when multi-server builds are active).
    private String storagePrefix = "";
    private downloadCategories categoriesDownloader;
    private ForumFiendApp application;
    private String searchQuery = "";
    // The original (possibly "###"-joined) subforum id, kept for scroll-position storage keys.
    private String passedSubforum = "";
    private String screenTitle;
    private String screenSubtitle;
    // Paging window for topic requests.
    private int startingPos = 0;
    private int endingPos = 20;
    private boolean canScrollMoreThreads = true;
    // True while an additional page is being appended by infinite scroll.
    private boolean isExtraScrolling = false;
    private boolean isLoading = false;
    private boolean initialLoadComplete = false;
    private String subforumParts[];
    private String shareURL = "0";
    private FragmentActivity activity;
    private String totalHash;

    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        activity = (FragmentActivity) getActivity();
        application = (ForumFiendApp) activity.getApplication();
        // Preserve the current action-bar subtitle so it can be restored in onResume().
        if (activity != null) {
            if (activity.getActionBar() != null) {
                if (activity.getActionBar().getSubtitle() != null) {
                    screenSubtitle = activity.getActionBar().getSubtitle().toString();
                }
            }
        }
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return super.onCreateView(inflater, container, savedInstanceState);
    }

    // Reads fragment arguments, splits "###"-joined subforum paths, and wires up
    // the share URL and scroll listener for this section.
    @Override
    public void onStart() {
        super.onStart();
        // Hide list dividers unless the theme uses a distinct box color/border.
        if (!(application.getSession().getServer().serverBackground.contentEquals(application.getSession().getServer().serverBoxColor) && application.getSession().getServer().serverBoxBorder.contentEquals("0"))) {
            getListView().setDivider(null);
        }
        Bundle bundle = getArguments();
        subforum_id = bundle.getString("subforum_id");
        background = bundle.getString("background");
        screenTitle = bundle.getString("subforum_name");
        passedSubforum = subforum_id;
        if (bundle.containsKey("query")) {
            searchQuery = bundle.getString("query");
        }
        //Log.i("Forum Fiend", "**** New CategoriesFragment Instance ****");
        //Log.d("Forum Fiend","Passed subforum " + subforum_id);
        totalHash = subforum_id;
        // A "###"-joined id encodes the path from the first sub-only parent down to
        // the target forum; only the first part is requested from the server.
        if (subforum_id.contains("###")) {
            subforumParts = subforum_id.split("###");
            Log.d("Forum Fiend", "Subforum has " + subforumParts.length + " parts.");
            subforum_id = subforumParts[0];
            //hashId = subforumParts[1];
        } else {
            subforumParts = new String[1];
            subforumParts[0] = subforum_id;
        }
        Log.d("Forum Fiend", "Entering subforum " + subforum_id);
        server_address = application.getSession().getServer().serverAddress;
        if (getString(R.string.server_location).contentEquals("0")) {
            storagePrefix = server_address + "_";
        }
        userid = application.getSession().getServer().serverUserId;
        // NOTE(review): assigning serverPassword to 'username' — verify this is intentional.
        username = application.getSession().getServer().serverPassword;
        String shareId = subforum_id;
        //if(hashId != "0") {
        //	shareId = hashId;
        //}
        if (shareId.contentEquals("0")) {
            shareURL = application.getSession().getServer().serverAddress;
        } else {
            // forumSystem == 1 presumably means phpBB (viewforum.php URL scheme) — TODO confirm.
            if (application.getSession().forumSystem == 1) {
                shareURL = application.getSession().getServer().serverAddress + "/viewforum.php?f=" + shareId;
            }
        }
        getListView().setOnScrollListener(listScrolled);
        //Log.d("Forum Fiend","CF OnStart Completed");
    }

    // Persists the list scroll position for real forums (not the special pseudo-sections)
    // and cancels any in-flight download.
    @Override
    public void onPause() {
        if (!subforum_id.contentEquals("unread") && !subforum_id.contentEquals("participated") && !subforum_id.contentEquals("userrecent") && !subforum_id.contentEquals("favs") && !subforum_id.contentEquals("search") && !subforum_id.contentEquals("forum_favs")) {
            String scrollY = Integer.toString(getListView().getFirstVisiblePosition());
            SharedPreferences app_preferences = activity.getSharedPreferences("prefs", 0);
            SharedPreferences.Editor editor = app_preferences.edit();
            editor.putString(storagePrefix + "forumScrollPosition" + passedSubforum, scrollY);
            editor.commit();
        }
        endCurrentlyRunning();
        super.onPause();
    }

    // Restores titles, renders the cached copy of this forum (if any) for instant
    // display, then kicks off a fresh download.
    @Override
    public void onResume() {
        //Log.d("Forum Fiend","CF OnResume Began");
        activity.getActionBar().setTitle(screenTitle);
        activity.getActionBar().setSubtitle(screenSubtitle);
        //activity.getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        SharedPreferences app_preferences = activity.getSharedPreferences("prefs", 0);
        String cachedForum = app_preferences.getString(storagePrefix + "forum" + subforum_id, "n/a");
        if (!(cachedForum.contentEquals("n/a"))) {
            try {
                Object[][] forumObject = GsonHelper.customGson.fromJson(cachedForum, Object[][].class);
                parseCachedForums(forumObject);
                Log.d("Forum Fiend", "Forum cache available, using it");
            } catch (Exception ex) {
                // Cache corruption is non-fatal; fall through to the network load.
                if (ex.getMessage() != null) {
                    Log.e("Forum Fiend", ex.getMessage());
                }
            }
        }
        load_categories();
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
            activity.invalidateOptionsMenu();
        }
        //Log.d("Forum Fiend","CF OnResume Completed");
        super.onResume();
    }

    // Cancels the download task if it is still running.
    private void endCurrentlyRunning() {
        //Stop any running tasks
        if (categoriesDownloader != null) {
            if (categoriesDownloader.getStatus() == Status.RUNNING) {
                categoriesDownloader.cancel(true);
                Log.i("Forum Fiend", "Killed Currently Running");
            }
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        endCurrentlyRunning();
    }

    // Starts a fresh downloadCategories task, replacing any running one.
    // AsyncTasks are serialized by default on Honeycomb+, so a parallel executor is used there.
    private void load_categories() {
        Log.d("Forum Fiend", "CF Starting load_categories");
        endCurrentlyRunning();
        categoriesDownloader = new downloadCategories();
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
            categoriesDownloader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        } else {
            categoriesDownloader.execute();
        }
    }

    /**
     * Background fetch of everything this screen shows. The result is a 2-D Object array:
     * result[0] = sub-forum listing, result[1][0] = announcement topics,
     * result[2][0] = sticky topics, result[3][0] = regular/search/special topics,
     * result[4][0] = subscribed forums ("forum_favs" section only).
     */
    private class downloadCategories extends AsyncTask<String, Void, Object[][]> {

        @Override
        protected void onPreExecute() {
            Log.i("Forum Fiend", "downloadCategories onPreExecute");
            super.onPreExecute();
        }

        @SuppressLint("UseValueOf")
        @SuppressWarnings({ "unchecked", "rawtypes" })
        @Override
        protected Object[][] doInBackground(String... params) {
            Log.i("Forum Fiend", "downloadCategories doInBackground");
            if (activity == null) {
                Log.e("Forum Fiend", "Category activity is null!");
                return null;
            }
            isLoading = true;
            Object[][] result = new Object[5][50];
            Vector paramz;
            // Each RPC below is wrapped in its own try/catch so one failing call does
            // not prevent the remaining sections from loading (best-effort by design).
            //Do not get a forum listing if we are inside one of the special sections
            if (!subforum_id.contentEquals("timeline") && !subforum_id.contentEquals("unread") && !subforum_id.contentEquals("participated") && !subforum_id.contentEquals("userrecent") && !subforum_id.contentEquals("favs") && !subforum_id.contentEquals("search") && !subforum_id.contentEquals("forum_favs")) {
                // Forum list + announcements + stickies only on the first page, not when paging.
                if (!isExtraScrolling) {
                    try {
                        paramz = new Vector();
                        if (!subforum_id.contentEquals("0")) {
                            // NOTE(review): new Boolean(true) is deprecated; Boolean.TRUE would do.
                            paramz.addElement(new Boolean(true));
                            paramz.addElement(subforum_id);
                        }
                        //result[0] = (Object[]) application.getSession().performSynchronousCall("get_forum", paramz);
                        result[0] = (Object[]) application.getSession().performNewSynchronousCall("get_forum", paramz);
                        if (result[0] == null) {
                            Log.e("Forum Fiend", "shits null on " + subforum_id);
                        }
                    } catch (Exception ex) {
                        if (ex.getMessage() != null) {
                            Log.w("Forum Fiend", ex.getMessage());
                        }
                    }
                    try {
                        //First grab any announcement topics
                        paramz = new Vector();
                        paramz.addElement(subforum_id);
                        paramz.addElement(0);
                        paramz.addElement(20);
                        paramz.addElement("ANN");
                        //result[1][0] = application.getSession().performSynchronousCall("get_topic", paramz);
                        result[1][0] = application.getSession().performNewSynchronousCall("get_topic", paramz);
                    } catch (Exception ex) {
                        if (ex.getMessage() != null) {
                            Log.w("Forum Fiend", ex.getMessage());
                        }
                    }
                    try {
                        //Then grab any sticky topics
                        paramz = new Vector();
                        paramz.addElement(subforum_id);
                        paramz.addElement(0);
                        paramz.addElement(20);
                        paramz.addElement("TOP");
                        //result[2][0] = application.getSession().performSynchronousCall("get_topic", paramz);
                        result[2][0] = application.getSession().performNewSynchronousCall("get_topic", paramz);
                    } catch (Exception ex) {
                        if (ex.getMessage() != null) {
                            Log.w("Forum Fiend", ex.getMessage());
                        }
                    }
                }
                try {
                    //Grab the non-sticky topics
                    Log.d("Forum Fiend", "Getting topics " + startingPos + " through " + endingPos);
                    paramz = new Vector();
                    paramz.addElement(subforum_id);
                    paramz.addElement(startingPos);
                    paramz.addElement(endingPos);
                    //result[3][0] = application.getSession().performSynchronousCall("get_topic", paramz);
                    result[3][0] = application.getSession().performNewSynchronousCall("get_topic", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", ex.getMessage());
                    }
                }
            }
            //Handle topic listing for the Search function
            if (subforum_id.contentEquals("search")) {
                try {
                    paramz = new Vector();
                    paramz.addElement(searchQuery.getBytes());
                    paramz.addElement(startingPos);
                    paramz.addElement(endingPos);
                    //result[3][0] = application.getSession().performSynchronousCall("search_topic", paramz);
                    result[3][0] = application.getSession().performNewSynchronousCall("search_topic", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", ex.getMessage());
                    }
                }
            }
            if (!isExtraScrolling) {
                //Handle topic listing for the Unread category
                if (subforum_id.contentEquals("unread")) {
                    try {
                        paramz = new Vector();
                        //result[3][0] = application.getSession().performSynchronousCall("get_unread_topic", paramz);
                        result[3][0] = application.getSession().performNewSynchronousCall("get_unread_topic", paramz);
                    } catch (Exception ex) {
                        if (ex.getMessage() != null) {
                            Log.w("Forum Fiend", ex.getMessage());
                        }
                    }
                }
            }
            //Handle timeline get_latest_topic
            if (subforum_id.contentEquals("timeline")) {
                try {
                    paramz = new Vector();
                    //paramz.addElement(username.getBytes());
                    paramz.addElement(startingPos);
                    paramz.addElement(endingPos);
                    //paramz.addElement("");
                    //paramz.addElement(userid);
                    //result[3][0] = application.getSession().performSynchronousCall("get_participated_topic", paramz);
                    result[3][0] = application.getSession().performNewSynchronousCall("get_latest_topic", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", ex.getMessage());
                    }
                }
            }
            //Handle participated topics category
            if (subforum_id.contentEquals("participated")) {
                try {
                    paramz = new Vector();
                    paramz.addElement(username.getBytes());
                    paramz.addElement(startingPos);
                    paramz.addElement(endingPos);
                    paramz.addElement("");
                    paramz.addElement(userid);
                    //result[3][0] = application.getSession().performSynchronousCall("get_participated_topic", paramz);
                    result[3][0] = application.getSession().performNewSynchronousCall("get_participated_topic", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", ex.getMessage());
                    }
                }
            }
            //Handle subscription category
            if (subforum_id.contentEquals("favs")) {
                try {
                    paramz = new Vector();
                    paramz.addElement(startingPos);
                    paramz.addElement(endingPos);
                    //result[3][0] = application.getSession().performSynchronousCall("get_subscribed_topic", paramz);
                    result[3][0] = application.getSession().performNewSynchronousCall("get_subscribed_topic", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", ex.getMessage());
                    }
                }
            }
            //Handle favorites category
            if (subforum_id.contentEquals("forum_favs")) {
                try {
                    paramz = new Vector();
                    //result[0] = (Object[]) client.execute("get_subscribed_forum", paramz);
                    //result[4][0] = application.getSession().performSynchronousCall("get_subscribed_forum", paramz);
                    result[4][0] = application.getSession().performNewSynchronousCall("get_subscribed_forum", paramz);
                } catch (Exception ex) {
                    if (ex.getMessage() != null) {
                        Log.w("Forum Fiend", "Favorites Error: " + ex.getMessage());
                    }
                }
            }
            return result;
        }

        // Caches the downloaded payload as JSON (first page only) and re-renders,
        // skipping the render entirely when the payload is unchanged from the cache.
        protected void onPostExecute(final Object[][] result) {
            Log.i("Forum Fiend", "downloadCategories onPostExecute");
            if (activity == null) {
                return;
            }
            if (result == null) {
                Toast toast = Toast.makeText(activity, "Error pulling data from the server, ecCFDL", Toast.LENGTH_SHORT);
                toast.show();
                return;
            }
            Log.i("Forum Fiend", "Recieved category data!");
            initialLoadComplete = true;
            isLoading = false;
            String objectString = GsonHelper.customGson.toJson(result);
            //Log.i("Forum Fiend",objectString);
            SharedPreferences app_preferences = activity.getSharedPreferences("prefs", 0);
            String cachedForum = app_preferences.getString(storagePrefix + "forum" + subforum_id, "n/a");
            if (objectString.contentEquals(cachedForum)) {
                // Identical to what onResume() already rendered from cache — nothing to do.
                return;
            } else {
                if (!isExtraScrolling) {
                    SharedPreferences.Editor editor = app_preferences.edit();
                    editor.putString(storagePrefix + "forum" + subforum_id, objectString);
                    editor.commit();
                }
            }
            if (activity != null) {
                // Round-trip through JSON so the parser sees the same shapes as the cached path.
                Object[][] forumObject = GsonHelper.customGson.fromJson(objectString, Object[][].class);
                parseCachedForums(forumObject);
            }
        }
    }

    private ArrayList<Category> categoryList;
    private boolean initialParseDone = false;

    // Converts the raw RPC payload into Category rows (announcements, stickies,
    // sub-forums, topics) and rebuilds categoryList, restoring the saved scroll
    // position on the first parse.
    @SuppressWarnings("rawtypes")
    private void parseCachedForums(Object[][] result) {
        if (categoryList == null || !isExtraScrolling) {
            categoryList = new ArrayList<Category>();
        }
        int retainedPosition = getListView().getFirstVisiblePosition();
        if (!initialParseDone) {
            SharedPreferences app_preferences = activity.getSharedPreferences("prefs", 0);
            String savedForumPosition = app_preferences.getString(storagePrefix + "forumScrollPosition" + passedSubforum, "0");
            retainedPosition = Integer.parseInt(savedForumPosition);
        }
        //Announcement Topics
        for (Object o : result[1]) {
            if (o != null) {
                LinkedTreeMap map = (LinkedTreeMap) o;
                if (map.containsKey("topics")) {
                    ArrayList topics = (ArrayList) map.get("topics");
                    for (Object t : topics) {
                        LinkedTreeMap topicMap = (LinkedTreeMap) t;
                        Category ca = new Category();
                        ca.category_name = (String) topicMap.get("topic_title");
                        ca.subforum_id = subforum_id;
                        //if(!hashId.contentEquals("0")) {
                        //	ca.subforum_id = hashId;
                        //}
                        ca.category_id = (String) topicMap.get("topic_id");
                        ca.category_lastupdate = (String) topicMap.get("last_reply_time");
                        ca.category_lastthread = (String) topicMap.get("topic_author_name");
                        ca.topicSticky = "Y";
                        ca.categoryType = "C";
                        ca.categoryColor = background;
                        // Gson parses numbers as doubles; strip the ".0" suffix for display.
                        if (topicMap.get("reply_number") != null) {
                            ca.thread_count = topicMap.get("reply_number").toString().replace(".0", "");
                        }
                        if (topicMap.get("view_number") != null) {
                            ca.view_count = topicMap.get("view_number").toString().replace(".0", "");
                        }
                        if (topicMap.get("new_post") != null) {
                            ca.hasNewTopic = (Boolean) topicMap.get("new_post");
                        }
                        if (topicMap.get("is_closed") != null) {
                            ca.isLocked = (Boolean) topicMap.get("is_closed");
                        }
                        if (topicMap.containsKey("icon_url")) {
                            if (topicMap.get("icon_url") != null) {
                                ca.categoryIcon = (String) topicMap.get("icon_url");
                            }
                        }
                        if (topicMap.get("can_stick") != null) {
                            ca.canSticky = (Boolean) topicMap.get("can_stick");
                        }
                        if (topicMap.get("can_delete") != null) {
                            ca.canDelete = (Boolean) topicMap.get("can_delete");
                        }
                        if (topicMap.get("can_close") != null) {
                            ca.canLock = (Boolean) topicMap.get("can_close");
                        }
                        categoryList.add(ca);
                    }
                }
            }
        }
        //Sticky Topics
        for (Object o : result[2]) {
            if (o != null) {
                LinkedTreeMap map = (LinkedTreeMap) o;
                if (map.containsKey("topics")) {
                    ArrayList topics = (ArrayList) map.get("topics");
                    for (Object t : topics) {
                        LinkedTreeMap topicMap = (LinkedTreeMap) t;
                        Category ca = new Category();
                        ca.category_name = (String) topicMap.get("topic_title");
                        ca.subforum_id = subforum_id;
                        //if(!hashId.contentEquals("0")) {
                        //	ca.subforum_id = hashId;
                        //}
                        ca.category_id = (String) topicMap.get("topic_id");
                        ca.category_lastupdate = (String) topicMap.get("last_reply_time");
                        ca.category_lastthread = (String) topicMap.get("topic_author_name");
                        ca.topicSticky = "Y";
                        ca.categoryType = "C";
                        ca.categoryColor = background;
                        if (topicMap.get("reply_number") != null) {
                            ca.thread_count = topicMap.get("reply_number").toString().replace(".0", "");
                        }
                        if (topicMap.get("view_number") != null) {
                            ca.view_count = topicMap.get("view_number").toString().replace(".0", "");
                        }
                        if (topicMap.get("new_post") != null) {
                            ca.hasNewTopic = (Boolean) topicMap.get("new_post");
                        }
                        if (topicMap.containsKey("icon_url")) {
                            if (topicMap.get("icon_url") != null) {
                                ca.categoryIcon = (String) topicMap.get("icon_url");
                            }
                        }
                        if (topicMap.get("is_closed") != null) {
                            ca.isLocked = (Boolean) topicMap.get("is_closed");
                        }
                        if (topicMap.get("can_stick") != null) {
                            ca.canSticky = (Boolean) topicMap.get("can_stick");
                        }
                        if (topicMap.get("can_delete") != null) {
                            ca.canDelete = (Boolean) topicMap.get("can_delete");
                        }
                        if (topicMap.get("can_close") != null) {
                            ca.canLock = (Boolean) topicMap.get("can_close");
                        }
                        categoryList.add(ca);
                    }
                }
            }
        }
        Log.d("Forum Fiend", "Starting category parse!");
        //Forums
        if (result[0] != null) {
            ArrayList<Category> forumz = CategoryParser.parseCategories(result[0], subforum_id, background);
            Log.d("Forum Fiend", "Forums parsed!");
            String currentHash = subforumParts[0];
            Log.d("Forum Fiend", "Hash Size: " + subforumParts.length);
            if (subforumParts.length == 1) {
                for (Category c : forumz) {
                    categoryList.add(c);
                }
            } else {
                // Walk the "###"-joined path down the children tree until the target
                // hash is reached, then show that level's forums.
                for (int i = 1; i < subforumParts.length; i++) {
                    currentHash = currentHash + "###" + subforumParts[i];
                    Log.d("Forum Fiend", "Checking hash: " + currentHash + " (total hash is " + totalHash + ")");
                    ArrayList<Category> tempForums = null;
                    for (Category c : forumz) {
                        if (c.children != null && c.category_id.contentEquals(currentHash)) {
                            tempForums = c.children;
                        }
                    }
                    if (tempForums != null) {
                        forumz = tempForums;
                        if (currentHash.contentEquals(totalHash)) {
                            for (Category c : forumz) {
                                categoryList.add(c);
                            }
                        }
                    }
                }
            }
        }
        Log.d("Forum Fiend", "Finished category parse!");
        //sdf
        /* for(Object o: result[0]) { if(o != null) { LinkedTreeMap map = (LinkedTreeMap) o; Category ca = new Category(); ca.category_name = (String) map.get("forum_name"); ca.subforum_id = subforum_id; ca.category_id = (String) map.get("forum_id"); ca.categoryType = "S"; ca.categoryColor = background; if(map.containsKey("logo_url")) { if(map.get("logo_url") != null) { ca.categoryIcon = (String) map.get("logo_url"); } } if(map.containsKey("url")) { if(map.get("url") != null) { ca.category_URL = (String) map.get("url"); } } if(map.get("is_subscribed") != null) { ca.isSubscribed = (Boolean) map.get("is_subscribed"); } if(map.get("can_subscribe") != null) { ca.canSubscribe = (Boolean) map.get("can_subscribe"); } if(map.get("new_post") != null) { ca.hasNewTopic = (Boolean) map.get("new_post"); } Boolean subOnly = false; if(map.containsKey("sub_only")) { if(map.get("sub_only") != null) { subOnly = (Boolean) map.get("sub_only"); ca.hasChildren = true; Log.e("Forum Fiend","aaa sub only on " + ca.category_id); } } if(subOnly) { if(map.containsKey("child")) { if(map.get("child") != null) { ca.category_id = subforum_id + "###" + (String) map.get("forum_id"); //Fix for forums with custom URL layouts ca.category_URL = "n/a"; if(!hashId.contentEquals("0")) { //Log.d("Forum Fiend","Building Child Array from " + map.get("child").toString()); ArrayList childArray = (ArrayList) map.get("child"); for(Object childForum : childArray) { LinkedTreeMap childMap = (LinkedTreeMap) childForum; String parentForum = (String) childMap.get("parent_id"); if(parentForum.contentEquals(hashId)) { ca = new Category(); ca.category_name = (String) childMap.get("forum_name"); ca.subforum_id = hashId; //ca.subforum_id = hashId; ca.category_id = (String) childMap.get("forum_id"); ca.categoryType = "S"; ca.categoryColor = background;
if(childMap.containsKey("logo_url")) { if(childMap.get("logo_url") != null) { ca.categoryIcon = (String) childMap.get("logo_url"); } } if(childMap.get("new_post") != null) { ca.hasNewTopic = (Boolean) childMap.get("new_post"); } if(childMap.containsKey("sub_only")) { if(childMap.get("sub_only") != null) { ca.hasChildren = (Boolean) childMap.get("sub_only"); if(ca.hasChildren) { Log.e("Forum Fiend","bbb sub only on " + ca.category_id); if(childMap.containsKey("child")) { if(childMap.get("child") != null) { Log.e("Forum Fiend","jesus fuck even more children here for " + ca.category_id); } } } } } if(childMap.containsKey("url")) { if(childMap.get("url") != null) { ca.category_URL = (String) childMap.get("url"); } } if(childMap.get("is_subscribed") != null) { ca.isSubscribed = (Boolean) childMap.get("is_subscribed"); } if(childMap.get("can_subscribe") != null) { ca.canSubscribe = (Boolean) childMap.get("can_subscribe"); } if(childMap.containsKey("child")) { if(childMap.get("child") != null) { Log.e("Forum Fiend","children for " + ca.category_id); ca.hasChildren = true; ArrayList childerArray = (ArrayList) childMap.get("child"); for(Object childForumr : childerArray) { LinkedTreeMap childMapr = (LinkedTreeMap) childForumr; if(childMapr.get("new_post") != null) { boolean hasNew = (Boolean) childMapr.get("new_post"); if(hasNew) { ca.hasNewTopic = hasNew; } } } } } categoryList.add(ca); } } } } } } else { //Check for unread posts in children if(map.containsKey("child")) { if(map.get("child") != null) { ca.hasChildren = true; ArrayList childArray = (ArrayList) map.get("child"); for(Object childForum : childArray) { LinkedTreeMap childMap = (LinkedTreeMap) childForum; if(childMap.get("new_post") != null) { boolean hasNew = (Boolean) childMap.get("new_post"); if(hasNew) { ca.hasNewTopic = hasNew; } } } } } } if(hashId.contentEquals("0")) { categoryList.add(ca); } } } } */ //Non-Sticky Topics if(result[3] == null || result[3].length == 0) { canScrollMoreThreads = false; } 
for(Object o: result[3]) { if(o != null) { LinkedTreeMap map = (LinkedTreeMap) o; if(map.containsKey("topics")) { ArrayList topics = (ArrayList) map.get("topics"); for(Object t:topics) { LinkedTreeMap topicMap = (LinkedTreeMap) t; Category ca = new Category(); ca.category_name = (String) topicMap.get("topic_title"); if(topicMap.get("forum_id") != null) { ca.subforum_id = (String) topicMap.get("forum_id"); } else { ca.subforum_id = subforum_id; //if(!hashId.contentEquals("0")) { // ca.subforum_id = hashId; //} } ca.category_id = (String) topicMap.get("topic_id"); ca.category_lastupdate = (String) topicMap.get("last_reply_time"); if(topicMap.get("topic_author_name") != null) { ca.category_lastthread = (String) topicMap.get("topic_author_name"); } else { ca.category_lastthread = (String) topicMap.get("forum_name"); } ca.categoryType = "C"; ca.categoryColor = background; if(topicMap.get("reply_number") != null) { ca.thread_count = topicMap.get("reply_number").toString().replace(".0", ""); } if(topicMap.get("view_number") != null) { ca.view_count = topicMap.get("view_number").toString().replace(".0", ""); } //Log.d("Forum Fiend",(String) topicMap.get("reply_number")); if(topicMap.get("new_post") != null) { ca.hasNewTopic = (Boolean) topicMap.get("new_post"); } if(topicMap.containsKey("icon_url")) { if(topicMap.get("icon_url") != null) { ca.categoryIcon = (String) topicMap.get("icon_url"); } } if(topicMap.get("can_stick") != null) { ca.canSticky = (Boolean) topicMap.get("can_stick"); } if(topicMap.get("can_delete") != null) { ca.canDelete = (Boolean) topicMap.get("can_delete"); } if(topicMap.get("can_close") != null) { ca.canLock = (Boolean) topicMap.get("can_close"); } if(topicMap.get("is_closed") != null) { ca.isLocked = (Boolean) topicMap.get("is_closed"); } categoryList.add(ca); } } } } for(Object o: result[4]) { if(o != null) { Log.i("Forum Fiend","We have some favs!"); LinkedTreeMap map = (LinkedTreeMap) o; if(map.containsKey("forums")) { ArrayList forums = 
(ArrayList) map.get("forums"); for(Object f:forums) { LinkedTreeMap forumMap = (LinkedTreeMap) f; Category ca = new Category(); ca.category_name = (String) forumMap.get("forum_name"); ca.subforum_id = subforum_id; ca.category_id = (String) forumMap.get("forum_id"); ca.categoryType = "S"; ca.categoryColor = background; if(forumMap.containsKey("icon_url")) { if(forumMap.get("icon_url") != null) { ca.categoryIcon = (String) forumMap.get("icon_url"); } } ca.isSubscribed = true; if(forumMap.get("new_post") != null) { ca.hasNewTopic = (Boolean) forumMap.get("new_post"); } categoryList.add(ca); } } else { Log.e("Forum Fiend","Favs has no forums!"); } } else { } } setListAdapter(new CategoryAdapter(categoryList,activity,application)); registerForContextMenu(getListView()); getListView().setOnItemClickListener(new AdapterView.OnItemClickListener() { public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,long arg3) { Category sender = (Category) arg0.getItemAtPosition(arg2); if(sender == null) { return; } if(categorySelected == null) { return; } categorySelected.onCategorySelected(sender); } }); getListView().setSelection(retainedPosition); initialParseDone = true; } /* private void setChatThread() { application.getSession().getServer().chatThread = clicked_category.category_id; application.getSession().getServer().chatForum = clicked_category.subforum_id; application.getSession().getServer().chatName = clicked_category.category_name; application.getSession().updateServer(); chatChanged.onChatChanged(application.getSession().getServer().chatThread); } */ public void onCreateContextMenu(ContextMenu menu, View v,ContextMenuInfo menuInfo) { String the_userid = application.getSession().getServer().serverUserId; AdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo; clicked_category = (Category) CategoriesFragment.this.getListView().getItemAtPosition(info.position); if(the_userid.contentEquals("0")) return; super.onCreateContextMenu(menu, v, menuInfo); 
menu.setHeaderTitle(clicked_category.category_name); MenuInflater inflater = activity.getMenuInflater(); inflater.inflate(R.menu.categories_context, menu); MenuItem ubsubItem = menu.findItem(R.id.categories_unsubscribe); MenuItem subItem = menu.findItem(R.id.categories_subscribe); MenuItem stickyItem = menu.findItem(R.id.categories_context_sticky); MenuItem lockItem = menu.findItem(R.id.categories_context_lock); MenuItem deleteItem = menu.findItem(R.id.categories_context_delete); MenuItem subscribeItem = menu.findItem(R.id.categories_add_favorite); MenuItem unsubscribeItem = menu.findItem(R.id.categories_remove_favorite); if(clicked_category.categoryType.contentEquals("S")) { ubsubItem.setVisible(false); subItem.setVisible(false); stickyItem.setVisible(false); lockItem.setVisible(false); deleteItem.setVisible(false); if(clicked_category.canSubscribe) { subscribeItem.setVisible(true); } else { subscribeItem.setVisible(false); } if(clicked_category.isSubscribed) { unsubscribeItem.setVisible(true); subscribeItem.setVisible(false); } else { unsubscribeItem.setVisible(false); } } else { unsubscribeItem.setVisible(false); subscribeItem.setVisible(false); if(clicked_category.canSticky) { stickyItem.setVisible(true); if(clicked_category.topicSticky.contentEquals("N")) { stickyItem.setTitle("Stick Topic"); } else { stickyItem.setTitle("Unstick Topic"); } } else { stickyItem.setVisible(false); } if(clicked_category.canDelete) { deleteItem.setVisible(true); } else { deleteItem.setVisible(false); } if(clicked_category.canLock) { lockItem.setVisible(true); if(clicked_category.isLocked) { lockItem.setTitle("Unlock Topic"); } else { lockItem.setTitle("Lock Topic"); } } else { lockItem.setVisible(false); } if(subforum_id.contentEquals("favs")) { ubsubItem.setVisible(true); subItem.setVisible(false); } else { ubsubItem.setVisible(false); subItem.setVisible(true); } } } public boolean onContextItemSelected(MenuItem item) { switch (item.getItemId()) { case 
R.id.categories_unsubscribe: new unsubscribeTopic().execute(clicked_category.category_id); return true; case R.id.categories_subscribe: new subscribeTopic().execute(clicked_category.category_id); return true; case R.id.categories_context_sticky: if(clicked_category.topicSticky.contentEquals("N")) { new stickyTopic().execute(clicked_category.category_id,"1"); } else { new stickyTopic().execute(clicked_category.category_id,"2"); } return true; case R.id.categories_context_lock: if(clicked_category.isLocked) { new lockTopic().execute(clicked_category.category_id,"1"); } else { new lockTopic().execute(clicked_category.category_id,"2"); } return true; case R.id.categories_context_delete_yes: new deleteTopic().execute(clicked_category.category_id); return true; case R.id.categories_add_favorite: new addToFavorites().execute(clicked_category.category_id); return true; case R.id.categories_remove_favorite: new removeFromFavorites().execute(clicked_category.category_id); return true; default: return super.onContextItemSelected(item); } } @SuppressLint("NewApi") @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { if(userid != null) { if(!userid.contentEquals("0")) { inflater.inflate(R.menu.categories_menu, menu); } } super.onCreateOptionsMenu(menu, inflater); } @Override public void onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); if(userid != null) { if(!userid.contentEquals("0") && menu != null) { if(subforum_id.contentEquals("0") || subforum_id.contentEquals("participated") || subforum_id.contentEquals("favs") || subforum_id.contentEquals("search")) { MenuItem item = menu.findItem(R.id.cat_mark_read); if(item != null) { item.setVisible(false); } } else { MenuItem item = menu.findItem(R.id.cat_mark_read); if(item != null) { if(ForegroundColorSetter.getForegroundDark(background)) { item.setIcon(R.drawable.ic_action_read_dark); } } } if(subforum_id.contentEquals("0") || subforum_id.contentEquals("participated") || 
subforum_id.contentEquals("favs") || subforum_id.contentEquals("userrecent") || subforum_id.contentEquals("search")) { MenuItem item2 = menu.findItem(R.id.cat_new_thread); if(item2 != null) { item2.setVisible(false); } } else { MenuItem item2 = menu.findItem(R.id.cat_new_thread); if(item2 != null) { if(ForegroundColorSetter.getForegroundDark(background)) { item2.setIcon(R.drawable.ic_action_new_dark); } } } MenuItem browserItem = menu.findItem(R.id.cat_open_browser); if(shareURL.contentEquals("0")) { browserItem.setVisible(false); } else { browserItem.setVisible(true); } } } } @Override public boolean onOptionsItemSelected (MenuItem item) { switch (item.getItemId()) { case R.id.cat_new_thread: start_post(); return true; case R.id.cat_mark_read: markAsRead(); return true; case R.id.cat_open_browser: Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(shareURL)); startActivity(browserIntent); return true; default: return super.onOptionsItemSelected(item); } } private void start_post() { if(subforum_id.contentEquals("0") || userid.contentEquals("0")) { Toast toast = Toast.makeText(activity, "You are not allowed to post here!", Toast.LENGTH_LONG); toast.show(); return; } Intent myIntent = new Intent(activity, New_Post.class); Bundle bundle = new Bundle(); bundle.putString("postid",(String) "0"); bundle.putString("parent",(String) "0"); bundle.putString("category",(String) subforum_id); bundle.putString("subforum_id",(String) subforum_id); bundle.putString("original_text",(String) ""); bundle.putString("boxTitle",(String) "New Thread"); bundle.putString("picture",(String) "0"); bundle.putString("subject",(String) ""); bundle.putInt("post_type",(Integer) 1); bundle.putString("color",(String) background); myIntent.putExtras(bundle); startActivity(myIntent); } //Category Selected Interface public interface onCategorySelectedListener { public abstract void onCategorySelected(Category ca); } private onCategorySelectedListener categorySelected = null; public void 
setOnCategorySelectedListener(onCategorySelectedListener l) { categorySelected = l; } private void markAsRead() { new readMarker().execute(subforum_id); } private class readMarker extends AsyncTask<String, Void, String> { @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); if(!params[0].contentEquals("0") && !params[0].contentEquals("unread")) { paramz.addElement(params[0]); } //application.getSession().performSynchronousCall("mark_all_as_read", paramz); application.getSession().performNewSynchronousCall("mark_all_as_read", paramz); } catch (Exception ex) { Log.w("Discussions", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } if(subforum_id.contentEquals("unread")) { activity.finish(); }else { load_categories(); Toast toast = Toast.makeText(activity, "Posts marked read!", Toast.LENGTH_LONG); toast.show(); } } } private class subscribeTopic extends AsyncTask<String, Void, String> { @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... 
params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); //application.getSession().performSynchronousCall("subscribe_topic", paramz); application.getSession().performNewSynchronousCall("subscribe_topic", paramz); } catch (Exception ex) { Log.w("Discussions", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } Toast toast = Toast.makeText(activity, "Subscribed!", Toast.LENGTH_SHORT); toast.show(); } } private class unsubscribeTopic extends AsyncTask<String, Void, String> { @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); //application.getSession().performSynchronousCall("unsubscribe_topic", paramz); application.getSession().performNewSynchronousCall("unsubscribe_topic", paramz); } catch (Exception ex) { Log.w("Discussions", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } load_categories(); } } private class stickyTopic extends AsyncTask<String, Void, String> { // parm[0] - (string)topic_id // parm[1] - (int)mode (1 - stick; 2 - unstick) @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... 
params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); paramz.addElement(Integer.parseInt(params[1])); //application.getSession().performSynchronousCall("m_stick_topic", paramz); application.getSession().performNewSynchronousCall("m_stick_topic", paramz); } catch (Exception ex) { Log.w("Forum Fiend", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } load_categories(); } } private class lockTopic extends AsyncTask<String, Void, String> { // parm[0] - (string)topic_id // parm[1] - (int)mode (1 - unlock; 2 - lock) @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); paramz.addElement(Integer.parseInt(params[1])); //application.getSession().performSynchronousCall("m_close_topic", paramz); application.getSession().performNewSynchronousCall("m_close_topic", paramz); } catch (Exception ex) { Log.w("Forum Fiend", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } load_categories(); } } private class deleteTopic extends AsyncTask<String, Void, String> { // parm[0] - (string)topic_id @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... 
params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); paramz.addElement(2); //application.getSession().performSynchronousCall("m_delete_topic", paramz); application.getSession().performNewSynchronousCall("m_delete_topic", paramz); } catch (Exception ex) { Log.w("Forum Fiend", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } load_categories(); } } private OnScrollListener listScrolled = new OnScrollListener() { @Override public void onScroll(AbsListView arg0, int arg1, int arg2, int arg3) { //do nothing } @Override public void onScrollStateChanged(AbsListView arg0, int arg1) { if(!canScrollMoreThreads || isLoading) { return; } if(categoryList == null) { return; } if(categoryList.size() < 20) { return; } if(!initialLoadComplete) { return; } if(arg1 == SCROLL_STATE_IDLE) { if(arg0.getLastVisiblePosition() >= categoryList.size() - 5) { isExtraScrolling = true; startingPos = endingPos + 1; endingPos = startingPos + 20; categoriesDownloader = new downloadCategories(); categoriesDownloader.execute(); } } } }; private class addToFavorites extends AsyncTask<String, Void, String> { @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... 
params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); //application.getSession().performSynchronousCall("subscribe_forum", paramz); application.getSession().performNewSynchronousCall("subscribe_forum", paramz); } catch (Exception ex) { Log.w("Discussions", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } Toast toast = Toast.makeText(activity, "Forum added to favorites!", Toast.LENGTH_SHORT); toast.show(); load_categories(); } } private class removeFromFavorites extends AsyncTask<String, Void, String> { @SuppressLint("UseValueOf") @SuppressWarnings({ "unchecked", "rawtypes" }) @Override protected String doInBackground(String... params) { if(activity == null) { return null; } String result = ""; try { Vector paramz; paramz = new Vector(); paramz.addElement(params[0]); //application.getSession().performSynchronousCall("unsubscribe_forum", paramz); application.getSession().performNewSynchronousCall("unsubscribe_forum", paramz); } catch (Exception ex) { Log.w("Discussions", ex.getMessage()); } return result; } protected void onPostExecute(final String result) { if(activity == null) { return; } Toast toast = Toast.makeText(activity, "Forum removed from favorites!", Toast.LENGTH_SHORT); toast.show(); load_categories(); } } }
/** */ package CIM15.IEC61970.Meas; import java.util.Collection; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.util.BasicInternalEList; import org.eclipse.emf.ecore.util.InternalEList; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Discrete</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link CIM15.IEC61970.Meas.Discrete#getNormalValue <em>Normal Value</em>}</li> * <li>{@link CIM15.IEC61970.Meas.Discrete#getMinValue <em>Min Value</em>}</li> * <li>{@link CIM15.IEC61970.Meas.Discrete#getCommand <em>Command</em>}</li> * <li>{@link CIM15.IEC61970.Meas.Discrete#getDiscreteValues <em>Discrete Values</em>}</li> * <li>{@link CIM15.IEC61970.Meas.Discrete#getValueAliasSet <em>Value Alias Set</em>}</li> * <li>{@link CIM15.IEC61970.Meas.Discrete#getMaxValue <em>Max Value</em>}</li> * </ul> * </p> * * @generated */ public class Discrete extends Measurement { /** * The default value of the '{@link #getNormalValue() <em>Normal Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getNormalValue() * @generated * @ordered */ protected static final int NORMAL_VALUE_EDEFAULT = 0; /** * The cached value of the '{@link #getNormalValue() <em>Normal Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getNormalValue() * @generated * @ordered */ protected int normalValue = NORMAL_VALUE_EDEFAULT; /** * This is true if the Normal Value attribute has been set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ protected boolean normalValueESet; /** * The default value of the '{@link #getMinValue() <em>Min Value</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMinValue() * @generated * @ordered */ protected static final int MIN_VALUE_EDEFAULT = 0; /** * The cached value of the '{@link #getMinValue() <em>Min Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMinValue() * @generated * @ordered */ protected int minValue = MIN_VALUE_EDEFAULT; /** * This is true if the Min Value attribute has been set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ protected boolean minValueESet; /** * The cached value of the '{@link #getCommand() <em>Command</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCommand() * @generated * @ordered */ protected Command command; /** * The cached value of the '{@link #getDiscreteValues() <em>Discrete Values</em>}' reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDiscreteValues() * @generated * @ordered */ protected EList<DiscreteValue> discreteValues; /** * The cached value of the '{@link #getValueAliasSet() <em>Value Alias Set</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getValueAliasSet() * @generated * @ordered */ protected ValueAliasSet valueAliasSet; /** * The default value of the '{@link #getMaxValue() <em>Max Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMaxValue() * @generated * @ordered */ protected static final int MAX_VALUE_EDEFAULT = 0; /** * The cached value of the '{@link #getMaxValue() <em>Max Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMaxValue() * @generated * @ordered */ protected int maxValue = MAX_VALUE_EDEFAULT; /** * This is true if the Max Value attribute has been set. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ protected boolean maxValueESet; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Discrete() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return MeasPackage.Literals.DISCRETE; } /** * Returns the value of the '<em><b>Normal Value</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Normal Value</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Normal Value</em>' attribute. * @see #isSetNormalValue() * @see #unsetNormalValue() * @see #setNormalValue(int) * @generated */ public int getNormalValue() { return normalValue; } /** * Sets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getNormalValue <em>Normal Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Normal Value</em>' attribute. * @see #isSetNormalValue() * @see #unsetNormalValue() * @see #getNormalValue() * @generated */ public void setNormalValue(int newNormalValue) { normalValue = newNormalValue; normalValueESet = true; } /** * Unsets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getNormalValue <em>Normal Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isSetNormalValue() * @see #getNormalValue() * @see #setNormalValue(int) * @generated */ public void unsetNormalValue() { normalValue = NORMAL_VALUE_EDEFAULT; normalValueESet = false; } /** * Returns whether the value of the '{@link CIM15.IEC61970.Meas.Discrete#getNormalValue <em>Normal Value</em>}' attribute is set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return whether the value of the '<em>Normal Value</em>' attribute is set. 
* @see #unsetNormalValue() * @see #getNormalValue() * @see #setNormalValue(int) * @generated */ public boolean isSetNormalValue() { return normalValueESet; } /** * Returns the value of the '<em><b>Min Value</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Min Value</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Min Value</em>' attribute. * @see #isSetMinValue() * @see #unsetMinValue() * @see #setMinValue(int) * @generated */ public int getMinValue() { return minValue; } /** * Sets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMinValue <em>Min Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Min Value</em>' attribute. * @see #isSetMinValue() * @see #unsetMinValue() * @see #getMinValue() * @generated */ public void setMinValue(int newMinValue) { minValue = newMinValue; minValueESet = true; } /** * Unsets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMinValue <em>Min Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isSetMinValue() * @see #getMinValue() * @see #setMinValue(int) * @generated */ public void unsetMinValue() { minValue = MIN_VALUE_EDEFAULT; minValueESet = false; } /** * Returns whether the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMinValue <em>Min Value</em>}' attribute is set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return whether the value of the '<em>Min Value</em>' attribute is set. * @see #unsetMinValue() * @see #getMinValue() * @see #setMinValue(int) * @generated */ public boolean isSetMinValue() { return minValueESet; } /** * Returns the value of the '<em><b>Command</b></em>' reference. * It is bidirectional and its opposite is '{@link CIM15.IEC61970.Meas.Command#getDiscrete <em>Discrete</em>}'. 
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Command</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Command</em>' reference.
 * @see #setCommand(Command)
 * @see CIM15.IEC61970.Meas.Command#getDiscrete
 * @generated
 */
public Command getCommand() {
    if (command != null && command.eIsProxy()) {
        InternalEObject oldCommand = (InternalEObject)command;
        command = (Command)eResolveProxy(oldCommand);
        if (command != oldCommand) {
            // NOTE(review): the stock EMF template fires a RESOLVE notification here;
            // this model appears to have been generated without notification support
            // (basicSetCommand below builds no notifications either), leaving the branch empty.
        }
    }
    return command;
}

/**
 * Returns the raw, possibly still-unresolved-proxy '<em>Command</em>' value.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public Command basicGetCommand() {
    return command;
}

/**
 * Caches the new '<em>Command</em>' value without touching the inverse reference;
 * used by {@link #setCommand(Command)} while the bidirectional link is rewired.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetCommand(Command newCommand, NotificationChain msgs) {
    // oldCommand is retained by the generator template; unused here because
    // change notifications appear to be stripped from this model.
    Command oldCommand = command;
    command = newCommand;
    return msgs;
}

/**
 * Sets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getCommand <em>Command</em>}' reference,
 * maintaining both ends of the bidirectional Command/Discrete association.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Command</em>' reference.
 * @see #getCommand()
 * @generated
 */
public void setCommand(Command newCommand) {
    if (newCommand != command) {
        NotificationChain msgs = null;
        // Detach from the old command's inverse, attach to the new one, then cache locally.
        if (command != null)
            msgs = ((InternalEObject)command).eInverseRemove(this, MeasPackage.COMMAND__DISCRETE, Command.class, msgs);
        if (newCommand != null)
            msgs = ((InternalEObject)newCommand).eInverseAdd(this, MeasPackage.COMMAND__DISCRETE, Command.class, msgs);
        msgs = basicSetCommand(newCommand, msgs);
        if (msgs != null) msgs.dispatch();
    }
}

/**
 * Returns the value of the '<em><b>Discrete Values</b></em>' reference list.
 * The list contents are of type {@link CIM15.IEC61970.Meas.DiscreteValue}.
 * It is bidirectional and its opposite is '{@link CIM15.IEC61970.Meas.DiscreteValue#getDiscrete <em>Discrete</em>}'.
* <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Discrete Values</em>' reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Discrete Values</em>' reference list. * @see CIM15.IEC61970.Meas.DiscreteValue#getDiscrete * @generated */ public EList<DiscreteValue> getDiscreteValues() { if (discreteValues == null) { discreteValues = new BasicInternalEList<DiscreteValue>(DiscreteValue.class); } return discreteValues; } /** * Returns the value of the '<em><b>Value Alias Set</b></em>' reference. * It is bidirectional and its opposite is '{@link CIM15.IEC61970.Meas.ValueAliasSet#getDiscretes <em>Discretes</em>}'. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value Alias Set</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value Alias Set</em>' reference. * @see #setValueAliasSet(ValueAliasSet) * @see CIM15.IEC61970.Meas.ValueAliasSet#getDiscretes * @generated */ public ValueAliasSet getValueAliasSet() { if (valueAliasSet != null && valueAliasSet.eIsProxy()) { InternalEObject oldValueAliasSet = (InternalEObject)valueAliasSet; valueAliasSet = (ValueAliasSet)eResolveProxy(oldValueAliasSet); if (valueAliasSet != oldValueAliasSet) { } } return valueAliasSet; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ValueAliasSet basicGetValueAliasSet() { return valueAliasSet; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetValueAliasSet(ValueAliasSet newValueAliasSet, NotificationChain msgs) { ValueAliasSet oldValueAliasSet = valueAliasSet; valueAliasSet = newValueAliasSet; return msgs; } /** * Sets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getValueAliasSet <em>Value Alias Set</em>}' reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value Alias Set</em>' reference. * @see #getValueAliasSet() * @generated */ public void setValueAliasSet(ValueAliasSet newValueAliasSet) { if (newValueAliasSet != valueAliasSet) { NotificationChain msgs = null; if (valueAliasSet != null) msgs = ((InternalEObject)valueAliasSet).eInverseRemove(this, MeasPackage.VALUE_ALIAS_SET__DISCRETES, ValueAliasSet.class, msgs); if (newValueAliasSet != null) msgs = ((InternalEObject)newValueAliasSet).eInverseAdd(this, MeasPackage.VALUE_ALIAS_SET__DISCRETES, ValueAliasSet.class, msgs); msgs = basicSetValueAliasSet(newValueAliasSet, msgs); if (msgs != null) msgs.dispatch(); } } /** * Returns the value of the '<em><b>Max Value</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Max Value</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Max Value</em>' attribute. * @see #isSetMaxValue() * @see #unsetMaxValue() * @see #setMaxValue(int) * @generated */ public int getMaxValue() { return maxValue; } /** * Sets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMaxValue <em>Max Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Max Value</em>' attribute. * @see #isSetMaxValue() * @see #unsetMaxValue() * @see #getMaxValue() * @generated */ public void setMaxValue(int newMaxValue) { maxValue = newMaxValue; maxValueESet = true; } /** * Unsets the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMaxValue <em>Max Value</em>}' attribute. 
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isSetMaxValue()
     * @see #getMaxValue()
     * @see #setMaxValue(int)
     * @generated
     */
    public void unsetMaxValue() {
        // Restore the generated default and clear the "explicitly set" flag.
        maxValue = MAX_VALUE_EDEFAULT;
        maxValueESet = false;
    }

    /**
     * Returns whether the value of the '{@link CIM15.IEC61970.Meas.Discrete#getMaxValue <em>Max Value</em>}' attribute is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return whether the value of the '<em>Max Value</em>' attribute is set.
     * @see #unsetMaxValue()
     * @see #getMaxValue()
     * @see #setMaxValue(int)
     * @generated
     */
    public boolean isSetMaxValue() {
        return maxValueESet;
    }

    /**
     * Wires up the inverse side of a bidirectional reference when the opposite
     * end adds this object; removes any previous opposite first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case MeasPackage.DISCRETE__COMMAND:
                if (command != null)
                    msgs = ((InternalEObject)command).eInverseRemove(this, MeasPackage.COMMAND__DISCRETE, Command.class, msgs);
                return basicSetCommand((Command)otherEnd, msgs);
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getDiscreteValues()).basicAdd(otherEnd, msgs);
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                if (valueAliasSet != null)
                    msgs = ((InternalEObject)valueAliasSet).eInverseRemove(this, MeasPackage.VALUE_ALIAS_SET__DISCRETES, ValueAliasSet.class, msgs);
                return basicSetValueAliasSet((ValueAliasSet)otherEnd, msgs);
        }
        return super.eInverseAdd(otherEnd, featureID, msgs);
    }

    /**
     * Clears the local end of a bidirectional reference when the opposite end
     * removes this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case MeasPackage.DISCRETE__COMMAND:
                return basicSetCommand(null, msgs);
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                return ((InternalEList<?>)getDiscreteValues()).basicRemove(otherEnd, msgs);
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                return basicSetValueAliasSet(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * Reflective feature read; for references, {@code resolve} selects between
     * the proxy-resolving getter and the raw {@code basicGet*} accessor.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case MeasPackage.DISCRETE__NORMAL_VALUE:
                return getNormalValue();
            case MeasPackage.DISCRETE__MIN_VALUE:
                return getMinValue();
            case MeasPackage.DISCRETE__COMMAND:
                if (resolve) return getCommand();
                return basicGetCommand();
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                return getDiscreteValues();
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                if (resolve) return getValueAliasSet();
                return basicGetValueAliasSet();
            case MeasPackage.DISCRETE__MAX_VALUE:
                return getMaxValue();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature write; delegates to the typed setters so inverse
     * references and ESet flags are maintained.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case MeasPackage.DISCRETE__NORMAL_VALUE:
                setNormalValue((Integer)newValue);
                return;
            case MeasPackage.DISCRETE__MIN_VALUE:
                setMinValue((Integer)newValue);
                return;
            case MeasPackage.DISCRETE__COMMAND:
                setCommand((Command)newValue);
                return;
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                // Many-valued feature: replace the whole list content.
                getDiscreteValues().clear();
                getDiscreteValues().addAll((Collection<? extends DiscreteValue>)newValue);
                return;
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                setValueAliasSet((ValueAliasSet)newValue);
                return;
            case MeasPackage.DISCRETE__MAX_VALUE:
                setMaxValue((Integer)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective feature unset; attributes revert to their defaults, references
     * are cleared.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case MeasPackage.DISCRETE__NORMAL_VALUE:
                unsetNormalValue();
                return;
            case MeasPackage.DISCRETE__MIN_VALUE:
                unsetMinValue();
                return;
            case MeasPackage.DISCRETE__COMMAND:
                setCommand((Command)null);
                return;
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                getDiscreteValues().clear();
                return;
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                setValueAliasSet((ValueAliasSet)null);
                return;
            case MeasPackage.DISCRETE__MAX_VALUE:
                unsetMaxValue();
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set" check; unsettable attributes use their ESet flag,
     * references report non-null, lists report non-empty.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case MeasPackage.DISCRETE__NORMAL_VALUE:
                return isSetNormalValue();
            case MeasPackage.DISCRETE__MIN_VALUE:
                return isSetMinValue();
            case MeasPackage.DISCRETE__COMMAND:
                return command != null;
            case MeasPackage.DISCRETE__DISCRETE_VALUES:
                return discreteValues != null && !discreteValues.isEmpty();
            case MeasPackage.DISCRETE__VALUE_ALIAS_SET:
                return valueAliasSet != null;
            case MeasPackage.DISCRETE__MAX_VALUE:
                return isSetMaxValue();
        }
        return super.eIsSet(featureID);
    }

    /**
     * Renders the attribute values (not the references); unset attributes print
     * as {@code <unset>}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();

        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (normalValue: ");
        if (normalValueESet) result.append(normalValue); else result.append("<unset>");
        result.append(", minValue: ");
        if (minValueESet) result.append(minValue); else result.append("<unset>");
        result.append(", maxValue: ");
        if (maxValueESet) result.append(maxValue); else result.append("<unset>");
        result.append(')');
        return result.toString();
    }

} // Discrete
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.manager.test.server;

import io.apiman.common.config.SystemPropertiesConfiguration;
import io.apiman.common.util.crypt.CurrentDataEncrypter;
import io.apiman.common.util.crypt.IDataEncrypter;
import io.apiman.manager.api.beans.apis.EndpointType;
import io.apiman.manager.api.beans.idm.UserBean;
import io.apiman.manager.api.beans.summary.AvailableApiBean;
import io.apiman.manager.api.core.IApiCatalog;
import io.apiman.manager.api.core.IApiKeyGenerator;
import io.apiman.manager.api.core.IMetricsAccessor;
import io.apiman.manager.api.core.INewUserBootstrapper;
import io.apiman.manager.api.core.IPluginRegistry;
import io.apiman.manager.api.core.IStorage;
import io.apiman.manager.api.core.IStorageQuery;
import io.apiman.manager.api.core.UuidApiKeyGenerator;
import io.apiman.manager.api.core.crypt.DefaultDataEncrypter;
import io.apiman.manager.api.core.exceptions.StorageException;
import io.apiman.manager.api.core.logging.ApimanLogger;
import io.apiman.manager.api.core.logging.IApimanLogger;
import io.apiman.manager.api.core.logging.StandardLoggerImpl;
import io.apiman.manager.api.es.ESMetricsAccessor;
import io.apiman.manager.api.es.EsStorage;
import io.apiman.manager.api.jpa.IJpaProperties;
import io.apiman.manager.api.jpa.JpaStorage;
import io.apiman.manager.api.security.ISecurityContext;
import io.apiman.manager.api.security.impl.DefaultSecurityContext;
import io.apiman.manager.test.util.ManagerTestUtils;
import io.apiman.manager.test.util.ManagerTestUtils.TestType;
import io.searchbox.client.JestClient;
import io.searchbox.client.JestClientFactory;
import io.searchbox.client.config.HttpClientConfig;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.New;
import javax.enterprise.inject.Produces;
import javax.enterprise.inject.spi.InjectionPoint;
import javax.inject.Named;

/**
 * Attempt to create producer methods for CDI beans.
 * <p>
 * Test-only CDI wiring for the apiman manager: each {@code @Produces} method
 * supplies a core service implementation, switching between JPA- and
 * Elasticsearch-backed variants based on {@link ManagerTestUtils#getTestType()}
 * and on {@code apiman-test.*} system properties.
 *
 * @author eric.wittmann@redhat.com
 */
@ApplicationScoped
@SuppressWarnings("nls")
@Named("ApimanLogFactory")
public class TestCdiFactory {

    // Jest HTTP connect/read timeout in milliseconds.
    private static final int JEST_TIMEOUT = 6000;

    @Produces @ApplicationScoped
    public static ISecurityContext provideSecurityContext(@New DefaultSecurityContext defaultSC) {
        return defaultSC;
    }

    @Produces @ApimanLogger
    public static IApimanLogger provideLogger(InjectionPoint injectionPoint) {
        // Read the class to log for from the @ApimanLogger annotation at the injection point.
        ApimanLogger logger = injectionPoint.getAnnotated().getAnnotation(ApimanLogger.class);
        Class<?> klazz = logger.value();
        return new StandardLoggerImpl().createLogger(klazz);
    }

    @Produces @ApplicationScoped
    public static INewUserBootstrapper provideNewUserBootstrapper() {
        // Tests need no per-user bootstrap logic; provide a no-op implementation.
        return new INewUserBootstrapper() {
            @Override
            public void bootstrapUser(UserBean user, IStorage storage) throws StorageException {
                // Do nothing special.
            }
        };
    }

    @Produces @ApplicationScoped
    public static IJpaProperties provideJpaProperties() {
        return new IJpaProperties() {
            @Override
            public Map<String, String> getAllHibernateProperties() {
                // Collect every "apiman.hibernate.*" system property and re-key it
                // as "hibernate.*" for the JPA layer.
                SystemPropertiesConfiguration config = new SystemPropertiesConfiguration();
                Map<String, String> rval = new HashMap<>();
                Iterator<String> keys = config.getKeys();
                while (keys.hasNext()) {
                    String key = keys.next();
                    if (key.startsWith("apiman.hibernate.")) { //$NON-NLS-1$
                        String value = config.getString(key);
                        key = key.substring("apiman.".length()); //$NON-NLS-1$
                        rval.put(key, value);
                    }
                }
                return rval;
            }
        };
    }

    @Produces @ApplicationScoped
    public static IStorage provideStorage(@New JpaStorage jpaStorage, @New EsStorage esStorage) {
        // Backend selection: JPA storage as-is, or an initialized ES storage
        // wrapped so tests can force index refreshes.
        TestType testType = ManagerTestUtils.getTestType();
        if (testType == TestType.jpa) {
            return jpaStorage;
        } else if (testType == TestType.es) {
            esStorage.initialize();
            return new TestEsStorageWrapper(ManagerApiTestServer.ES_CLIENT, esStorage);
        } else {
            throw new RuntimeException("Unexpected test type: " + testType);
        }
    }

    @Produces @ApplicationScoped
    public static IStorageQuery provideStorageQuery(@New JpaStorage jpaStorage, @New EsStorage esStorage) {
        // Mirrors provideStorage(): the same backends also implement the query interface.
        TestType testType = ManagerTestUtils.getTestType();
        if (testType == TestType.jpa) {
            return jpaStorage;
        } else if (testType == TestType.es) {
            esStorage.initialize();
            return new TestEsStorageQueryWrapper(ManagerApiTestServer.ES_CLIENT, esStorage);
        } else {
            throw new RuntimeException("Unexpected test type: " + testType);
        }
    }

    @Produces @ApplicationScoped
    public static IApiKeyGenerator provideApiKeyGenerator(@New UuidApiKeyGenerator uuidApiKeyGen) {
        return uuidApiKeyGen;
    }

    @Produces @ApplicationScoped
    public static IDataEncrypter provideDataEncrypter(@New DefaultDataEncrypter defaultEncrypter) {
        // Also publish the encrypter through the static holder used by non-CDI code.
        CurrentDataEncrypter.instance = defaultEncrypter;
        return defaultEncrypter;
    }

    @Produces @ApplicationScoped
    public static IApiCatalog provideApiCatalog(IPluginRegistry pluginRegistry) {
        // Fixed two-entry catalog; the keyword argument is ignored by this stub.
        return new IApiCatalog() {
            @Override
            public List<AvailableApiBean> search(String keyword) {
                List<AvailableApiBean> rval = new ArrayList<>();
                AvailableApiBean asb = new AvailableApiBean();
                asb.setName("Test API 1");
                asb.setDescription("The first test API.");
                asb.setEndpoint("http://api1.example.org/api");
                asb.setEndpointType(EndpointType.rest);
                rval.add(asb);

                asb = new AvailableApiBean();
                asb.setName("Test API 2");
                asb.setDescription("The second test API.");
                asb.setEndpoint("http://api2.example.org/api");
                asb.setEndpointType(EndpointType.rest);
                rval.add(asb);
                return rval;
            }
        };
    }

    @Produces @ApplicationScoped @Named("storage")
    public static JestClient provideStorageJestClient() {
        // Null for JPA runs: no ES client is needed (or available) in that mode.
        TestType testType = ManagerTestUtils.getTestType();
        if (testType == TestType.jpa) {
            return null;
        } else if (testType == TestType.es) {
            return ManagerApiTestServer.ES_CLIENT;
        } else {
            throw new RuntimeException("Unexpected test type: " + testType);
        }
    }

    @Produces @ApplicationScoped @Named("metrics")
    public static JestClient provideMetricsJestClient() {
        boolean enableESMetrics = "true".equals(System.getProperty("apiman-test.es-metrics", "false"));
        if (enableESMetrics) {
            String host = System.getProperty("apiman-test.es-metrics.host", "localhost");
            String port = System.getProperty("apiman-test.es-metrics.port", "9200");
            // NOTE(review): the trailing + "" is redundant but preserved as-is.
            String connectionUrl = "http://" + host + ":" + port + "";
            JestClientFactory factory = new JestClientFactory();
            factory.setHttpClientConfig(new HttpClientConfig.Builder(connectionUrl).multiThreaded(true).
                    connTimeout(JEST_TIMEOUT).readTimeout(JEST_TIMEOUT).build());
            return factory.getObject();
        } else {
            return null;
        }
    }

    @Produces @ApplicationScoped
    public static IMetricsAccessor provideMetricsAccessor(@New TestMetricsAccessor testMetrics, @New ESMetricsAccessor esMetrics) {
        // Real ES-backed metrics only when explicitly enabled; otherwise a test stub.
        boolean enableESMetrics = "true".equals(System.getProperty("apiman-test.es-metrics", "false"));
        if (enableESMetrics) {
            return esMetrics;
        } else {
            return testMetrics;
        }
    }

}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl;

import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.actions.ContextHelpAction;
import com.intellij.ide.actions.ResizeToolWindowAction;
import com.intellij.ide.actions.ToggleToolbarAction;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Queryable;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.*;
import com.intellij.ui.Gray;
import com.intellij.ui.JBColor;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.content.Content;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.AncestorEvent;
import javax.swing.event.AncestorListener;
import java.awt.*;
import java.awt.event.*;
import java.util.Map;

/**
 * The chrome panel wrapped around a tool window's content: header, divider for
 * sliding mode, popup/gear action groups, and the listener plumbing that
 * broadcasts user gestures (hide, pin, dock, resize, ...) to the tool window
 * manager via {@link InternalDecoratorListener}.
 *
 * @author Eugene Belyaev
 * @author Vladimir Kondratyev
 */
public final class InternalDecorator extends JPanel implements Queryable, DataProvider {

  // Nulled out in dispose(); methods guard on myProject == null afterwards.
  private Project myProject;
  private WindowInfoImpl myInfo;
  private final ToolWindowImpl myToolWindow;
  private final MyDivider myDivider;
  private final EventDispatcher<InternalDecoratorListener> myDispatcher = EventDispatcher.create(InternalDecoratorListener.class);
  /*
   * Actions
   */
  private final TogglePinnedModeAction myToggleAutoHideModeAction;
  private final ToggleDockModeAction myToggleDockModeAction;
  private final ToggleFloatingModeAction myToggleFloatingModeAction;
  private final ToggleWindowedModeAction myToggleWindowedModeAction;
  private final ToggleSideModeAction myToggleSideModeAction;
  private final ToggleContentUiTypeAction myToggleContentUiTypeAction;
  private final RemoveStripeButtonAction myHideStripeButtonAction;

  private ActionGroup myAdditionalGearActions;
  /**
   * Catches all event from tool window and modifies decorator's appearance.
   */
  @NonNls static final String HIDE_ACTIVE_WINDOW_ACTION_ID = "HideActiveWindow";
  @NonNls public static final String TOGGLE_PINNED_MODE_ACTION_ID = "TogglePinnedMode";
  @NonNls public static final String TOGGLE_DOCK_MODE_ACTION_ID = "ToggleDockMode";
  @NonNls public static final String TOGGLE_FLOATING_MODE_ACTION_ID = "ToggleFloatingMode";
  @NonNls public static final String TOGGLE_WINDOWED_MODE_ACTION_ID = "ToggleWindowedMode";
  @NonNls public static final String TOGGLE_SIDE_MODE_ACTION_ID = "ToggleSideMode";
  @NonNls private static final String TOGGLE_CONTENT_UI_TYPE_ACTION_ID = "ToggleContentUiTypeMode";

  private ToolWindowHeader myHeader;
  private final ActionGroup myToggleToolbarGroup;

  InternalDecorator(final Project project, @NotNull WindowInfoImpl info, final ToolWindowImpl toolWindow, boolean dumbAware) {
    super(new BorderLayout());
    myProject = project;
    myToolWindow = toolWindow;
    myToolWindow.setDecorator(this);
    myDivider = new MyDivider();

    myToggleFloatingModeAction = new ToggleFloatingModeAction();
    myToggleWindowedModeAction = new ToggleWindowedModeAction();
    myToggleSideModeAction = new ToggleSideModeAction();
    myToggleDockModeAction = new ToggleDockModeAction();
    myToggleAutoHideModeAction = new TogglePinnedModeAction();
    myToggleContentUiTypeAction = new ToggleContentUiTypeAction();
    myHideStripeButtonAction = new RemoveStripeButtonAction();
    myToggleToolbarGroup = ToggleToolbarAction.createToggleToolbarGroup(myProject, myToolWindow);

    setFocusable(false);
    setFocusTraversalPolicy(new LayoutFocusTraversalPolicy());

    // Header callbacks translate title-bar gestures into decorator events.
    myHeader = new ToolWindowHeader(toolWindow, () -> createPopupGroup(true)) {
      @Override
      protected boolean isActive() {
        return myToolWindow.isActive();
      }

      @Override
      protected void hideToolWindow() {
        fireHidden();
      }

      @Override
      protected void toolWindowTypeChanged(@NotNull ToolWindowType type) {
        fireTypeChanged(type);
      }

      @Override
      protected void sideHidden() {
        fireHiddenSide();
      }
    };

    init(dumbAware);

    apply(info);
  }

  @Override
  public String toString() {
    return myToolWindow.getId();
  }

  public boolean isFocused() {
    // Focused either directly (a descendant has focus) or indirectly
    // (the frame's last focused component lives inside this tool window).
    IdeFocusManager fm = IdeFocusManager.getInstance(myProject);
    Component component = fm.getFocusedDescendantFor(myToolWindow.getComponent());
    if (component != null) return true;

    Component owner = fm.getLastFocusedFor(WindowManager.getInstance().getIdeFrame(myProject));

    return owner != null && SwingUtilities.isDescendingFrom(owner, myToolWindow.getComponent());
  }

  /**
   * Applies specified decoration.
   */
  public final void apply(@NotNull WindowInfoImpl info) {
    if (Comparing.equal(myInfo, info) || myProject == null || myProject.isDisposed()) {
      return;
    }
    myInfo = info;

    // Anchor: in sliding mode the resize divider sits on the side opposite the anchor.
    final ToolWindowAnchor anchor = myInfo.getAnchor();
    if (info.isSliding()) {
      myDivider.invalidate();
      if (ToolWindowAnchor.TOP == anchor) {
        add(myDivider, BorderLayout.SOUTH);
      }
      else if (ToolWindowAnchor.LEFT == anchor) {
        add(myDivider, BorderLayout.EAST);
      }
      else if (ToolWindowAnchor.BOTTOM == anchor) {
        add(myDivider, BorderLayout.NORTH);
      }
      else if (ToolWindowAnchor.RIGHT == anchor) {
        add(myDivider, BorderLayout.WEST);
      }
      myDivider.setPreferredSize(new Dimension(0, 0));
    }
    else { // docked and floating windows don't have divider
      remove(myDivider);
    }

    validate();
    repaint();

    // Push "apply" request forward
    if (myInfo.isFloating() && myInfo.isVisible()) {
      final FloatingDecorator floatingDecorator = (FloatingDecorator)SwingUtilities.getAncestorOfClass(FloatingDecorator.class, this);
      if (floatingDecorator != null) {
        floatingDecorator.apply(myInfo);
      }
    }

    myToolWindow.getContentUI().setType(myInfo.getContentUiType());
    setBorder(new InnerPanelBorder(myToolWindow));
  }

  @Nullable
  @Override
  public Object getData(@NotNull @NonNls String dataId) {
    if (PlatformDataKeys.TOOL_WINDOW.is(dataId)) {
      return myToolWindow;
    }
    return null;
  }

  final void addInternalDecoratorListener(InternalDecoratorListener l) {
    myDispatcher.addListener(l);
  }

  final void removeInternalDecoratorListener(InternalDecoratorListener l) {
    myDispatcher.removeListener(l);
  }

  final void dispose() {
    removeAll();
    Disposer.dispose(myHeader);
    myHeader = null;
    myProject = null;
  }

  private void fireAnchorChanged(@NotNull ToolWindowAnchor anchor) {
    myDispatcher.getMulticaster().anchorChanged(this, anchor);
  }

  private void fireAutoHideChanged(boolean autoHide) {
    myDispatcher.getMulticaster().autoHideChanged(this, autoHide);
  }

  /**
   * Fires event that "hide" button has been pressed.
   */
  final void fireHidden() {
    myDispatcher.getMulticaster().hidden(this);
  }

  /**
   * Fires event that the "hide side" gesture was performed (hide the whole
   * side the tool window is docked to), as opposed to {@link #fireHidden()}.
   */
  final void fireHiddenSide() {
    myDispatcher.getMulticaster().hiddenSide(this);
  }

  /**
   * Fires event that user performed click into the title bar area.
   */
  final void fireActivated() {
    myDispatcher.getMulticaster().activated(this);
  }

  private void fireTypeChanged(@NotNull ToolWindowType type) {
    myDispatcher.getMulticaster().typeChanged(this, type);
  }

  final void fireResized() {
    myDispatcher.getMulticaster().resized(this);
  }

  private void fireSideStatusChanged(boolean isSide) {
    myDispatcher.getMulticaster().sideStatusChanged(this, isSide);
  }

  private void fireContentUiTypeChanges(@NotNull ToolWindowContentUiType type) {
    myDispatcher.getMulticaster().contentUiTypeChanges(this, type);
  }

  private void fireVisibleOnPanelChanged(final boolean visibleOnPanel) {
    myDispatcher.getMulticaster().visibleStripeButtonChanged(this, visibleOnPanel);
  }

  private void init(boolean dumbAware) {
    enableEvents(AWTEvent.COMPONENT_EVENT_MASK);

    final JPanel contentPane = new JPanel(new BorderLayout());
    installFocusTraversalPolicy(contentPane, new LayoutFocusTraversalPolicy());
    contentPane.add(myHeader, BorderLayout.NORTH);

    JPanel innerPanel = new JPanel(new BorderLayout());
    JComponent toolWindowComponent = myToolWindow.getComponent();
    if (!dumbAware) {
      // Non-dumb-aware content is replaced by a placeholder during indexing.
      toolWindowComponent = DumbService.getInstance(myProject).wrapGently(toolWindowComponent, myProject);
    }
    innerPanel.add(toolWindowComponent, BorderLayout.CENTER);

    final NonOpaquePanel inner = new NonOpaquePanel(innerPanel);

    contentPane.add(inner, BorderLayout.CENTER);
    add(contentPane, BorderLayout.CENTER);
    if (SystemInfo.isMac) {
      setBackground(new JBColor(Gray._200, Gray._90));
    }

    // Escape inside the tool window returns focus to the editor; the binding is
    // (re)installed each time the decorator is re-added to a component hierarchy.
    AncestorListener ancestorListener = new AncestorListener() {
      private final static String FOCUS_EDITOR_ACTION_KEY = "FOCUS_EDITOR_ACTION_KEY";

      @Override
      public void ancestorAdded(AncestorEvent event) {
        registerEscapeAction();
      }

      @Override
      public void ancestorMoved(AncestorEvent event) {}

      private void registerEscapeAction() {
        getInputMap(WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), FOCUS_EDITOR_ACTION_KEY);
        getActionMap().put(FOCUS_EDITOR_ACTION_KEY, new AbstractAction() {
          @Override
          public void actionPerformed(ActionEvent e) {
            ToolWindowManager.getInstance(myProject).activateEditorComponent();
          }
        });
      }

      @Override
      public void ancestorRemoved(AncestorEvent event) {}
    };
    addAncestorListener(ancestorListener);
    Disposer.register(myProject, () -> removeAncestorListener(ancestorListener));
  }

  public void setTitleActions(AnAction[] actions) {
    myHeader.setAdditionalTitleActions(actions);
  }

  public void setTabActions(AnAction[] actions) {
    myHeader.setTabActions(actions);
  }

  /**
   * Paints the one/two-pixel separator lines around the decorator; insets are
   * computed from the window's anchor and its position within a splitter.
   */
  private class InnerPanelBorder implements Border {

    private final ToolWindow myWindow;

    private InnerPanelBorder(ToolWindow window) {
      myWindow = window;
    }

    @Override
    public void paintBorder(final Component c, final Graphics g, final int x, final int y, final int width, final int height) {
      g.setColor(JBColor.border());
      doPaintBorder(c, g, x, y, width, height);
    }

    private void doPaintBorder(Component c, Graphics g, int x, int y, int width, int height) {
      Insets insets = getBorderInsets(c);

      if (insets.top > 0) {
        UIUtil.drawLine(g, x, y + insets.top - 1, x + width - 1, y + insets.top - 1);
        UIUtil.drawLine(g, x, y + insets.top, x + width - 1, y + insets.top);
      }

      if (insets.left > 0) {
        UIUtil.drawLine(g, x, y, x, y + height);
        UIUtil.drawLine(g, x + 1, y, x + 1, y + height);
      }

      if (insets.right > 0) {
        UIUtil.drawLine(g, x + width - 1, y + insets.top, x + width - 1, y + height);
        UIUtil.drawLine(g, x + width, y + insets.top, x + width, y + height);
      }

      if (insets.bottom > 0) {
        UIUtil.drawLine(g, x, y + height - 1, x + width, y + height - 1);
        UIUtil.drawLine(g, x, y + height, x + width, y + height);
      }
    }

    @Override
    public Insets getBorderInsets(final Component c) {
      // No border for unregistered, floating, or windowed tool windows.
      if (myProject == null) return new Insets(0, 0, 0, 0);
      ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
      if (!(toolWindowManager instanceof ToolWindowManagerImpl)
          || !((ToolWindowManagerImpl)toolWindowManager).isToolWindowRegistered(myInfo.getId())
          || myWindow.getType() == ToolWindowType.FLOATING
          || myWindow.getType() == ToolWindowType.WINDOWED) {
        return new Insets(0, 0, 0, 0);
      }
      ToolWindowAnchor anchor = myWindow.getAnchor();
      // Walk up to the nearest Splitter to learn this window's placement in it.
      Component component = myWindow.getComponent();
      Container parent = component.getParent();
      boolean isSplitter = false;
      boolean isFirstInSplitter = false;
      boolean isVerticalSplitter = false;
      while (parent != null) {
        if (parent instanceof Splitter) {
          Splitter splitter = (Splitter)parent;
          isSplitter = true;
          isFirstInSplitter = splitter.getFirstComponent() == component;
          isVerticalSplitter = splitter.isVertical();
          break;
        }
        component = parent;
        parent = component.getParent();
      }

      int top = isSplitter && (anchor == ToolWindowAnchor.RIGHT || anchor == ToolWindowAnchor.LEFT) && myInfo.isSplit() && isVerticalSplitter ? -1 : 0;
      int left = anchor == ToolWindowAnchor.RIGHT && (!isSplitter || isVerticalSplitter || isFirstInSplitter) ? 1 : 0;
      int bottom = 0;
      int right = anchor == ToolWindowAnchor.LEFT && (!isSplitter || isVerticalSplitter || !isFirstInSplitter) ? 1 : 0;
      return new Insets(top, left, bottom, right);
    }

    @Override
    public boolean isBorderOpaque() {
      return false;
    }
  }

  public final ActionGroup createPopupGroup() {
    return createPopupGroup(false);
  }

  /**
   * Builds the title-bar context menu: gear actions, "Move to", "Resize",
   * optionally "Hide", and context help.
   *
   * @param skipHideAction when true the "Hide" action is omitted (used by the
   *                       header's gear button, which has its own hide control).
   */
  public final ActionGroup createPopupGroup(boolean skipHideAction) {
    final DefaultActionGroup group = createGearPopupGroup();
    if (myInfo == null) {
      return group;
    }
    if (!ToolWindowId.PREVIEW.equals(myInfo.getId())) {
      group.add(myToggleContentUiTypeAction);
    }

    // "Move to" offers only the three anchors the window is not already on.
    final DefaultActionGroup moveGroup = new DefaultActionGroup(UIBundle.message("tool.window.move.to.action.group.name"), true);
    final ToolWindowAnchor anchor = myInfo.getAnchor();
    if (anchor != ToolWindowAnchor.TOP) {
      final AnAction topAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.top.action.name"), ToolWindowAnchor.TOP);
      moveGroup.add(topAction);
    }
    if (anchor != ToolWindowAnchor.LEFT) {
      final AnAction leftAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.left.action.name"), ToolWindowAnchor.LEFT);
      moveGroup.add(leftAction);
    }
    if (anchor != ToolWindowAnchor.BOTTOM) {
      final AnAction bottomAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.bottom.action.name"), ToolWindowAnchor.BOTTOM);
      moveGroup.add(bottomAction);
    }
    if (anchor != ToolWindowAnchor.RIGHT) {
      final AnAction rightAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.right.action.name"), ToolWindowAnchor.RIGHT);
      moveGroup.add(rightAction);
    }
    group.add(moveGroup);

    DefaultActionGroup resize = new DefaultActionGroup(ActionsBundle.groupText("ResizeToolWindowGroup"), true);
    resize.add(new ResizeToolWindowAction.Left(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Right(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Up(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Down(myToolWindow, this));
    resize.add(ActionManager.getInstance().getAction("MaximizeToolWindow"));

    group.add(resize);

    if (!skipHideAction) {
      group.addSeparator();
      group.add(new HideAction());
    }
    group.addSeparator();
    group.add(new ContextHelpAction() {
      @Nullable
      @Override
      protected String getHelpId(DataContext dataContext) {
        // Help id resolution order: selected content, then the tool window,
        // then the default lookup against the content's data context.
        Content content = myToolWindow.getContentManager().getSelectedContent();
        if (content != null) {
          String helpId = content.getHelpId();
          if (helpId != null) {
            return helpId;
          }
        }

        String id = myToolWindow.getHelpId();
        if (id != null) {
          return id;
        }

        DataContext context = content != null ? DataManager.getInstance().getDataContext(content.getComponent()) : dataContext;
        return super.getHelpId(context);
      }

      @Override
      public void update(@NotNull AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabledAndVisible(getHelpId(e.getDataContext()) != null);
      }
    });
    return group;
  }

  private DefaultActionGroup createGearPopupGroup() {
    return new GearActionGroup();
  }

  /**
   * The gear-icon menu: client-supplied gear actions first, then the
   * mode-toggle actions appropriate for the window's current type.
   */
  private class GearActionGroup extends DefaultActionGroup {
    GearActionGroup() {
      getTemplatePresentation().setIcon(AllIcons.General.GearPlain);
      getTemplatePresentation().setText("Show Options Menu");
      if (myInfo == null) return;
      if (myAdditionalGearActions != null) {
        if (myAdditionalGearActions.isPopup() && !StringUtil.isEmpty(myAdditionalGearActions.getTemplatePresentation().getText())) {
          add(myAdditionalGearActions);
        }
        else {
          addSorted(this, myAdditionalGearActions);
        }
        addSeparator();
      }
      addAction(myToggleToolbarGroup).setAsSecondary(true);
      if (myInfo.isDocked()) {
        add(myToggleAutoHideModeAction);
        add(myToggleDockModeAction);
        add(myToggleFloatingModeAction);
        add(myToggleWindowedModeAction);
        add(myToggleSideModeAction);
      }
      else if (myInfo.isFloating()) {
        add(myToggleAutoHideModeAction);
        add(myToggleFloatingModeAction);
        add(myToggleWindowedModeAction);
      }
      else if (myInfo.isWindowed()) {
        add(myToggleFloatingModeAction);
        add(myToggleWindowedModeAction);
      }
      else if (myInfo.isSliding()) {
        if (!ToolWindowId.PREVIEW.equals(myInfo.getId())) {
          add(myToggleDockModeAction);
        }
        add(myToggleFloatingModeAction);
        add(myToggleWindowedModeAction);
        add(myToggleSideModeAction);
      }
      add(myHideStripeButtonAction);
    }
  }

  // Adds the group's primary actions to the menu body, its non-primary ones to
  // the secondary (overflow) section, and a named separator at the top.
  private static void addSorted(DefaultActionGroup main, ActionGroup group) {
    final AnAction[] children = group.getChildren(null);
    boolean hadSecondary = false;
    for (AnAction action : children) {
      if (group.isPrimary(action)) {
        main.add(action);
      }
      else {
        hadSecondary = true;
      }
    }
    if (hadSecondary) {
      main.addSeparator();
      for (AnAction action : children) {
        if (!group.isPrimary(action)) {
          main.addAction(action).setAsSecondary(true);
        }
      }
    }
    String separatorText = group.getTemplatePresentation().getText();
    if (children.length > 0 && !StringUtil.isEmpty(separatorText)) {
      main.addAction(new Separator(separatorText), Constraints.FIRST);
    }
  }

  /**
   * @return tool window associated with the decorator.
   */
  final ToolWindowImpl getToolWindow() {
    return myToolWindow;
  }

  /**
   * @return last window info applied to the decorator.
   */
  @NotNull
  final WindowInfoImpl getWindowInfo() {
    return myInfo;
  }

  public int getHeaderHeight() {
    return myHeader.getPreferredSize().height;
  }

  public void setHeaderVisible(boolean value) {
    myHeader.setVisible(value);
  }

  @Override
  protected final void processComponentEvent(final ComponentEvent e) {
    super.processComponentEvent(e);
    if (ComponentEvent.COMPONENT_RESIZED == e.getID()) {
      fireResized();
    }
  }

  void removeStripeButton() {
    fireVisibleOnPanelChanged(false);
    fireHidden();
  }

  void showStripeButton() {
    fireVisibleOnPanelChanged(true);
    fireActivated();
  }

  // Moves the tool window to a fixed anchor when invoked from the "Move to" menu.
  private final class ChangeAnchorAction extends AnAction implements DumbAware {
    @NotNull private final ToolWindowAnchor myAnchor;

    ChangeAnchorAction(@NotNull String title, @NotNull ToolWindowAnchor anchor) {
      super(title);
      myAnchor = anchor;
    }

    @Override
    public final void actionPerformed(@NotNull final AnActionEvent e) {
      fireAnchorChanged(myAnchor);
    }
  }

  // "Pinned" is the inverse of auto-hide; hidden for floating/windowed types.
  private final class TogglePinnedModeAction extends ToggleAction implements DumbAware {
    TogglePinnedModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_PINNED_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(@NotNull final AnActionEvent event) {
      return !myInfo.isAutoHide();
    }

    @Override
    public final void setSelected(@NotNull final AnActionEvent event, final boolean flag) {
      fireAutoHideChanged(!myInfo.isAutoHide());
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      e.getPresentation().setVisible(myInfo.getType() != ToolWindowType.FLOATING && myInfo.getType() != ToolWindowType.WINDOWED);
    }
  }

  // Toggles between DOCKED and SLIDING; a no-op for other window types.
  private final class ToggleDockModeAction extends ToggleAction implements DumbAware {
    ToggleDockModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_DOCK_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(@NotNull final AnActionEvent event) {
      return myInfo.isDocked();
    }

    @Override
    public final void setSelected(@NotNull final AnActionEvent event, final boolean flag) {
      if (myInfo.isDocked()) {
        fireTypeChanged(ToolWindowType.SLIDING);
      }
      else if (myInfo.isSliding()) {
        fireTypeChanged(ToolWindowType.DOCKED);
      }
    }
  }

  // Toggles FLOATING; leaving floating restores the last internal (non-detached) type.
  private final class ToggleFloatingModeAction extends ToggleAction implements DumbAware {
    ToggleFloatingModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_FLOATING_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(@NotNull final AnActionEvent event) {
      return myInfo.isFloating();
    }

    @Override
    public final void setSelected(@NotNull final AnActionEvent event, final boolean flag) {
      if (myInfo.isFloating()) {
        fireTypeChanged(myInfo.getInternalType());
      }
      else {
        fireTypeChanged(ToolWindowType.FLOATING);
      }
    }
  }

  // Toggles WINDOWED; leaving windowed restores the last internal (non-detached) type.
  private final class ToggleWindowedModeAction extends ToggleAction implements DumbAware {
    ToggleWindowedModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_WINDOWED_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(@NotNull final AnActionEvent event) {
      return myInfo.isWindowed();
    }

    @Override
    public final void setSelected(@NotNull final AnActionEvent event, final boolean flag) {
      if (myInfo.isWindowed()) {
        fireTypeChanged(myInfo.getInternalType());
      }
      else {
        fireTypeChanged(ToolWindowType.WINDOWED);
      }
    }
  }
private final class ToggleSideModeAction extends ToggleAction implements DumbAware {
    ToggleSideModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_SIDE_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(@NotNull final AnActionEvent event) {
      return myInfo.isSplit();
    }

    @Override
    public final void setSelected(@NotNull final AnActionEvent event, final boolean flag) {
      fireSideStatusChanged(flag);
    }
  }

  /** Removes the window's button from the stripe (only shown while visible on it). */
  private final class RemoveStripeButtonAction extends AnAction implements DumbAware {
    RemoveStripeButtonAction() {
      Presentation presentation = getTemplatePresentation();
      presentation.setText(ActionsBundle.message("action.RemoveStripeButton.text"));
      presentation.setDescription(ActionsBundle.message("action.RemoveStripeButton.description"));
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      e.getPresentation().setEnabledAndVisible(myInfo.isShowStripeButton());
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      removeStripeButton();
    }
  }

  /** Hides the active tool window; enabled only while it is visible. */
  private final class HideAction extends AnAction implements DumbAware {
    @NonNls public static final String HIDE_ACTIVE_WINDOW_ACTION_ID = InternalDecorator.HIDE_ACTIVE_WINDOW_ACTION_ID;

    HideAction() {
      copyFrom(ActionManager.getInstance().getAction(HIDE_ACTIVE_WINDOW_ACTION_ID));
      getTemplatePresentation().setText(UIBundle.message("tool.window.hide.action.name"));
    }

    @Override
    public final void actionPerformed(@NotNull final AnActionEvent e) {
      fireHidden();
    }

    @Override
    public final void update(@NotNull final AnActionEvent event) {
      final Presentation presentation = event.getPresentation();
      presentation.setEnabled(myInfo.isVisible());
    }
  }

  /**
   * Switches the content UI between tabbed and combo presentation. Only shown
   * once the window has ever held more than one content (sticky flag).
   */
  private final class ToggleContentUiTypeAction extends ToggleAction implements DumbAware {
    // Latches to true the first time multiple contents are observed; the
    // action stays visible afterwards even if contents drop back to one.
    private boolean myHadSeveralContents;

    private ToggleContentUiTypeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_CONTENT_UI_TYPE_ACTION_ID));
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      myHadSeveralContents = myHadSeveralContents || myToolWindow.getContentManager().getContentCount() > 1;
      super.update(e);
      e.getPresentation().setVisible(myHadSeveralContents);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return myInfo.getContentUiType() == ToolWindowContentUiType.COMBO;
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      fireContentUiTypeChanges(state ? ToolWindowContentUiType.COMBO : ToolWindowContentUiType.TABBED);
    }
  }

  /**
   * Thin resize handle along the window's anchored edge. Mouse events are
   * received via glass-pane preprocessors so drags work across child
   * components; dragging resizes the decorator against its anchor.
   */
  private final class MyDivider extends JPanel {
    private boolean myDragging;
    private Point myLastPoint;
    private Disposable myDisposable;
    private IdeGlassPane myGlassPane;
    private final MouseAdapter myListener = new MyMouseAdapter();

    @Override
    public void addNotify() {
      super.addNotify();
      myGlassPane = IdeGlassPaneUtil.find(this);
      myDisposable = Disposer.newDisposable();
      myGlassPane.addMouseMotionPreprocessor(myListener, myDisposable);
      myGlassPane.addMousePreprocessor(myListener, myDisposable);
    }

    @Override
    public void removeNotify() {
      super.removeNotify();
      // NOTE(review): myDisposable is disposed but not nulled, so a second
      // removeNotify before the next addNotify relies solely on the
      // Disposer.isDisposed guard.
      if (myDisposable != null && !Disposer.isDisposed(myDisposable)) {
        Disposer.dispose(myDisposable);
      }
    }

    // True when the pointer is within 6px of the divider along the resize axis.
    boolean isInDragZone(MouseEvent e) {
      final Point p = SwingUtilities.convertMouseEvent(e.getComponent(), e, this).getPoint();
      return Math.abs(myInfo.getAnchor().isHorizontal() ? p.y : p.x) < 6;
    }

    private class MyMouseAdapter extends MouseAdapter {
      private void updateCursor(MouseEvent e) {
        if (isInDragZone(e)) {
          myGlassPane.setCursor(MyDivider.this.getCursor(), MyDivider.this);
          e.consume();
        }
      }

      @Override
      public void mousePressed(MouseEvent e) {
        myDragging = isInDragZone(e);
        updateCursor(e);
      }

      @Override
      public void mouseClicked(MouseEvent e) {
        updateCursor(e);
      }

      @Override
      public void mouseReleased(MouseEvent e) {
        updateCursor(e);
        myDragging = false;
      }

      @Override
      public void mouseMoved(MouseEvent e) {
        updateCursor(e);
      }

      @Override
      public void mouseDragged(MouseEvent e) {
        if (!myDragging) return;
        MouseEvent event = SwingUtilities.convertMouseEvent(e.getComponent(), e, MyDivider.this);
        final ToolWindowAnchor anchor = myInfo.getAnchor();
        final Point point = event.getPoint();
        final Container windowPane = InternalDecorator.this.getParent();
        // Clamp the drag point to the parent pane so the window cannot be
        // resized past the pane's bounds.
        myLastPoint = SwingUtilities.convertPoint(MyDivider.this, point, windowPane);
        myLastPoint.x = Math.min(Math.max(myLastPoint.x, 0), windowPane.getWidth());
        myLastPoint.y = Math.min(Math.max(myLastPoint.y, 0), windowPane.getHeight());
        final Rectangle bounds = InternalDecorator.this.getBounds();
        // Grow/shrink the decorator away from its anchored edge.
        if (anchor == ToolWindowAnchor.TOP) {
          InternalDecorator.this.setBounds(0, 0, bounds.width, myLastPoint.y);
        }
        else if (anchor == ToolWindowAnchor.LEFT) {
          InternalDecorator.this.setBounds(0, 0, myLastPoint.x, bounds.height);
        }
        else if (anchor == ToolWindowAnchor.BOTTOM) {
          InternalDecorator.this.setBounds(0, myLastPoint.y, bounds.width, windowPane.getHeight() - myLastPoint.y);
        }
        else if (anchor == ToolWindowAnchor.RIGHT) {
          InternalDecorator.this.setBounds(myLastPoint.x, 0, windowPane.getWidth() - myLastPoint.x, bounds.height);
        }
        InternalDecorator.this.validate();
        e.consume();
      }
    }

    @NotNull
    @Override
    public Cursor getCursor() {
      // Resize cursor orientation depends on docking: a docked window resizes
      // against its split, otherwise against its anchor edge.
      final boolean isVerticalCursor = myInfo.isDocked() ? myInfo.getAnchor().isSplitVertically() : myInfo.getAnchor().isHorizontal();
      return isVerticalCursor ? Cursor.getPredefinedCursor(Cursor.S_RESIZE_CURSOR) : Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR);
    }
  }

  /** Contributes the tool window title and selected tab for diagnostics dumps. */
  @Override
  public void putInfo(@NotNull Map<String, String> info) {
    info.put("toolWindowTitle", myToolWindow.getTitle());
    final Content selection = myToolWindow.getContentManager().getSelectedContent();
    if (selection != null) {
      info.put("toolWindowTab", selection.getTabName());
    }
  }

  public void setAdditionalGearActions(@Nullable ActionGroup additionalGearActions) {
    myAdditionalGearActions = additionalGearActions;
  }

  @Override
  public AccessibleContext getAccessibleContext() {
    if (accessibleContext == null) {
      accessibleContext = new AccessibleInternalDecorator();
    }
    return accessibleContext;
  }

  /** Accessible wrapper that synthesizes a name from title/stripe-title/id. */
  protected class AccessibleInternalDecorator extends AccessibleJPanel {
    @Override
    public String getAccessibleName() {
      String name = super.getAccessibleName();
      if (name == null) {
        // Fall back through title -> stripe title -> id, then append a suffix
        // so screen readers announce the component kind.
        String title = StringUtil.defaultIfEmpty(myToolWindow.getTitle(), myToolWindow.getStripeTitle());
        title = StringUtil.defaultIfEmpty(title, myToolWindow.getId());
        name = StringUtil.notNullize(title) + " Tool Window";
      }
      return name;
    }
  }

  /**
   * Installs a focus traversal policy for the tool window.
   * If the policy cannot handle a keystroke, it delegates the handling to
   * the nearest ancestors focus traversal policy. For instance,
   * this policy does not handle KeyEvent.VK_ESCAPE, so it can delegate the handling
   * to a ThreeComponentSplitter instance.
   */
  public static void installFocusTraversalPolicy(@NotNull Container container, @NotNull FocusTraversalPolicy policy) {
    container.setFocusCycleRoot(true);
    container.setFocusTraversalPolicyProvider(true);
    container.setFocusTraversalPolicy(policy);
    installDefaultFocusTraversalKeys(container, KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS);
    installDefaultFocusTraversalKeys(container, KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS);
  }

  // Reapplies the keyboard focus manager's default traversal keys for the
  // given traversal id on the container.
  private static void installDefaultFocusTraversalKeys(@NotNull Container container, int id) {
    container.setFocusTraversalKeys(id, KeyboardFocusManager.getCurrentKeyboardFocusManager().getDefaultFocusTraversalKeys(id));
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.client.api.impl;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.AHSClient;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationIdNotProvidedException;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

import com.google.common.annotations.VisibleForTesting;

/**
 * Default {@link YarnClient} implementation that talks to the ResourceManager
 * over {@link ApplicationClientProtocol}, optionally falling back to the
 * Application History Server (AHS) for finished applications, and attaching
 * timeline-service delegation tokens to submitted applications when security
 * and the timeline service are both enabled.
 */
@Private
@Unstable
public class YarnClientImpl extends YarnClient {

  private static final Log LOG = LogFactory.getLog(YarnClientImpl.class);

  // RM-facing RPC proxy; created in serviceStart, stopped in serviceStop.
  protected ApplicationClientProtocol rmClient;
  // Poll interval while waiting for submitApplication to take effect.
  protected long submitPollIntervalMillis;
  // Poll interval/timeout for async APIs (submit, kill). Timeout < 0 disables
  // enforcement (see enforceAsyncAPITimeout()).
  private long asyncApiPollIntervalMillis;
  private long asyncApiPollTimeoutMillis;
  protected AHSClient historyClient;
  private boolean historyServiceEnabled;
  protected TimelineClient timelineClient;
  @VisibleForTesting
  Text timelineService;
  @VisibleForTesting
  String timelineDTRenewer;
  protected boolean timelineServiceEnabled;
  // If true, failure to obtain a timeline token is logged and ignored rather
  // than failing the submission.
  protected boolean timelineServiceBestEffort;

  private static final String ROOT = "root";

  public YarnClientImpl() {
    super(YarnClientImpl.class.getName());
  }

  /**
   * Reads poll intervals/timeouts and conditionally initializes the AHS and
   * timeline sub-clients based on configuration.
   */
  @SuppressWarnings("deprecation")
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    asyncApiPollIntervalMillis =
        conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS,
          YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
    asyncApiPollTimeoutMillis =
        conf.getLong(YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS,
          YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS);
    // Submission polling defaults to the generic async interval unless the
    // dedicated (deprecated) submission key is explicitly set.
    submitPollIntervalMillis = asyncApiPollIntervalMillis;
    if (conf.get(YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS) != null) {
      submitPollIntervalMillis = conf.getLong(
        YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS,
        YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
    }

    if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
      YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) {
      historyServiceEnabled = true;
      historyClient = AHSClient.createAHSClient();
      historyClient.init(conf);
    }

    if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
      timelineServiceEnabled = true;
      timelineClient = createTimelineClient();
      timelineClient.init(conf);
      timelineDTRenewer = getTimelineDelegationTokenRenewer(conf);
      timelineService = TimelineUtils.buildTimelineTokenService(conf);
    }

    timelineServiceBestEffort = conf.getBoolean(
        YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT,
        YarnConfiguration.DEFAULT_TIMELINE_SERVICE_CLIENT_BEST_EFFORT);
    super.serviceInit(conf);
  }

  // Factory hook so tests can substitute a mock timeline client.
  TimelineClient createTimelineClient() throws IOException, YarnException {
    return TimelineClient.createTimelineClient();
  }

  @Override
  protected void serviceStart() throws Exception {
    try {
      rmClient = ClientRMProxy.createRMProxy(getConfig(),
          ApplicationClientProtocol.class);
      if (historyServiceEnabled) {
        historyClient.start();
      }
      if (timelineServiceEnabled) {
        timelineClient.start();
      }
    } catch (IOException e) {
      throw new YarnRuntimeException(e);
    }
    super.serviceStart();
  }

  @Override
  protected void serviceStop() throws Exception {
    if (this.rmClient != null) {
      RPC.stopProxy(this.rmClient);
    }
    if (historyServiceEnabled) {
      historyClient.stop();
    }
    if (timelineServiceEnabled) {
      timelineClient.stop();
    }
    super.serviceStop();
  }

  // Asks the RM for a fresh ApplicationId (plus cluster resource caps).
  private GetNewApplicationResponse getNewApplication()
      throws YarnException, IOException {
    GetNewApplicationRequest request =
        Records.newRecord(GetNewApplicationRequest.class);
    return rmClient.getNewApplication(request);
  }

  @Override
  public YarnClientApplication createApplication()
      throws YarnException, IOException {
    ApplicationSubmissionContext context = Records.newRecord
        (ApplicationSubmissionContext.class);
    GetNewApplicationResponse newApp = getNewApplication();
    ApplicationId appId = newApp.getApplicationId();
    context.setApplicationId(appId);
    return new YarnClientApplication(newApp, context);
  }

  /**
   * Submits the application and polls until the RM reports it past the
   * NEW/NEW_SAVING/SUBMITTED states (or a failure state / timeout). If the RM
   * loses the app before persisting it (failover), the same context is
   * re-submitted.
   */
  @Override
  public ApplicationId
      submitApplication(ApplicationSubmissionContext appContext)
          throws YarnException, IOException {
    ApplicationId applicationId = appContext.getApplicationId();
    if (applicationId == null) {
      throw new ApplicationIdNotProvidedException(
          "ApplicationId is not provided in ApplicationSubmissionContext");
    }
    SubmitApplicationRequest request =
        Records.newRecord(SubmitApplicationRequest.class);
    request.setApplicationSubmissionContext(appContext);

    // Automatically add the timeline DT into the CLC
    // Only when the security and the timeline service are both enabled
    if (isSecurityEnabled() && timelineServiceEnabled) {
      addTimelineDelegationToken(appContext.getAMContainerSpec());
    }

    //TODO: YARN-1763:Handle RM failovers during the submitApplication call.
    rmClient.submitApplication(request);

    int pollCount = 0;
    long startTime = System.currentTimeMillis();
    EnumSet<YarnApplicationState> waitingStates =
                                 EnumSet.of(YarnApplicationState.NEW,
                                 YarnApplicationState.NEW_SAVING,
                                 YarnApplicationState.SUBMITTED);
    EnumSet<YarnApplicationState> failToSubmitStates =
                                  EnumSet.of(YarnApplicationState.FAILED,
                                  YarnApplicationState.KILLED);
    while (true) {
      try {
        ApplicationReport appReport = getApplicationReport(applicationId);
        YarnApplicationState state = appReport.getYarnApplicationState();
        if (!waitingStates.contains(state)) {
          if(failToSubmitStates.contains(state)) {
            throw new YarnException("Failed to submit " + applicationId +
                " to YARN : " + appReport.getDiagnostics());
          }
          LOG.info("Submitted application " + applicationId);
          break;
        }

        long elapsedMillis = System.currentTimeMillis() - startTime;
        if (enforceAsyncAPITimeout() &&
            elapsedMillis >= asyncApiPollTimeoutMillis) {
          throw new YarnException("Timed out while waiting for application " +
              applicationId + " to be submitted successfully");
        }

        // Notify the client through the log every 10 poll, in case the client
        // is blocked here too long.
        if (++pollCount % 10 == 0) {
          LOG.info("Application submission is not finished, " +
              "submitted application " + applicationId +
              " is still in " + state);
        }
        try {
          Thread.sleep(submitPollIntervalMillis);
        } catch (InterruptedException ie) {
          // NOTE(review): interrupt status is not restored here
          // (Thread.currentThread().interrupt()), so a caller's interrupt is
          // effectively swallowed and polling continues.
          LOG.error("Interrupted while waiting for application "
              + applicationId + " to be successfully submitted.");
        }
      } catch (ApplicationNotFoundException ex) {
        // FailOver or RM restart happens before RMStateStore saves
        // ApplicationState
        // NOTE(review): log string is missing a space before "with the".
        LOG.info("Re-submit application " + applicationId + "with the " +
            "same ApplicationSubmissionContext");
        rmClient.submitApplication(request);
      }
    }

    return applicationId;
  }

  /**
   * Ensures the container launch context carries a timeline-service
   * delegation token: deserializes existing credentials from the CLC, returns
   * early if a timeline token is already present, otherwise fetches one and
   * re-serializes the credentials back into the CLC.
   */
  private void addTimelineDelegationToken(
      ContainerLaunchContext clc) throws YarnException, IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    ByteBuffer tokens = clc.getTokens();
    if (tokens != null) {
      dibb.reset(tokens);
      credentials.readTokenStorageStream(dibb);
      tokens.rewind();
    }
    // If the timeline delegation token is already in the CLC, no need to add
    // one more
    for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
        .getAllTokens()) {
      if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
        return;
      }
    }
    org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
        timelineDelegationToken = getTimelineDelegationToken();
    if (timelineDelegationToken == null) {
      return;
    }
    credentials.addToken(timelineService, timelineDelegationToken);
    if (LOG.isDebugEnabled()) {
      // NOTE(review): "timline" typo in the log message (runtime string, left
      // unchanged here).
      LOG.debug("Add timline delegation token into credentials: "
          + timelineDelegationToken);
    }
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    clc.setTokens(tokens);
  }

  // Fetches a timeline DT; in best-effort mode failures are logged and null
  // is returned instead of propagating the exception.
  @VisibleForTesting
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      getTimelineDelegationToken() throws IOException, YarnException {
    try {
      return timelineClient.getDelegationToken(timelineDTRenewer);
    } catch (Exception e ) {
      if (timelineServiceBestEffort) {
        LOG.warn("Failed to get delegation token from the timeline server: "
            + e.getMessage());
        return null;
      }
      throw e;
    }
  }

  // Derives the DT renewer principal from the RM principal + RM host, or null
  // when no RM principal is configured (insecure clusters).
  private static String getTimelineDelegationTokenRenewer(Configuration conf)
      throws IOException, YarnException  {
    // Parse the RM daemon user if it exists in the config
    String rmPrincipal = conf.get(YarnConfiguration.RM_PRINCIPAL);
    String renewer = null;
    if (rmPrincipal != null && rmPrincipal.length() > 0) {
      String rmHost = conf.getSocketAddr(
          YarnConfiguration.RM_ADDRESS,
          YarnConfiguration.DEFAULT_RM_ADDRESS,
          YarnConfiguration.DEFAULT_RM_PORT).getHostName();
      renewer = SecurityUtil.getServerPrincipal(rmPrincipal, rmHost);
    }
    return renewer;
  }

  @Private
  @VisibleForTesting
  protected boolean isSecurityEnabled() {
    return UserGroupInformation.isSecurityEnabled();
  }

  /**
   * Issues a force-kill and polls until the RM confirms completion, subject to
   * the async-API timeout.
   */
  @Override
  public void killApplication(ApplicationId applicationId)
      throws YarnException, IOException {
    KillApplicationRequest request =
        Records.newRecord(KillApplicationRequest.class);
    request.setApplicationId(applicationId);
    try {
      int pollCount = 0;
      long startTime = System.currentTimeMillis();
      while (true) {
        KillApplicationResponse response =
            rmClient.forceKillApplication(request);
        if (response.getIsKillCompleted()) {
          LOG.info("Killed application " + applicationId);
          break;
        }
        long elapsedMillis = System.currentTimeMillis() - startTime;
        if (enforceAsyncAPITimeout() &&
            elapsedMillis >= this.asyncApiPollTimeoutMillis) {
          throw new YarnException("Timed out while waiting for application " +
            applicationId + " to be killed.");
        }
        if (++pollCount % 10 == 0) {
          LOG.info("Waiting for application " + applicationId + " to be killed.");
        }
        Thread.sleep(asyncApiPollIntervalMillis);
      }
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt aborts the kill wait silently and the
      // thread's interrupt status is not restored; callers cannot tell the
      // kill may not have completed.
      LOG.error("Interrupted while waiting for application " + applicationId
          + " to be killed.");
    }
  }

  // A negative configured timeout disables the async-API timeout checks.
  @VisibleForTesting
  boolean enforceAsyncAPITimeout() {
    return asyncApiPollTimeoutMillis >= 0;
  }

  /**
   * Gets the report from the RM; if the app is unknown to the RM and the
   * history service is enabled, falls back to the AHS.
   */
  @Override
  public ApplicationReport getApplicationReport(ApplicationId appId)
      throws YarnException, IOException {
    GetApplicationReportResponse response = null;
    try {
      GetApplicationReportRequest request = Records
          .newRecord(GetApplicationReportRequest.class);
      request.setApplicationId(appId);
      response = rmClient.getApplicationReport(request);
    } catch (YarnException e) {
      if (!historyServiceEnabled) {
        // Just throw it as usual if historyService is not enabled.
        throw e;
      }
      // Even if history-service is enabled, treat all exceptions still the same
      // except the following
      if (!(e.getClass() == ApplicationNotFoundException.class)) {
        throw e;
      }
      return historyClient.getApplicationReport(appId);
    }
    return response.getApplicationReport();
  }

  public org.apache.hadoop.security.token.Token<AMRMTokenIdentifier>
      getAMRMToken(ApplicationId appId) throws YarnException, IOException {
    Token token = getApplicationReport(appId).getAMRMToken();
    org.apache.hadoop.security.token.Token<AMRMTokenIdentifier> amrmToken =
        null;
    if (token != null) {
      amrmToken = ConverterUtils.convertFromYarn(token, (Text) null);
    }
    return amrmToken;
  }

  @Override
  public List<ApplicationReport> getApplications() throws YarnException,
      IOException {
    return getApplications(null, null);
  }

  @Override
  public List<ApplicationReport> getApplications(Set<String> applicationTypes)
      throws YarnException,
      IOException {
    return getApplications(applicationTypes, null);
  }

  @Override
  public List<ApplicationReport> getApplications(
      EnumSet<YarnApplicationState> applicationStates)
      throws YarnException, IOException {
    return getApplications(null, applicationStates);
  }

  @Override
  public List<ApplicationReport> getApplications(Set<String> applicationTypes,
      EnumSet<YarnApplicationState> applicationStates) throws YarnException,
      IOException {
    GetApplicationsRequest request =
        GetApplicationsRequest.newInstance(applicationTypes, applicationStates);
    GetApplicationsResponse response = rmClient.getApplications(request);
    return response.getApplicationList();
  }

  @Override
  public YarnClusterMetrics getYarnClusterMetrics() throws YarnException,
      IOException {
    GetClusterMetricsRequest request =
        Records.newRecord(GetClusterMetricsRequest.class);
    GetClusterMetricsResponse response = rmClient.getClusterMetrics(request);
    return response.getClusterMetrics();
  }

  // No states given means "all states".
  @Override
  public List<NodeReport> getNodeReports(NodeState... states) throws YarnException,
      IOException {
    EnumSet<NodeState> statesSet = (states.length == 0) ?
        EnumSet.allOf(NodeState.class) : EnumSet.noneOf(NodeState.class);
    for (NodeState state : states) {
      statesSet.add(state);
    }
    GetClusterNodesRequest request = GetClusterNodesRequest
        .newInstance(statesSet);
    GetClusterNodesResponse response = rmClient.getClusterNodes(request);
    return response.getNodeReports();
  }

  @Override
  public Token getRMDelegationToken(Text renewer)
      throws YarnException, IOException {
    /* get the token from RM */
    GetDelegationTokenRequest rmDTRequest =
        Records.newRecord(GetDelegationTokenRequest.class);
    rmDTRequest.setRenewer(renewer.toString());
    GetDelegationTokenResponse response =
        rmClient.getDelegationToken(rmDTRequest);
    return response.getRMDelegationToken();
  }

  // Builds a GetQueueInfoRequest with the given inclusion flags.
  private GetQueueInfoRequest
      getQueueInfoRequest(String queueName, boolean includeApplications,
          boolean includeChildQueues, boolean recursive) {
    GetQueueInfoRequest request = Records.newRecord(GetQueueInfoRequest.class);
    request.setQueueName(queueName);
    request.setIncludeApplications(includeApplications);
    request.setIncludeChildQueues(includeChildQueues);
    request.setRecursive(recursive);
    return request;
  }

  @Override
  public QueueInfo getQueueInfo(String queueName) throws YarnException,
      IOException {
    GetQueueInfoRequest request =
        getQueueInfoRequest(queueName, true, false, false);
    // NOTE(review): the following statement is dead code — it creates a record
    // whose result is discarded. Candidate for removal.
    Records.newRecord(GetQueueInfoRequest.class);
    return rmClient.getQueueInfo(request).getQueueInfo();
  }

  @Override
  public List<QueueUserACLInfo> getQueueAclsInfo() throws YarnException,
      IOException {
    GetQueueUserAclsInfoRequest request =
        Records.newRecord(GetQueueUserAclsInfoRequest.class);
    return rmClient.getQueueUserAcls(request).getUserAclsInfoList();
  }

  // Returns the root queue plus all descendants, flattened.
  @Override
  public List<QueueInfo> getAllQueues() throws YarnException,
      IOException {
    List<QueueInfo> queues = new ArrayList<QueueInfo>();

    QueueInfo rootQueue =
        rmClient.getQueueInfo(getQueueInfoRequest(ROOT, false, true, true))
          .getQueueInfo();
    getChildQueues(rootQueue, queues, true);
    return queues;
  }

  // Returns only the direct children of the root queue.
  @Override
  public List<QueueInfo> getRootQueueInfos() throws YarnException,
      IOException {
    List<QueueInfo> queues = new ArrayList<QueueInfo>();

    QueueInfo rootQueue =
        rmClient.getQueueInfo(getQueueInfoRequest(ROOT, false, true, true))
          .getQueueInfo();
    getChildQueues(rootQueue, queues, false);
    return queues;
  }

  @Override
  public List<QueueInfo> getChildQueueInfos(String parent)
      throws YarnException, IOException {
    List<QueueInfo> queues = new ArrayList<QueueInfo>();

    QueueInfo parentQueue =
        rmClient.getQueueInfo(getQueueInfoRequest(parent, false, true, false))
          .getQueueInfo();
    getChildQueues(parentQueue, queues, true);
    return queues;
  }

  // Depth-first accumulation of child queues into 'queues'.
  private void getChildQueues(QueueInfo parent, List<QueueInfo> queues,
      boolean recursive) {
    List<QueueInfo> childQueues = parent.getChildQueues();

    for (QueueInfo child : childQueues) {
      queues.add(child);
      if (recursive) {
        getChildQueues(child, queues, recursive);
      }
    }
  }

  @Private
  @VisibleForTesting
  public void setRMClient(ApplicationClientProtocol rmClient) {
    this.rmClient = rmClient;
  }

  /** RM first; falls back to AHS for apps the RM no longer knows. */
  @Override
  public ApplicationAttemptReport getApplicationAttemptReport(
      ApplicationAttemptId appAttemptId) throws YarnException, IOException {
    try {
      GetApplicationAttemptReportRequest request = Records
          .newRecord(GetApplicationAttemptReportRequest.class);
      request.setApplicationAttemptId(appAttemptId);
      GetApplicationAttemptReportResponse response = rmClient
          .getApplicationAttemptReport(request);
      return response.getApplicationAttemptReport();
    } catch (YarnException e) {
      if (!historyServiceEnabled) {
        // Just throw it as usual if historyService is not enabled.
        throw e;
      }
      // Even if history-service is enabled, treat all exceptions still the same
      // except the following
      if (e.getClass() != ApplicationNotFoundException.class) {
        throw e;
      }
      return historyClient.getApplicationAttemptReport(appAttemptId);
    }
  }

  /** RM first; falls back to AHS for apps the RM no longer knows. */
  @Override
  public List<ApplicationAttemptReport> getApplicationAttempts(
      ApplicationId appId) throws YarnException, IOException {
    try {
      GetApplicationAttemptsRequest request = Records
          .newRecord(GetApplicationAttemptsRequest.class);
      request.setApplicationId(appId);
      GetApplicationAttemptsResponse response = rmClient
          .getApplicationAttempts(request);
      return response.getApplicationAttemptList();
    } catch (YarnException e) {
      if (!historyServiceEnabled) {
        // Just throw it as usual if historyService is not enabled.
        throw e;
      }
      // Even if history-service is enabled, treat all exceptions still the same
      // except the following
      if (e.getClass() != ApplicationNotFoundException.class) {
        throw e;
      }
      return historyClient.getApplicationAttempts(appId);
    }
  }

  /** RM first; falls back to AHS when the app or container is unknown. */
  @Override
  public ContainerReport getContainerReport(ContainerId containerId)
      throws YarnException, IOException {
    try {
      GetContainerReportRequest request = Records
          .newRecord(GetContainerReportRequest.class);
      request.setContainerId(containerId);
      GetContainerReportResponse response = rmClient
          .getContainerReport(request);
      return response.getContainerReport();
    } catch (YarnException e) {
      if (!historyServiceEnabled) {
        // Just throw it as usual if historyService is not enabled.
        throw e;
      }
      // Even if history-service is enabled, treat all exceptions still the same
      // except the following
      if (e.getClass() != ApplicationNotFoundException.class
          && e.getClass() != ContainerNotFoundException.class) {
        throw e;
      }
      return historyClient.getContainerReport(containerId);
    }
  }

  /**
   * Collects containers for an attempt from the RM and, when the history
   * service is enabled, merges in AHS results to include finished containers.
   */
  @Override
  public List<ContainerReport> getContainers(
      ApplicationAttemptId applicationAttemptId) throws YarnException,
      IOException {
    List<ContainerReport> containersForAttempt =
        new ArrayList<ContainerReport>();
    boolean appNotFoundInRM = false;
    try {
      GetContainersRequest request =
          Records.newRecord(GetContainersRequest.class);
      request.setApplicationAttemptId(applicationAttemptId);
      GetContainersResponse response = rmClient.getContainers(request);
      containersForAttempt.addAll(response.getContainerList());
    } catch (YarnException e) {
      if (e.getClass() != ApplicationNotFoundException.class
          || !historyServiceEnabled) {
        // If Application is not in RM and history service is enabled then we
        // need to check with history service else throw exception.
        throw e;
      }
      appNotFoundInRM = true;
    }

    if (historyServiceEnabled) {
      // Check with AHS even if found in RM because to capture info of finished
      // containers also
      List<ContainerReport> containersListFromAHS = null;
      try {
        containersListFromAHS =
            historyClient.getContainers(applicationAttemptId);
      } catch (IOException e) {
        // History service access might be enabled but system metrics publisher
        // is disabled hence app not found exception is possible
        if (appNotFoundInRM) {
          // app not found in both AHS and RM then propagate the exception.
throw e; } } if (null != containersListFromAHS && containersListFromAHS.size() > 0) { // remove duplicates Set<ContainerId> containerIdsToBeKeptFromAHS = new HashSet<ContainerId>(); Iterator<ContainerReport> tmpItr = containersListFromAHS.iterator(); while (tmpItr.hasNext()) { containerIdsToBeKeptFromAHS.add(tmpItr.next().getContainerId()); } Iterator<ContainerReport> rmContainers = containersForAttempt.iterator(); while (rmContainers.hasNext()) { ContainerReport tmp = rmContainers.next(); containerIdsToBeKeptFromAHS.remove(tmp.getContainerId()); // Remove containers from AHS as container from RM will have latest // information } if (containerIdsToBeKeptFromAHS.size() > 0 && containersListFromAHS.size() != containerIdsToBeKeptFromAHS .size()) { Iterator<ContainerReport> containersFromHS = containersListFromAHS.iterator(); while (containersFromHS.hasNext()) { ContainerReport containerReport = containersFromHS.next(); if (containerIdsToBeKeptFromAHS.contains(containerReport .getContainerId())) { containersForAttempt.add(containerReport); } } } else if (containersListFromAHS.size() == containerIdsToBeKeptFromAHS .size()) { containersForAttempt.addAll(containersListFromAHS); } } } return containersForAttempt; } @Override public void moveApplicationAcrossQueues(ApplicationId appId, String queue) throws YarnException, IOException { MoveApplicationAcrossQueuesRequest request = MoveApplicationAcrossQueuesRequest.newInstance(appId, queue); rmClient.moveApplicationAcrossQueues(request); } @Override public ReservationSubmissionResponse submitReservation( ReservationSubmissionRequest request) throws YarnException, IOException { return rmClient.submitReservation(request); } @Override public ReservationUpdateResponse updateReservation( ReservationUpdateRequest request) throws YarnException, IOException { return rmClient.updateReservation(request); } @Override public ReservationDeleteResponse deleteReservation( ReservationDeleteRequest request) throws YarnException, IOException { 
return rmClient.deleteReservation(request); } @Override public Map<NodeId, Set<String>> getNodeToLabels() throws YarnException, IOException { return rmClient.getNodeToLabels(GetNodesToLabelsRequest.newInstance()) .getNodeToLabels(); } @Override public Map<String, Set<NodeId>> getLabelsToNodes() throws YarnException, IOException { return rmClient.getLabelsToNodes(GetLabelsToNodesRequest.newInstance()) .getLabelsToNodes(); } @Override public Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) throws YarnException, IOException { return rmClient.getLabelsToNodes( GetLabelsToNodesRequest.newInstance(labels)).getLabelsToNodes(); } @Override public Set<String> getClusterNodeLabels() throws YarnException, IOException { return rmClient.getClusterNodeLabels( GetClusterNodeLabelsRequest.newInstance()).getNodeLabels(); } }
/**
 * EMF-generated implementation class. Do not hand-edit code outside of
 * regions where the {@code @generated} tag has been removed, or the EMF
 * code generator will overwrite the changes.
 */
package gluemodel.substationStandard.LNNodes.LNGroupA.impl;

import gluemodel.substationStandard.Dataclasses.BSC;
import gluemodel.substationStandard.Dataclasses.SPC;

import gluemodel.substationStandard.LNNodes.LNGroupA.ANCR;
import gluemodel.substationStandard.LNNodes.LNGroupA.LNGroupAPackage;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>ANCR</b></em>'
 * (neutral current regulator logical node). Holds three cross-document
 * references that are lazily proxy-resolved on first access.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link gluemodel.substationStandard.LNNodes.LNGroupA.impl.ANCRImpl#getTapChg <em>Tap Chg</em>}</li>
 *   <li>{@link gluemodel.substationStandard.LNNodes.LNGroupA.impl.ANCRImpl#getRCol <em>RCol</em>}</li>
 *   <li>{@link gluemodel.substationStandard.LNNodes.LNGroupA.impl.ANCRImpl#getLCol <em>LCol</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ANCRImpl extends GroupAImpl implements ANCR {
	/**
	 * The cached value of the '{@link #getTapChg() <em>Tap Chg</em>}' reference.
	 * May hold an unresolved proxy until {@link #getTapChg()} is called.
	 * @see #getTapChg()
	 * @generated
	 * @ordered
	 */
	protected BSC tapChg;

	/**
	 * The cached value of the '{@link #getRCol() <em>RCol</em>}' reference.
	 * @see #getRCol()
	 * @generated
	 * @ordered
	 */
	protected SPC rCol;

	/**
	 * The cached value of the '{@link #getLCol() <em>LCol</em>}' reference.
	 * @see #getLCol()
	 * @generated
	 * @ordered
	 */
	protected SPC lCol;

	/**
	 * Protected: instances are created via the package factory.
	 * @generated
	 */
	protected ANCRImpl() {
		super();
	}

	/**
	 * Identifies this object's EClass for the reflective EMF API.
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return LNGroupAPackage.Literals.ANCR;
	}

	/**
	 * Returns the Tap Chg reference, resolving a stale proxy in place and
	 * firing a RESOLVE notification when the resolution changed the value.
	 * @generated
	 */
	public BSC getTapChg() {
		if (tapChg != null && tapChg.eIsProxy()) {
			InternalEObject oldTapChg = (InternalEObject)tapChg;
			tapChg = (BSC)eResolveProxy(oldTapChg);
			if (tapChg != oldTapChg) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupAPackage.ANCR__TAP_CHG, oldTapChg, tapChg));
			}
		}
		return tapChg;
	}

	/**
	 * Returns the raw cached Tap Chg value without proxy resolution.
	 * @generated
	 */
	public BSC basicGetTapChg() {
		return tapChg;
	}

	/**
	 * Sets the Tap Chg reference and fires a SET notification.
	 * @generated
	 */
	public void setTapChg(BSC newTapChg) {
		BSC oldTapChg = tapChg;
		tapChg = newTapChg;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupAPackage.ANCR__TAP_CHG, oldTapChg, tapChg));
	}

	/**
	 * Returns the RCol reference, resolving a stale proxy in place.
	 * @generated
	 */
	public SPC getRCol() {
		if (rCol != null && rCol.eIsProxy()) {
			InternalEObject oldRCol = (InternalEObject)rCol;
			rCol = (SPC)eResolveProxy(oldRCol);
			if (rCol != oldRCol) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupAPackage.ANCR__RCOL, oldRCol, rCol));
			}
		}
		return rCol;
	}

	/**
	 * Returns the raw cached RCol value without proxy resolution.
	 * @generated
	 */
	public SPC basicGetRCol() {
		return rCol;
	}

	/**
	 * Sets the RCol reference and fires a SET notification.
	 * @generated
	 */
	public void setRCol(SPC newRCol) {
		SPC oldRCol = rCol;
		rCol = newRCol;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupAPackage.ANCR__RCOL, oldRCol, rCol));
	}

	/**
	 * Returns the LCol reference, resolving a stale proxy in place.
	 * @generated
	 */
	public SPC getLCol() {
		if (lCol != null && lCol.eIsProxy()) {
			InternalEObject oldLCol = (InternalEObject)lCol;
			lCol = (SPC)eResolveProxy(oldLCol);
			if (lCol != oldLCol) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupAPackage.ANCR__LCOL, oldLCol, lCol));
			}
		}
		return lCol;
	}

	/**
	 * Returns the raw cached LCol value without proxy resolution.
	 * @generated
	 */
	public SPC basicGetLCol() {
		return lCol;
	}

	/**
	 * Sets the LCol reference and fires a SET notification.
	 * @generated
	 */
	public void setLCol(SPC newLCol) {
		SPC oldLCol = lCol;
		lCol = newLCol;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupAPackage.ANCR__LCOL, oldLCol, lCol));
	}

	/**
	 * Reflective getter: resolve=true routes through the proxy-resolving
	 * accessor, resolve=false through the basic one.
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case LNGroupAPackage.ANCR__TAP_CHG:
				if (resolve) return getTapChg();
				return basicGetTapChg();
			case LNGroupAPackage.ANCR__RCOL:
				if (resolve) return getRCol();
				return basicGetRCol();
			case LNGroupAPackage.ANCR__LCOL:
				if (resolve) return getLCol();
				return basicGetLCol();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective setter for the three reference features.
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case LNGroupAPackage.ANCR__TAP_CHG:
				setTapChg((BSC)newValue);
				return;
			case LNGroupAPackage.ANCR__RCOL:
				setRCol((SPC)newValue);
				return;
			case LNGroupAPackage.ANCR__LCOL:
				setLCol((SPC)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Reflective unsetter: resets each reference to null.
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case LNGroupAPackage.ANCR__TAP_CHG:
				setTapChg((BSC)null);
				return;
			case LNGroupAPackage.ANCR__RCOL:
				setRCol((SPC)null);
				return;
			case LNGroupAPackage.ANCR__LCOL:
				setLCol((SPC)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * A feature is "set" when its cached reference is non-null.
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case LNGroupAPackage.ANCR__TAP_CHG:
				return tapChg != null;
			case LNGroupAPackage.ANCR__RCOL:
				return rCol != null;
			case LNGroupAPackage.ANCR__LCOL:
				return lCol != null;
		}
		return super.eIsSet(featureID);
	}

} //ANCRImpl
package edu.umass.cs.pig.test;

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileOutputStream;
import java.util.Map;
import java.util.Properties;

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.DataBag;
import org.apache.pig.impl.PigContext;
import org.apache.pig.newplan.Operator;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests Pig's example-data generator (PigServer.getExamples) against a set of
 * SDSS (Sloan Digital Sky Survey) sample queries translated to Pig Latin.
 * Each test registers a LOAD + FILTER (sometimes JOIN/FOREACH) pipeline and
 * only asserts that example generation returns a non-null map.
 *
 * NOTE(review): every input path is a hard-coded absolute path under
 * /home/kaituo/... — these tests only run on that machine. Consider making
 * the data directory configurable.
 */
public class TestExGen4SDSSReal {

	// Shared local-mode Pig context for all tests.
	static PigContext pigContext = new PigContext(ExecType.LOCAL, new Properties());

	// Quoted input-file paths (single quotes included so they can be spliced
	// directly into Pig Latin load statements), one per test scenario.
	static String PhotoObj, Galaxy, SpecClass, SpecObj, Galaxy2, Galaxy3,
			PhotoPrimary, Galaxy4, PhotoPrimary2, Galaxy5, PhotoObj2;//, PhotoObj3;
	// static File filePhotoObj;

	// Instance initializer: runs once per test instance (JUnit creates a new
	// instance for each @Test), re-connecting the shared static context each
	// time. NOTE(review): a failed connect is only printed and swallowed, so
	// tests would then fail later with a less obvious error.
	{
		try {
			pigContext.connect();
		} catch (ExecException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	/**
	 * Assigns the quoted input paths for every scenario. The temp-file +
	 * writeData route is commented out in favor of pre-generated data files.
	 */
	@BeforeClass
	public static void oneTimeSetup() throws Exception {
		// filePhotoObj = File.createTempFile("dataPhotoObj", ".dat");
		// writeData(filePhotoObj);
		// filePhotoObj.deleteOnExit();
		PhotoObj = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/BasicFdr/basic0.dat" + "'";
		Galaxy = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/MergingCondition/GalaxyPair0" + "'";
		SpecClass = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/SpecClassFdr/specClass0.dat" + "'";
		SpecObj = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/specObjIDFdr/specObj0.dat" + "'";
		Galaxy2 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/multipleCriteria/galaxy2N0.dat" + "'";
		Galaxy3 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/spatialUnitVectors/galaxy3N0.dat" + "'";
		PhotoPrimary = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/CVs/PhotoPrimary0.dat" + "'";
		Galaxy4 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/lowzQSOs/Galaxy4N0.dat" + "'";
		PhotoPrimary2 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/velocitiesErrors/photoPrimary2N0.dat" + "'";
		Galaxy5 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/bETWEEN/Galaxy5N0.dat" + "'";
		PhotoObj2 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRandomData/10/MovingAsteroids/PhotoObj2N0.dat" + "'";
		// PhotoObj3 = "'" + "/home/kaituo/code/pig3/trunk/SDSSRealData/movingAsteroids2" + "'";
		// System.out.println("PhotoObj : " + PhotoObj + "\n");
		// System.out.println("Test data created.");
	}

	/**
	 * Writes ten tab-separated PhotoObj rows (objID, field, ra, dec, run) to
	 * the given file — sample output of
	 * "SELECT top 10 objID, field, ra, dec, run FROM PhotoObj".
	 * NOTE(review): only referenced from the commented-out setup above; dead
	 * code unless the temp-file route is re-enabled.
	 */
	private static void writeData(File dataFile) throws Exception {
		FileOutputStream dat = new FileOutputStream(dataFile);
		dat.write(( 758882625380943288L + "\t" + 12 + "\t" + 50.7087978370105 + "\t" + 76.9631177132159 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942911L + "\t" + 12 + "\t" + 50.7990706615098 + "\t" + 77.0751444481533 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942429L + "\t" + 12 + "\t" + 50.7447936093687 + "\t" + 77.0303566953601 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942275L + "\t" + 12 + "\t" + 50.7502262046164 + "\t" + 76.9432157376269 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942005L + "\t" + 12 + "\t" + 50.6069052556651 + "\t" + 77.0319862609625 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380941861L + "\t" + 12 + "\t" + 50.5479970090686 + "\t" + 76.9939765386199 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942008L + "\t" + 12 + "\t" + 50.6200418736017 + "\t" + 77.0308681539771 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942031L + "\t" + 12 + "\t" + 50.7019597858277 + "\t" + 77.0215462421214 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942091L + "\t" + 12 + "\t" + 50.7423458829802 + "\t" + 77.0451894855714 + "\t" + 6074 + "\n") .getBytes());
		dat.write(( 758882625380942193L + "\t" + 12 + "\t" + 50.8090358164206 + "\t" + 77.0809052417237 + "\t" + 6074 + "\n") .getBytes());
		dat.close();
	}

	/**
	 * Basic SELECT-FROM-WHERE:
	 * SELECT objID, field, ra, dec FROM PhotoObj WHERE run=1336 AND field=11.
	 */
	@Test
	public void testBasic() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + PhotoObj
				+ " using PigStorage() as (objID : long, field : int, ra : float, dec : float, run : int);");
		pigServer.registerQuery("B = filter A by run == 1336 and field ==11 ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Two-condition WHERE: galaxies brighter than r=22 with extinction_r >
	 * 0.175 (SELECT objID FROM Galaxy WHERE r < 22 AND extinction_r > 0.175).
	 */
	@Test
	public void testGalaxies2Criteria() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + Galaxy
				+ " using PigStorage() as (objID : long, extinction_r : double, r : double);");
		pigServer.registerQuery("B = filter A by r < 22.0 and extinction_r > 0.175 ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Objects with unclassified spectra: SELECT specObjID FROM SpecObj WHERE
	 * SpecClass = fSpecClass('UNKNOWN'), expressed as a filter on the class
	 * lookup table joined against the spectra table.
	 *
	 * NOTE(review): relation A loads from {@code SpecObj} with the
	 * (name, value) schema documented for the SpecClass lookup file, while C
	 * loads from {@code SpecClass} with the (specObjID, SpecClass) schema
	 * documented for the SpecObj file — the two path variables appear to be
	 * swapped. Verify against the actual data files.
	 */
	@Test
	public void testUnclassifiedSpectra() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + SpecObj
				+ " using PigStorage() as (name : chararray, value : int);\n");
		pigServer.registerQuery("B = filter A by name eq 'UNKNOWN';\n");
		pigServer.registerQuery("C = load " + SpecClass
				+ " using PigStorage() as (specObjID : long, SpecClass : int);\n");
		pigServer.registerQuery("D = join B by value, C by SpecClass;\n");
		pigServer.registerQuery("E = foreach D generate specObjID;\n");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("E");
		assertTrue(derivedData != null);
	}

	/**
	 * Three criteria with arithmetic: SELECT objID FROM Galaxy WHERE ra
	 * BETWEEN 250 AND 270 AND dec > 50 AND (g+rho) BETWEEN 23 AND 25
	 * (g = blue magnitude, rho = 5*log(r); BETWEEN expanded to range checks).
	 */
	@Test
	public void testMultipleCriteria() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + Galaxy2
				+ " using PigStorage() as (objID : long, ra : float, dec : float, g : float, rho : float);");
		pigServer.registerQuery("B = filter A by ra <= 270.0 and ra >= 250.0 and dec > 50.0 and g+rho >= 23.0 and g+rho <= 25.0 ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Coordinate cut via unit vectors (RA between 40 and 100): SELECT colc_g,
	 * colc_r FROM Galaxy WHERE (-0.642788*cx + 0.766044*cy >= 0) AND
	 * (-0.984808*cx - 0.173648*cy < 0).
	 */
	@Test
	public void testSpatialUnitVectors() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + Galaxy3
				+ " using PigStorage() as (colc_g : float, colc_r : float, cx : float, cy : float);");
		pigServer.registerQuery("B = filter A by (-0.642788 * cx + 0.766044 * cy>=0.0) and (-0.984808 * cx - 0.173648 * cy <0.0) ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Cataclysmic-variable color cuts (Szkody): WHERE u-g < 0.4 AND g-r < 0.7
	 * AND r-i > 0.4 AND i-z > 0.4 over PhotoPrimary.
	 */
	@Test
	public void testCVs() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + PhotoPrimary
				+ " using PigStorage() as (run : int, camcol : int, rerun : int, field : int, objID : long, u : double, g : double, r : double, i : double, z : double, ra : float, dec : float);");
		pigServer.registerQuery("B = filter A by u - g < 0.4 and g - r < 0.7 and r - i > 0.4 and i - z > 0.4 ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Low-z QSO candidate color cuts (Richards): long conjunction of u/g/r/i/z
	 * color-range conditions with g <= 22, over Galaxy.
	 */
	@Test
	public void testLowzQSOs() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + Galaxy4
				+ " using PigStorage() as ( g : double, run : int, rerun : int, camcol : int, field : int, objID : int, u : double, r : double, i : double, z : double);");
		pigServer.registerQuery("B = filter A by ( (g <= 22.0) and (u - g >= -0.27) and (u - g < 0.71) and (g - r >= -0.24) and (g - r < 0.35) and (r - i >= -0.27) and (r - i < 0.57) and (i - z >= -0.35) and (i - z < 0.70) ) ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Reliable object velocities (Lupton): WHERE rowv^2/rowvErr^2 +
	 * colv^2/colvErr^2 > 4, using piggybank POW for exponentiation.
	 */
	@Test
	public void testVelocitiesErrors() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + PhotoPrimary2
				+ " using PigStorage() as (run : int, camCol : int, field : int, objID : long, rowC : double, colC : double, rowV : double, colV : double, rowvErr : double, colvErr : double, flags : long, psfMag_u : double, psfMag_g : double, psfMag_r : double, psfMag_i : double, psfMag_z : double, psfMagErr_u : double, psfMagErr_g : double, psfMagErr_r : double, psfMagErr_i : double, psfMagErr_z : double);");
		pigServer.registerQuery("B = filter A by org.apache.pig.piggybank.evaluation.math.POW(rowV, 2.0) / org.apache.pig.piggybank.evaluation.math.POW(rowvErr, 2.0) + org.apache.pig.piggybank.evaluation.math.POW(colV, 2.0) / org.apache.pig.piggybank.evaluation.math.POW(colvErr, 2.0) > 4.0 ;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * BETWEEN query: red surface brightness r+rho < 24, major axis isoA_r
	 * between 30"/0.4 and 60"/0.4 (pre-computed as 75.0..150.0 pixels), and
	 * squared ellipticity q_r^2 + u_r^2 > 0.25.
	 */
	@Test
	public void testBETWEEN() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + Galaxy5
				+ " using PigStorage() as (ObjID : long, r : double, rho : double, isoA_r : double, q_r : double, u_r : double);");
		pigServer.registerQuery("B = filter A by r + rho < 24.0 and isoA_r >= 75.0 and isoA_r <= 150.0 and (org.apache.pig.piggybank.evaluation.math.POW(q_r,2.0) + org.apache.pig.piggybank.evaluation.math.POW(u_r,2.0)) > 0.25;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("B");
		assertTrue(derivedData != null);
	}

	/**
	 * Moving objects consistent with an asteroid, with a derived column:
	 * SELECT objID, sqrt(rowv^2 + colv^2) AS velocity FROM PhotoObj WHERE
	 * (rowv^2 + colv^2) > 50 AND rowv >= 0 AND colv >= 0. Examples are
	 * generated for the FOREACH (C), not the filter.
	 */
	@Test
	public void testMovingAsteroids() throws Exception {
		PigServer pigServer = new PigServer(pigContext);
		pigServer.registerQuery("A = load " + PhotoObj2
				+ " using PigStorage() as (objID : long, rowv : double, colv : double);");
		pigServer.registerQuery("B = filter A by (org.apache.pig.piggybank.evaluation.math.POW(rowv,2.0) + org.apache.pig.piggybank.evaluation.math.POW(colv, 2.0)) > 50.0 and rowv >= 0.0 and colv >=0.0 ;");
		pigServer.registerQuery("C = foreach B generate org.apache.pig.piggybank.evaluation.math.SQRT(org.apache.pig.piggybank.evaluation.math.POW(rowv,2.0) + org.apache.pig.piggybank.evaluation.math.POW(colv, 2.0)) as velocity;");
		Map<Operator, DataBag> derivedData = pigServer.getExamples("C");
		assertTrue(derivedData != null);
	}
}
package programminglife.gui.controller; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import programminglife.gui.Alerts; import programminglife.model.Bookmark; import programminglife.utility.Console; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * This class is a controller for loading storing and deleting bookmarks. */ final class BookmarkController { private static final String BOOKMARKPATH = "bookmarks.xml"; /** * This class should not be instantiated. * The functionality of this class does not rely on any particular instance. * It relies solely on the contents of the bookmarks file. */ private BookmarkController() { } /** * Loads all the bookmarks and puts them in a Map. * * @return Map containing the bookmarks with their keys. */ static Map<String, List<Bookmark>> loadAllBookmarks() { return loadAllBookmarks(BOOKMARKPATH); } /** * Default bookmark storing method. * Uses the default bookmark path. * * @param bookmark The bookmark to store. * @return true if the bookmark is stored and did not exist yet, false otherwise */ static boolean storeBookmark(Bookmark bookmark) { return storeBookmark(BOOKMARKPATH, bookmark); } /** * Default bookmark storing method. * Uses the default bookmark path. * * @param graphName The name of the graph file. * @param bookMarkName The name of the bookmark (must be unique for the graph). 
*/ static void deleteBookmark(String graphName, String bookMarkName) { deleteBookmark(BOOKMARKPATH, graphName, bookMarkName); } /** * Checks whether a bookmark exists. * * @param fileName The name of the bookmarks file * @param graphName The name of the graph file * @param bookmarkName The name of the bookmark * @return true if it exists, false otherwise */ private static boolean bookmarkExists(String fileName, String graphName, String bookmarkName) { Element doc = loadDoc(fileName); assert doc != null; Element graph = findTag(doc.getElementsByTagName("graph"), graphName); if (graph != null) { Element bookmark = findTag(graph.getElementsByTagName("bookmark"), bookmarkName); if (bookmark != null) { return true; } } return false; } /** * Store a bookmark in bookmarks.xml. * * @param fileName The path of the bookmarks file. * @param bookmark The bookmark to store. * @return true if the bookmark already exists, false otherwise. */ static boolean storeBookmark(String fileName, Bookmark bookmark) { checkFile(fileName); if (bookmarkExists(fileName, bookmark.getGraphName(), bookmark.getBookmarkName())) { return false; } try { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = docFactory.newDocumentBuilder(); Document doc = builder.parse(fileName); doc.getDocumentElement().normalize(); Element rootElement = doc.getDocumentElement(); Element newBookmark = createNewBookmarkTag(doc, bookmark); Element graphTag = findTag(doc.getElementsByTagName("graph"), bookmark.getGraphName()); // No earlier bookmarks in this graph if (graphTag == null) { graphTag = doc.createElement("graph"); Element graphNameTag = doc.createElement("name"); graphNameTag.appendChild(doc.createTextNode(bookmark.getGraphName())); graphTag.appendChild(graphNameTag); rootElement.appendChild(graphTag); } graphTag.appendChild(newBookmark); TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer transformer = 
transformerFactory.newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); DOMSource source = new DOMSource(doc); StreamResult streamResult = new StreamResult(new File(fileName)); transformer.transform(source, streamResult); Console.println("Created bookmark " + bookmark.getBookmarkName() + " Center Node: " + bookmark.getNodeID() + " Radius: " + bookmark.getRadius()); return true; } catch (ParserConfigurationException | SAXException | IOException | TransformerException pce) { Alerts.error("The bookmarks cannot be stored"); } return false; } /** * Creates a bookmark Element. * * @param doc The Document to create the bookmark with * @param bookmark The bookmark to create the tag for * @return An element containing the new bookmark */ private static Element createNewBookmarkTag(Document doc, Bookmark bookmark) { Element newBookmark = doc.createElement("bookmark"); Element nameTag = doc.createElement("name"); nameTag.appendChild(doc.createTextNode(bookmark.getBookmarkName())); Element descriptionTag = doc.createElement("description"); descriptionTag.appendChild(doc.createTextNode(bookmark.getDescription())); Element radiusTag = doc.createElement("radius"); radiusTag.appendChild(doc.createTextNode(String.valueOf(bookmark.getRadius()))); Element iDTag = doc.createElement("ID"); iDTag.appendChild(doc.createTextNode(String.valueOf(bookmark.getNodeID()))); Element pathTag = doc.createElement("path"); pathTag.appendChild(doc.createTextNode(bookmark.getPath())); newBookmark.appendChild(nameTag); newBookmark.appendChild(iDTag); newBookmark.appendChild(radiusTag); newBookmark.appendChild(descriptionTag); newBookmark.appendChild(pathTag); return newBookmark; } /** * Deletes a bookmark from the bookmark file. * * @param fileName The name of the bookmark file. * @param graphName The name of the graph. * @param bookmarkName The name of the bookmark to be deleted. 
*/ static void deleteBookmark(String fileName, String graphName, String bookmarkName) { Element doc = loadDoc(fileName); assert doc != null; Element graphTag = findTag(doc.getElementsByTagName("graph"), graphName); if (graphTag != null) { Element bookmarkTag = findTag(graphTag.getElementsByTagName("bookmark"), bookmarkName); if (bookmarkTag != null) { graphTag.removeChild(bookmarkTag); TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer transformer; try { transformer = transformerFactory.newTransformer(); DOMSource source = new DOMSource(doc); StreamResult streamResult = new StreamResult(new File(fileName)); transformer.transform(source, streamResult); } catch (TransformerException e) { Alerts.error("The bookmarks cannot be deleted"); } } } } /** * Finds and loads the doc element from the bookmark file. * * @param fileName The name of the file where the bookmarks reside. * @return A DOM Element containing all graphs and bookmarks. */ private static Element loadDoc(String fileName) { checkFile(fileName); Document dom; DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); try { DocumentBuilder builder = factory.newDocumentBuilder(); dom = builder.parse(fileName); dom.getDocumentElement().normalize(); return dom.getDocumentElement(); } catch (ParserConfigurationException | SAXException | IOException e) { Alerts.error("Bookmark file error"); } return null; } /** * Load all bookmarks from all files. 
* * @param fileName The bookmark file from which to read * @return A map of lists containing all bookmarks */ static Map<String, List<Bookmark>> loadAllBookmarks(String fileName) { Map<String, List<Bookmark>> result = new LinkedHashMap<>(); Element doc = loadDoc(fileName); assert doc != null; NodeList graphs = doc.getElementsByTagName("graph"); if (graphs != null) { for (int i = 0; i < graphs.getLength(); i++) { if (graphs.item(i).getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) graphs.item(i); String graphName = element.getElementsByTagName("name").item(0).getTextContent(); NodeList bookmarks = element.getElementsByTagName("bookmark"); result.put(graphName, parseBookmarks(graphName, bookmarks)); } } } return result; } /** * Find the tag in the xml file belonging to the graph name. * * @param nodeList is the list of all graph tags * @param name is the name of the graph this method tries to find * @return The Element containing the graph or null if not found */ private static Element findTag(NodeList nodeList, String name) { if (nodeList != null) { for (int i = 0; i < nodeList.getLength(); i++) { if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) nodeList.item(i); if (element.getElementsByTagName("name").item(0).getTextContent().equals(name)) { return element; } } } } return null; } /** * Checks whether the given bookmark fileName exists. * If not it will create the file with the necessary tags. 
* * @param fileName The name of the bookmark file */ private static void checkFile(String fileName) { File bookmarkFile = new File(fileName); if (!bookmarkFile.exists()) { try { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = docFactory.newDocumentBuilder(); Document doc = builder.newDocument(); Element rootElement = doc.createElement("graphs"); doc.appendChild(rootElement); TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer transformer = transformerFactory.newTransformer(); DOMSource source = new DOMSource(doc); StreamResult result = new StreamResult(bookmarkFile); transformer.transform(source, result); } catch (ParserConfigurationException | TransformerException e) { Alerts.error("The file could not be found or created"); } } } /** * Parses a list of bookmark nodes and returns them. * * @param graphName The file these bookmarks are about. * @param bookmarks The nodeList containing all bookmarks * @return A bookmark represented by the element */ private static List<Bookmark> parseBookmarks(String graphName, NodeList bookmarks) { List<Bookmark> result = new ArrayList<>(); if (bookmarks != null) { for (int j = 0; j < bookmarks.getLength(); j++) { if (bookmarks.item(j).getNodeType() == Node.ELEMENT_NODE) { Element el = (Element) bookmarks.item(j); result.add(parseBookmark(graphName, el)); } } return result; } return null; } /** * Parses an xml element to return a bookmark. * * @param graphName The file that this bookmark is about * @param el The element containing the bookmark * @return The bookmark from the element. 
*/ private static Bookmark parseBookmark(String graphName, Element el) { if (el != null) { return new Bookmark(graphName, el.getElementsByTagName("path").item(0).getTextContent(), Integer.parseInt(el.getElementsByTagName("ID").item(0).getTextContent()), Integer.parseInt(el.getElementsByTagName("radius").item(0).getTextContent()), el.getElementsByTagName("name").item(0).getTextContent(), el.getElementsByTagName("description").item(0).getTextContent()); } return null; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.IndexEngine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;

import java.io.IOException;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;

/**
 * Per-index service that parses query/filter sources (builders, bytes, strings,
 * or {@link XContentParser} streams) into {@link ParsedQuery}/{@link ParsedFilter}
 * objects, using the query and filter parsers registered globally (via
 * {@link IndicesQueriesRegistry}) plus any index-specific named parsers injected
 * at construction time.
 */
public class IndexQueryParserService extends AbstractIndexComponent {

    public static final class Defaults {
        // Settings group prefixes under which per-parser settings can be configured.
        public static final String QUERY_PREFIX = "index.queryparser.query";
        public static final String FILTER_PREFIX = "index.queryparser.filter";
    }

    // One reusable QueryParseContext per thread; released in close().
    private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
        @Override
        protected QueryParseContext initialValue() {
            return new QueryParseContext(index, IndexQueryParserService.this);
        }
    };

    final CacheRecycler cacheRecycler;

    final AnalysisService analysisService;

    final ScriptService scriptService;

    final MapperService mapperService;

    final SimilarityService similarityService;

    final IndexCache indexCache;

    final IndexFieldDataService fieldDataService;

    final IndexEngine indexEngine;

    // Immutable name -> parser lookup tables built once in the constructor.
    private final Map<String, QueryParser> queryParsers;

    private final Map<String, FilterParser> filterParsers;

    private String defaultField;
    private boolean queryStringLenient;
    // When true, parsing runs with ParseField.Flag.STRICT (rejects deprecated names).
    private final boolean strict;

    /**
     * Builds the parser lookup tables: the shared parsers from
     * {@code indicesQueriesRegistry} first, then any index-local named parsers
     * (which therefore override registry entries with the same name).
     */
    @Inject
    public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings, IndicesQueriesRegistry indicesQueriesRegistry,
                                   CacheRecycler cacheRecycler,
                                   ScriptService scriptService, AnalysisService analysisService,
                                   MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService,
                                   IndexEngine indexEngine, @Nullable SimilarityService similarityService,
                                   @Nullable Map<String, QueryParserFactory> namedQueryParsers,
                                   @Nullable Map<String, FilterParserFactory> namedFilterParsers) {
        super(index, indexSettings);
        this.cacheRecycler = cacheRecycler;
        this.scriptService = scriptService;
        this.analysisService = analysisService;
        this.mapperService = mapperService;
        this.similarityService = similarityService;
        this.indexCache = indexCache;
        this.fieldDataService = fieldDataService;
        this.indexEngine = indexEngine;

        this.defaultField = indexSettings.get("index.query.default_field", AllFieldMapper.NAME);
        this.queryStringLenient = indexSettings.getAsBoolean("index.query_string.lenient", false);
        this.strict = indexSettings.getAsBoolean("index.query.parse.strict", false);

        // Instantiate index-local query parsers from their factories, passing each
        // its settings group (or EMPTY_SETTINGS when none is configured).
        List<QueryParser> queryParsers = newArrayList();
        if (namedQueryParsers != null) {
            Map<String, Settings> queryParserGroups = indexSettings.getGroups(IndexQueryParserService.Defaults.QUERY_PREFIX);
            for (Map.Entry<String, QueryParserFactory> entry : namedQueryParsers.entrySet()) {
                String queryParserName = entry.getKey();
                QueryParserFactory queryParserFactory = entry.getValue();
                Settings queryParserSettings = queryParserGroups.get(queryParserName);
                if (queryParserSettings == null) {
                    queryParserSettings = EMPTY_SETTINGS;
                }
                queryParsers.add(queryParserFactory.create(queryParserName, queryParserSettings));
            }
        }

        Map<String, QueryParser> queryParsersMap = newHashMap();
        queryParsersMap.putAll(indicesQueriesRegistry.queryParsers());
        // NOTE(review): this null check is dead — the local list is created by
        // newArrayList() above and can never be null.
        if (queryParsers != null) {
            for (QueryParser queryParser : queryParsers) {
                add(queryParsersMap, queryParser);
            }
        }
        this.queryParsers = ImmutableMap.copyOf(queryParsersMap);

        // Same wiring for filter parsers.
        List<FilterParser> filterParsers = newArrayList();
        if (namedFilterParsers != null) {
            Map<String, Settings> filterParserGroups = indexSettings.getGroups(IndexQueryParserService.Defaults.FILTER_PREFIX);
            for (Map.Entry<String, FilterParserFactory> entry : namedFilterParsers.entrySet()) {
                String filterParserName = entry.getKey();
                FilterParserFactory filterParserFactory = entry.getValue();
                Settings filterParserSettings = filterParserGroups.get(filterParserName);
                if (filterParserSettings == null) {
                    filterParserSettings = EMPTY_SETTINGS;
                }
                filterParsers.add(filterParserFactory.create(filterParserName, filterParserSettings));
            }
        }

        Map<String, FilterParser> filterParsersMap = newHashMap();
        filterParsersMap.putAll(indicesQueriesRegistry.filterParsers());
        // NOTE(review): dead null check, same as for queryParsers above.
        if (filterParsers != null) {
            for (FilterParser filterParser : filterParsers) {
                add(filterParsersMap, filterParser);
            }
        }
        this.filterParsers = ImmutableMap.copyOf(filterParsersMap);
    }

    /** Releases the per-thread parse contexts. */
    public void close() {
        cache.close();
    }

    /** @return the default field queries are parsed against ("index.query.default_field"). */
    public String defaultField() {
        return this.defaultField;
    }

    /** @return whether query_string parsing is lenient ("index.query_string.lenient"). */
    public boolean queryStringLenient() {
        return this.queryStringLenient;
    }

    /** @return the query parser registered under {@code name}, or null. */
    public QueryParser queryParser(String name) {
        return queryParsers.get(name);
    }

    /** @return the filter parser registered under {@code name}, or null. */
    public FilterParser filterParser(String name) {
        return filterParsers.get(name);
    }

    /**
     * Parses the given builder by serializing it to bytes first.
     *
     * @throws QueryParsingException on any parse failure (other exceptions are wrapped).
     */
    public ParsedQuery parse(QueryBuilder queryBuilder) throws ElasticsearchException {
        XContentParser parser = null;
        try {
            BytesReference bytes = queryBuilder.buildAsBytes();
            parser = XContentFactory.xContent(bytes).createParser(bytes);
            return parse(cache.get(), parser);
        } catch (QueryParsingException e) {
            // Already carries parse context; rethrow as-is rather than re-wrapping.
            throw e;
        } catch (Exception e) {
            throw new QueryParsingException(index, "Failed to parse", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }

    /** Parses a query from the full byte array. */
    public ParsedQuery parse(byte[] source) throws ElasticsearchException {
        return parse(source, 0, source.length);
    }

    /** Parses a query from a slice of a byte array. */
    public ParsedQuery parse(byte[] source, int offset, int length) throws ElasticsearchException {
        XContentParser parser = null;
        try {
            parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length);
            return parse(cache.get(), parser);
        } catch (QueryParsingException e) {
            throw e;
        } catch (Exception e) {
            throw new QueryParsingException(index, "Failed to parse", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }

    /** Parses a query from a {@link BytesReference}. */
    public ParsedQuery parse(BytesReference source) throws ElasticsearchException {
        XContentParser parser = null;
        try {
            parser = XContentFactory.xContent(source).createParser(source);
            return parse(cache.get(), parser);
        } catch (QueryParsingException e) {
            throw e;
        } catch (Exception e) {
            throw new QueryParsingException(index, "Failed to parse", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }

    /** Parses a query from its string source; the source is echoed in the failure message. */
    public ParsedQuery parse(String source) throws QueryParsingException {
        XContentParser parser = null;
        try {
            parser = XContentFactory.xContent(source).createParser(source);
            return parse(cache.get(), parser);
        } catch (QueryParsingException e) {
            throw e;
        } catch (Exception e) {
            throw new QueryParsingException(index, "Failed to parse [" + source + "]", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }

    /** Parses a query from an already-positioned parser; the caller owns/closes the parser. */
    public ParsedQuery parse(XContentParser parser) {
        try {
            return parse(cache.get(), parser);
        } catch (IOException e) {
            throw new QueryParsingException(index, "Failed to parse", e);
        }
    }

    /**
     * Parses an inner filter, returning null if the filter should be ignored.
     */
    @Nullable
    public ParsedFilter parseInnerFilter(XContentParser parser) throws IOException {
        QueryParseContext context = cache.get();
        context.reset(parser);
        try {
            Filter filter = context.parseInnerFilter();
            if (filter == null) {
                return null;
            }
            return new ParsedFilter(filter, context.copyNamedFilters());
        } finally {
            // Detach the parser so the pooled context never outlives it.
            context.reset(null);
        }
    }

    /** Parses an inner query using the per-thread context; may return null. */
    @Nullable
    public Query parseInnerQuery(XContentParser parser) throws IOException {
        QueryParseContext context = cache.get();
        context.reset(parser);
        try {
            return context.parseInnerQuery();
        } finally {
            context.reset(null);
        }
    }

    /**
     * Selectively parses a query from a top level query or query_binary json field from the specified source.
     *
     * @throws QueryParsingException when the source has no query, an unsupported
     *         field, or fails to parse.
     */
    public ParsedQuery parseQuery(BytesReference source) {
        try {
            ParsedQuery parsedQuery = null;
            XContentParser parser = XContentHelper.createParser(source);
            for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    String fieldName = parser.currentName();
                    if ("query".equals(fieldName)) {
                        parsedQuery = parse(parser);
                    } else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) {
                        // query_binary holds an embedded, independently-encoded query source.
                        byte[] querySource = parser.binaryValue();
                        XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
                        parsedQuery = parse(qSourceParser);
                    } else {
                        throw new QueryParsingException(index(), "request does not support [" + fieldName + "]");
                    }
                }
            }
            if (parsedQuery != null) {
                return parsedQuery;
            }
        } catch (QueryParsingException e) {
            throw e;
        } catch (Throwable e) {
            throw new QueryParsingException(index, "Failed to parse", e);
        }
        throw new QueryParsingException(index(), "Required query is missing");
    }

    /**
     * Shared worker for all public parse entry points: resets the pooled context,
     * applies strict mode when configured, and maps a null query to match-none.
     */
    private ParsedQuery parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
        parseContext.reset(parser);
        try {
            if (strict) {
                parseContext.parseFlags(EnumSet.of(ParseField.Flag.STRICT));
            }
            Query query = parseContext.parseInnerQuery();
            if (query == null) {
                query = Queries.newMatchNoDocsQuery();
            }
            return new ParsedQuery(query, parseContext.copyNamedFilters());
        } finally {
            parseContext.reset(null);
        }
    }

    // Registers a filter parser under each of its names (interned for cheap lookups).
    private void add(Map<String, FilterParser> map, FilterParser filterParser) {
        for (String name : filterParser.names()) {
            map.put(name.intern(), filterParser);
        }
    }

    // Registers a query parser under each of its names.
    private void add(Map<String, QueryParser> map, QueryParser queryParser) {
        for (String name : queryParser.names()) {
            map.put(name.intern(), queryParser);
        }
    }
}
package fr.sqli.cordialement.services.impl;

import com.aylien.textapi.TextAPIClient;
import com.aylien.textapi.parameters.SentimentParams;
import com.aylien.textapi.responses.Sentiment;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import com.vdurmont.emoji.EmojiParser;
import emoji4j.Emoji;
import emoji4j.EmojiUtils;
import fr.sqli.cordialement.model.bean.*;
import fr.sqli.cordialement.model.entity.*;
import fr.sqli.cordialement.model.repository.EmailRepository;
import fr.sqli.cordialement.model.repository.ScoreRepository;
import fr.sqli.cordialement.services.ScoreService;
import fr.sqli.cordialement.utils.EmailUtils;
import io.indico.Indico;
import io.indico.api.results.BatchIndicoResult;
import io.indico.api.utils.IndicoException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static emoji4j.EmojiUtils.emojify;
import static fr.sqli.cordialement.utils.RepustateClient.getSentimentBulk;
import static fr.sqli.cordialement.utils.ScoreUtils.*;

/**
 * Scores an {@code Email}'s fragments for sentiment using several external APIs
 * (Indico, Microsoft, Aylien, MeaningCloud, Repustate), a local smiley table, and
 * a weighted "global" model. Each {@code generateEmailScoresWith*} method saves one
 * {@code Score} per fragment and one averaged {@code EmailScore} for the email.
 * Scores appear to be normalized to [0, 1] with 0.5 meaning neutral.
 */
@Service
public class ScoreServiceImpl implements ScoreService {

    private ScoreRepository scoreRepository;
    private EmailRepository emailRepository;

    // NOTE(review): API credentials are hardcoded placeholders; these should come
    // from configuration (environment/properties), never from source control.
    private final static String apiKeyIndico = "your-key";
    private final static String apiKeyMicrosoft = "your-key";
    private final static String idAppAylien = "your-key";
    private final static String apiKeyAylien = "your-key";
    private final static String apiKeyMeaningCloud = "your-key";

    // Matches hex HTML entities (e.g. "&#x1f600;") produced by EmojiUtils.hexHtmlify.
    private static final Pattern htmlEntityPattern = Pattern.compile("&#x\\w+;");

    /**
     * Repositories are constructor-injected by Spring.
     */
    @Autowired
    public ScoreServiceImpl(ScoreRepository scoreRepository, EmailRepository emailRepository) {
        this.scoreRepository = scoreRepository;
        this.emailRepository = emailRepository;
    }

    /**
     * Runs every scoring backend on the email, except SURVEY (not an automated API).
     */
    @Override
    public void generateEmailScores(Email email) {
        for (API api : API.values()) {
            if (api != API.SURVEY) {
                generateEmailScoresWithApi(email, api);
            }
        }
    }

    /**
     * Dispatches to the scoring routine for a single backend.
     * SURVEY has no case here and is silently ignored.
     */
    @Override
    public void generateEmailScoresWithApi(Email email, API api) {
        switch (api) {
            case INDICO:
                generateEmailScoreWithIndico(email);
                break;
            case MICROSOFT:
                generateEmailScoresWithMicrosoft(email);
                break;
            case AYLIEN:
                generateEmailScoresWithAylien(email);
                break;
            case MEANINGCLOUD:
                generateEmailScoresWithMeaningCloud(email);
                break;
            case REPUSTATE:
                generateEmailScoresWithRepustate(email);
                break;
            case SMILEYS:
                generateEmailScoresWithSmileys(email);
                break;
            case GLOBAL:
                computeEmailScore(email);
                break;
        }
    }

    /* APIs */

    /**
     * Scores all fragments in one batched Indico sentimentHQ call,
     * then stores the per-fragment scores and their average.
     */
    private void generateEmailScoreWithIndico(Email email) {
        List<Fragment> fragments = email.getFragments();
        List<Double> scores = new ArrayList<>();
        API indicoApi = API.INDICO;
        int i = 0;
        Double somme = 0.0;
        String[] stringFragments = fragmentListToStringArray(email.getFragments());
        try {
            Indico indico = new Indico(apiKeyIndico);
            BatchIndicoResult multiple = indico.sentimentHQ.predict(stringFragments);
            scores = multiple.getSentimentHQ();
        } catch (IndicoException | IOException e) {
            // NOTE(review): printStackTrace + falling through means an API failure still
            // attempts the average below with i == 0 (somme/i is then NaN); prefer a
            // logger and an early return — confirm desired failure behavior.
            e.printStackTrace();
        }
        for (Double scoreReturned : scores) {
            Score score = new Score(fragments.get(i), indicoApi, scoreReturned, new Date());
            this.scoreRepository.save(score);
            i++;
            somme += scoreReturned;
        }
        // somme/i is the mean fragment score (NaN when no scores came back).
        email.addEmailScore(new EmailScore(email, indicoApi, somme/i, new Date()));
        this.emailRepository.save(email);
    }

    /**
     * Scores all fragments via the Microsoft text analytics API (single JSON
     * batch request), assuming the response documents line up 1:1 with fragments.
     */
    private void generateEmailScoresWithMicrosoft(Email email) {
        List<Fragment> fragments = email.getFragments();
        API microsoftApi = API.MICROSOFT;
        int i = 0;
        Double somme = 0.0;
        try {
            String json = createMicrosoftJsonBody(email);
            String msAnswer = getMicrosoftJsonAnswer(json, apiKeyMicrosoft);
            ObjectMapper objectMapper = new ObjectMapper();
            MicrosoftJsonAnswer msJsonAnswer = objectMapper.readValue(msAnswer, MicrosoftJsonAnswer.class);
            // Assumes getDocuments() preserves fragment order — TODO confirm against
            // createMicrosoftJsonBody.
            for (MicrosoftScores msScore : msJsonAnswer.getDocuments()) {
                Score score = new Score(fragments.get(i), microsoftApi, msScore.getScore(), new Date());
                this.scoreRepository.save(score);
                i++;
                somme += msScore.getScore();
            }
            email.addEmailScore(new EmailScore(email, microsoftApi, somme/i, new Date()));
            this.emailRepository.save(email);
        } catch (Exception e) {
            // NOTE(review): broad catch with System.out swallows failures silently.
            System.out.println(e.getMessage());
        }
    }

    /**
     * Scores fragments one at a time with Aylien, mapping polarity + confidence
     * onto [0, 1]: negative -> [0, 0.5], positive -> [0.5, 1], neutral -> 0.5.
     */
    private void generateEmailScoresWithAylien(Email email) {
        List<Fragment> fragments = email.getFragments();
        API aylienApi = API.AYLIEN;
        int i = 0;
        Double doubleScore;
        Double somme = 0.0;
        // NOTE(review): nbCalls is incremented but never read in this method.
        int nbCalls = 0;
        TextAPIClient client = new TextAPIClient(idAppAylien, apiKeyAylien);
        SentimentParams.Builder builder = SentimentParams.newBuilder();
        try {
            for (Fragment f : fragments) {
                // Escape double quotes before sending the fragment text.
                builder.setText(f.getText().replaceAll("\"", "\\\\\""));
                Sentiment sentiment = client.sentiment(builder.build());
                Score score = new Score(fragments.get(i), aylienApi, 0.0, new Date());
                switch (sentiment.getPolarity()) {
                    case "negative":
                        doubleScore = 0.5 - sentiment.getPolarityConfidence() * 0.5;
                        break;
                    case "positive":
                        doubleScore = 0.5 + sentiment.getPolarityConfidence() * 0.5;
                        break;
                    case "neutral":
                        doubleScore = 0.5;
                        break;
                    default:
                        doubleScore = 0.5;
                        break;
                }
                score.setValeur(doubleScore);
                this.scoreRepository.save(score);
                nbCalls++;
                i++;
                somme += doubleScore;
            }
            email.addEmailScore(new EmailScore(email, aylienApi, somme/i, new Date()));
            this.emailRepository.save(email);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Scores fragments with MeaningCloud sentiment-2.1, throttled to respect the
     * API rate limit, mapping its P+/P/NEU/N/N+ tags plus confidence onto [0, 1].
     */
    private void generateEmailScoresWithMeaningCloud(Email email) {
        List<Fragment> fragments = email.getFragments();
        API meaningCloudApi = API.MEANINGCLOUD;
        int i = 0;
        Double doubleScore;
        Double somme = 0.0;
        int nbCalls = 0;
        try {
            for (Fragment f : fragments) {
                /* wait 1 sec every two calls (api limit) */
                if (nbCalls % 2 == 0) {
                    TimeUnit.SECONDS.sleep(1);
                }
                HttpResponse<String> meaningCloudAnswer = Unirest.post("http://api.meaningcloud.com/sentiment-2.1")
                        .header("content-type", "application/x-www-form-urlencoded")
                        .body("key=" + apiKeyMeaningCloud + "&lang=en&txt=" + f.getPlainText() + "&txtf=plain")
                        .asString();
                ObjectMapper objectMapper = new ObjectMapper();
                MeaningCloudJsonAnswer meaningCloudJsonAnswer =
                        objectMapper.readValue(meaningCloudAnswer.getBody(), MeaningCloudJsonAnswer.class);
                Score score = new Score(fragments.get(i), meaningCloudApi, 0.0, new Date());
                // Confidence appears to be 0-100 here (scaled by 0.002/0.003), unlike
                // Aylien's 0-1 — TODO confirm against the MeaningCloud response schema.
                switch (meaningCloudJsonAnswer.getScore_tag()) {
                    case "P+":
                        doubleScore = 0.8 + meaningCloudJsonAnswer.getConfidence() * 0.002;
                        break;
                    case "P":
                        doubleScore = 0.5 + meaningCloudJsonAnswer.getConfidence() * 0.003;
                        break;
                    case "N":
                        doubleScore = 0.5 - meaningCloudJsonAnswer.getConfidence() * 0.003;
                        break;
                    case "N+":
                        doubleScore = 0.2 - meaningCloudJsonAnswer.getConfidence() * 0.002;
                        break;
                    case "NEU":
                        doubleScore = 0.5;
                        break;
                    case "NONE":
                        doubleScore = 0.5;
                        break;
                    default:
                        doubleScore = 0.5;
                        break;
                }
                score.setValeur(doubleScore);
                this.scoreRepository.save(score);
                nbCalls++;
                i++;
                somme += doubleScore;
            }
            email.addEmailScore(new EmailScore(email, meaningCloudApi, somme/i, new Date()));
            this.emailRepository.save(email);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Scores all fragments in one Repustate bulk call; Repustate returns scores
     * in [-1, 1], rescaled here to [0, 1] via (score + 1) / 2.
     */
    private void generateEmailScoresWithRepustate(Email email) {
        List<Fragment> fragments = email.getFragments();
        API repustateApi = API.REPUSTATE;
        int i = 0;
        Double somme = 0.0;
        Double doubleScore = 0.0;
        try {
            String repustateAnswer = getSentimentBulk(fragmentListToMap(email.getFragments()));
            ObjectMapper objectMapper = new ObjectMapper();
            RepustateJsonAnswer repustateJsonAnswer = objectMapper.readValue(repustateAnswer, RepustateJsonAnswer.class);
            // Assumes getResults() preserves fragment order — TODO confirm against
            // fragmentListToMap/getSentimentBulk.
            for (RepustateScore repustateScore : repustateJsonAnswer.getResults()) {
                doubleScore = (repustateScore.getScore() + 1) / 2;
                Score score = new Score(fragments.get(i), repustateApi, doubleScore, new Date());
                this.scoreRepository.save(score);
                i++;
                somme += doubleScore;
            }
            email.addEmailScore(new EmailScore(email, repustateApi, somme/i, new Date()));
            this.emailRepository.save(email);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Scores fragments from the emojis they contain, using a CSV lookup table of
     * per-emoji scores; only fragments containing at least one known emoji get a
     * Score, and the email average is taken over those fragments only.
     */
    private void generateEmailScoresWithSmileys(Email email) {
        if (email.getSmileysPresent()) {
            List<Fragment> fragments = email.getFragments();
            Map<String, Double> emojisScores = getEmojisScores("static/emojis_score.csv");
            API smileysApi = API.SMILEYS;
            int i = 0, nbFragmentsEmojis = 0;
            int nbEmojis;
            Double somme = 0.0;
            // NOTE(review): doubleScore is never reset to 0.0 between fragments, so a
            // fragment's average leaks into the next fragment's sum — looks like a bug;
            // confirm intent before fixing.
            Double doubleScore = 0.0;
            for (Fragment f : fragments) {
                nbEmojis = 0;
                // Collapse the space between adjacent non-word characters so split
                // emoji sequences are rejoined before conversion.
                String text = f.getText().replaceAll("([^a-zA-Z\\d\\w]) ([^a-zA-Z\\d\\w])", "$1$2");
                // Convert emojis to hex HTML entities, then scan for them.
                String hexHtmlifiedText = EmojiUtils.hexHtmlify(text);
                Matcher matcher = htmlEntityPattern.matcher(hexHtmlifiedText);
                while (matcher.find()) {
                    String emojiCode = matcher.group();
                    if (EmojiUtils.isEmoji(emojiCode)) {
                        doubleScore += emojisScores.get(emojiCode);
                        nbEmojis++;
                    }
                }
                if (nbEmojis != 0) {
                    doubleScore /= nbEmojis;
                    Score score = new Score(fragments.get(i), smileysApi, doubleScore, new Date());
                    this.scoreRepository.save(score);
                    somme += doubleScore;
                    nbFragmentsEmojis++;
                }
                i++;
            }
            // NOTE(review): if no fragment contained a known emoji despite
            // getSmileysPresent(), nbFragmentsEmojis is 0 and this average is NaN.
            email.addEmailScore(new EmailScore(email, smileysApi, somme/nbFragmentsEmojis, new Date()));
            this.emailRepository.save(email);
        }
    }

    /**
     * Combines the per-API email scores into the GLOBAL score using fixed weights
     * (presumably fitted offline by regression) applied via EmailUtils.modelEmailScore.
     */
    public void computeEmailScore(Email email) {
        Double[] param = new Double[9];
        param[0] = 0.0004343709;  /* SIZE */
        param[1] = 0.5951741220;  /* INDICO */
        param[2] = 0.1131594085;  /* MICROSOFT */
        param[3] = 0.3438021197;  /* AYLIEN */
        param[4] = -0.0244543326; /* MEANINGCLOUD */
        param[5] = -0.0849043711; /* REPUSTATE */
        param[6] = 0.1251683320;  /* SMILEYS */
        param[7] = -0.8755026602; /* BOLD */
        param[8] = 0.4735644180;  /* CAPS */
        email.addEmailScore(new EmailScore(email, API.GLOBAL, EmailUtils.modelEmailScore(email, param), new Date()));
        this.emailRepository.save(email);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jp.co.yahoo.dataplatform.mds.blackbox;

import java.io.IOException;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.Arguments;

import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.params.provider.Arguments.arguments;

import jp.co.yahoo.dataplatform.config.Configuration;

import jp.co.yahoo.dataplatform.schema.objects.*;

import jp.co.yahoo.dataplatform.mds.spread.expression.*;
import jp.co.yahoo.dataplatform.mds.spread.column.filter.*;
import jp.co.yahoo.dataplatform.mds.spread.column.*;
import jp.co.yahoo.dataplatform.mds.binary.*;
import jp.co.yahoo.dataplatform.mds.binary.maker.*;

/**
 * Black-box tests for the string cell index: every primitive column type is
 * round-tripped through each compatible {@link IColumnBinaryMaker} and then
 * filtered with string match / range / dictionary filters. A {@code null}
 * filter result means the index declined to prune, which is always acceptable.
 */
public class TestStringCellIndex{

  /** Common package prefix shared by every binary maker class under test. */
  private static final String MAKER_PKG = "jp.co.yahoo.dataplatform.mds.binary.maker.";

  /**
   * Row indexes populated in every generated test column: 0-9 and 20-29,
   * with the cell value equal to the index (rows 10-19 are left absent).
   */
  private static final int[] TEST_INDEXES = {
      0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 ,
      20 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 };

  /**
   * Supplies one argument set per (column type, binary maker) combination.
   *
   * @return stream of single-column arguments for the parameterized tests.
   * @throws IOException if a maker fails to encode or decode a column.
   */
  public static Stream<Arguments> data1() throws IOException{
    String[] integerMakers = {
        MAKER_PKG + "OptimizeLongColumnBinaryMaker" ,
        MAKER_PKG + "OptimizeDumpLongColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeOptimizeLongColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeOptimizeDumpLongColumnBinaryMaker" };
    String[] floatMakers = {
        MAKER_PKG + "DumpFloatColumnBinaryMaker" ,
        MAKER_PKG + "RangeDumpFloatColumnBinaryMaker" ,
        MAKER_PKG + "OptimizeFloatColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeRangeDumpFloatColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeOptimizeFloatColumnBinaryMaker" };
    String[] doubleMakers = {
        MAKER_PKG + "DumpDoubleColumnBinaryMaker" ,
        MAKER_PKG + "RangeDumpDoubleColumnBinaryMaker" ,
        MAKER_PKG + "OptimizeDoubleColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeRangeDumpDoubleColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeOptimizeDoubleColumnBinaryMaker" };
    String[] stringMakers = {
        MAKER_PKG + "OptimizeStringColumnBinaryMaker" ,
        MAKER_PKG + "OptimizeDumpStringColumnBinaryMaker" ,
        MAKER_PKG + "OptimizeIndexDumpStringColumnBinaryMaker" ,
        MAKER_PKG + "UnsafeOptimizeStringColumnBinaryMaker" };

    List<Arguments> args = new ArrayList<Arguments>();
    for( String maker : integerMakers ){
      args.add( arguments( createByteTestData( maker ) ) );
    }
    for( String maker : integerMakers ){
      args.add( arguments( createShortTestData( maker ) ) );
    }
    for( String maker : integerMakers ){
      args.add( arguments( createIntTestData( maker ) ) );
    }
    for( String maker : integerMakers ){
      args.add( arguments( createLongTestData( maker ) ) );
    }
    for( String maker : floatMakers ){
      args.add( arguments( createFloatTestData( maker ) ) );
    }
    for( String maker : doubleMakers ){
      args.add( arguments( createDoubleTestData( maker ) ) );
    }
    for( String maker : stringMakers ){
      args.add( arguments( createStringTestData( maker ) ) );
    }
    args.add( arguments( createBytesTestData( MAKER_PKG + "DumpBytesColumnBinaryMaker" ) ) );
    return args.stream();
  }

  /**
   * Encodes {@code column} with the named maker and decodes it back,
   * simulating a full binary round trip.
   *
   * @param targetClassName fully qualified {@link IColumnBinaryMaker} class name.
   * @param column          in-memory column to round-trip.
   * @return the decoded column produced by the maker.
   * @throws IOException if encoding or decoding fails.
   */
  private static IColumn toTestColumn( final String targetClassName , final IColumn column ) throws IOException{
    IColumnBinaryMaker maker = FindColumnBinaryMaker.get( targetClassName );
    ColumnBinaryMakerConfig defaultConfig = new ColumnBinaryMakerConfig();
    ColumnBinary columnBinary = maker.toBinary( defaultConfig , null , column );
    return maker.toColumn( columnBinary );
  }

  /** Builds a BYTE column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createByteTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.BYTE , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.BYTE , new ByteObj( (byte)i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a SHORT column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createShortTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.SHORT , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.SHORT , new ShortObj( (short)i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds an INTEGER column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createIntTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.INTEGER , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.INTEGER , new IntegerObj( i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a LONG column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createLongTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.LONG , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.LONG , new LongObj( i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a FLOAT column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createFloatTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.FLOAT , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.FLOAT , new FloatObj( (float)i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a DOUBLE column with cell value == row index at {@link #TEST_INDEXES}. */
  public static IColumn createDoubleTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.DOUBLE , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.DOUBLE , new DoubleObj( (double)i ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a STRING column with cell value == decimal row index at {@link #TEST_INDEXES}. */
  public static IColumn createStringTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.STRING , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.STRING , new StringObj( String.valueOf( i ) ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Builds a BYTES column with cell value == UTF bytes of the row index at {@link #TEST_INDEXES}. */
  public static IColumn createBytesTestData( final String targetClassName ) throws IOException{
    IColumn column = new PrimitiveColumn( ColumnType.BYTES , "column" );
    for( int i : TEST_INDEXES ){
      column.add( ColumnType.BYTES , new BytesObj( String.valueOf( i ).getBytes() ) , i );
    }
    return toTestColumn( targetClassName , column );
  }

  /** Debug helper: prints each index of a filter result. */
  public static void dumpFilterResult( final boolean[] result ){
    System.out.println( "-----------------------" );
    System.out.println( "String cell index test result." );
    System.out.println( "-----------------------" );
    for( int i = 0 ; i < result.length ; i++ ){
      System.out.println( String.format( "index:%d = %s" , i , Boolean.toString( result[i] ) ) );
    }
  }

  /**
   * Applies {@code filter} to {@code column} and asserts that every index in
   * {@code mustReadIndex} is marked readable. A {@code null} result means the
   * cell index could not prune anything, which is a valid (vacuous) outcome.
   */
  private static void assertMustRead( final IColumn column , final IFilter filter , final int[] mustReadIndex ) throws IOException{
    boolean[] filterResult = new boolean[30];
    filterResult = column.filter( filter , filterResult );
    if( filterResult == null ){
      // The index declined to filter; nothing to verify.
      return;
    }
    for( int index : mustReadIndex ){
      assertTrue( filterResult[index] );
    }
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_perfectMatch_1( final IColumn column ) throws IOException{
    assertMustRead( column , new PerfectMatchStringFilter( "23" ) , new int[]{ 23 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_partialMatch_1( final IColumn column ) throws IOException{
    assertMustRead( column , new PartialMatchStringFilter( "2" ) ,
        new int[]{ 2 , 20 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_partialMatch_2( final IColumn column ) throws IOException{
    assertMustRead( column , new PartialMatchStringFilter( "3" ) , new int[]{ 3 , 23 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_partialMatch_3( final IColumn column ) throws IOException{
    assertMustRead( column , new PartialMatchStringFilter( "9" ) , new int[]{ 9 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_partialMatch_4( final IColumn column ) throws IOException{
    assertMustRead( column , new PartialMatchStringFilter( "29" ) , new int[]{ 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_forwardMatch_1( final IColumn column ) throws IOException{
    assertMustRead( column , new ForwardMatchStringFilter( "0" ) , new int[]{ 0 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_forwardMatch_2( final IColumn column ) throws IOException{
    assertMustRead( column , new ForwardMatchStringFilter( "2" ) , new int[]{ 2 , 20 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_backwardMatch_1( final IColumn column ) throws IOException{
    assertMustRead( column , new BackwardMatchStringFilter( "0" ) , new int[]{ 0 , 20 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_backwardMatch_2( final IColumn column ) throws IOException{
    assertMustRead( column , new BackwardMatchStringFilter( "20" ) , new int[]{ 20 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_1( final IColumn column ) throws IOException{
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , true , "1" , true ) , new int[]{ 0 , 1 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_2( final IColumn column ) throws IOException{
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , false , "1" , true ) , new int[]{ 1 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_3( final IColumn column ) throws IOException{
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , true , "1" , false ) , new int[]{ 0 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_4( final IColumn column ) throws IOException{
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , false , "2" , false ) , new int[]{ 1 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_5( final IColumn column ) throws IOException{
    // Inverted range [0,1]: everything lexicographically outside it must be read.
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , true , "1" , true , true ) ,
        new int[]{ 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 ,
            16 , 17 , 18 , 19 , 20 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_6( final IColumn column ) throws IOException{
    // Inverted range (0,1]: "0" itself is outside and must be read.
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , false , "1" , true , true ) ,
        new int[]{ 0 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 ,
            16 , 17 , 18 , 19 , 20 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_7( final IColumn column ) throws IOException{
    // Inverted range [0,1): "1" itself is outside and must be read.
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , true , "1" , false , true ) ,
        new int[]{ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 ,
            16 , 17 , 18 , 19 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_compareString_8( final IColumn column ) throws IOException{
    // Inverted range (0,1): both bounds are outside and must be read.
    assertMustRead( column ,
        new RangeStringCompareFilter( "0" , false , "1" , false , true ) ,
        new int[]{ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 ,
            16 , 17 , 18 , 19 , 21 , 22 , 23 , 24 , 25 , 26 , 27 , 28 , 29 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_dictionaryString_1( final IColumn column ) throws IOException{
    Set<String> dic = new HashSet<String>();
    dic.add( "9" );
    dic.add( "20" );
    assertMustRead( column , new StringDictionaryFilter( dic ) , new int[]{ 9 , 20 } );
  }

  @ParameterizedTest
  @MethodSource( "data1" )
  public void T_dictionaryString_2( final IColumn column ) throws IOException{
    // Dictionary containing every stored value: all populated rows must be read.
    Set<String> dic = new HashSet<String>();
    for( int i : TEST_INDEXES ){
      dic.add( String.valueOf( i ) );
    }
    assertMustRead( column , new StringDictionaryFilter( dic ) , TEST_INDEXES );
  }
}
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. * * Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common * Development and Distribution License("CDDL") (collectively, the * "License"). You may not use this file except in compliance with the * License. You can obtain a copy of the License at * http://www.netbeans.org/cddl-gplv2.html * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the * specific language governing permissions and limitations under the * License. When distributing the software, include this License Header * Notice in each file and include the License file at * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the * License Header, with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * Contributor(s): * * The Original Software is NetBeans. The Initial Developer of the Original * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun * Microsystems, Inc. All Rights Reserved. * * If you wish your version of this file to be governed by only the CDDL * or only the GPL Version 2, indicate your decision by adding * "[Contributor] elects to include this software in this distribution * under the [CDDL or GPL Version 2] license." 
If you do not indicate a
 * single choice of license, a recipient has the option to distribute
 * your version of this file under either the CDDL, the GPL Version 2 or
 * to extend the choice of license to its licensees as provided above.
 * However, if you add GPL Version 2 code and therefore, elected the GPL
 * Version 2 license, then the option applies only if the new code is
 * made subject to such option by the copyright holder.
 */
package com.bearsoft.org.netbeans.modules.form;

import com.bearsoft.org.netbeans.modules.form.RADVisualComponent.MenuType;
import com.bearsoft.org.netbeans.modules.form.layoutsupport.LayoutConstraints;
import com.bearsoft.org.netbeans.modules.form.layoutsupport.LayoutNode;
import com.bearsoft.org.netbeans.modules.form.layoutsupport.LayoutSupportDelegate;
import com.bearsoft.org.netbeans.modules.form.layoutsupport.LayoutSupportManager;
import com.eas.client.forms.containers.ScrollPane;
import com.eas.client.forms.menu.CheckMenuItem;
import com.eas.client.forms.menu.Menu;
import com.eas.client.forms.menu.MenuBar;
import com.eas.client.forms.menu.MenuItem;
import com.eas.client.forms.menu.MenuSeparator;
import com.eas.client.forms.menu.RadioMenuItem;
import java.awt.Component;
import java.awt.Container;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.Map;
import javax.swing.JComponent;
import javax.swing.JRootPane;
import javax.swing.RootPaneContainer;
import org.openide.ErrorManager;

/**
 * Form-model meta-component for an AWT/Swing {@link Container}. Keeps the
 * ordered list of visual sub-components, delegates layout handling to a
 * {@link LayoutSupportManager}, and mirrors model changes into the live
 * container instance (either through the layout support or directly via the
 * container delegate when no layout support is present).
 */
public class RADVisualContainer<C extends Container> extends RADVisualComponent<C> implements ComponentContainer {

    // Ordered child meta-components; order matches the order of the live children.
    private java.util.List<RADVisualComponent<?>> subComponents = new ArrayList<>(10);
    // Layout handling for this container; may be null until checkLayoutSupport()/setBeanInstance() runs.
    private LayoutSupportManager layoutSupport;
    // Node representing the layout in the explorer view; null when not shown.
    private LayoutNode layoutNode;
    // Cached reflective getter for the container delegate (e.g. getContentPane).
    private Method containerDelegateGetter;
    // True once we determined that this bean has no container delegate, to avoid repeated lookups.
    private boolean noContainerDelegate;
    // Lazily built map of menu container type -> menu component classes it accepts.
    private static Map<MenuType, Class<?>[]> supportedMenus;

    /**
     * Installs a new bean instance and resets the cached container-delegate
     * lookup state; creates the layout support manager for the new bean if
     * none exists yet.
     */
    @Override
    protected void setBeanInstance(C beanInstance) {
        containerDelegateGetter = null;
        noContainerDelegate = false;
        super.setBeanInstance(beanInstance);
        // need new layout support for new container bean
        if (layoutSupport == null) {
            layoutSupport = new LayoutSupportManager(this);
        }
    }

    /**
     * On first addition to the form model, finishes the deferred
     * initialization of the layout delegate; falls back to an "unknown
     * layout" delegate if initialization fails.
     */
    @Override
    public void setInModel(boolean in) {
        boolean alreadyIn = isInModel();
        super.setInModel(in);
        if (in && !alreadyIn && layoutSupport != null) {
            // deferred initialization from pre-creation
            try {
                layoutSupport.initializeLayoutDelegate();
            } catch (Exception ex) {
                // [not reported - but very unlikely to happen - only for new container with custom layout]
                ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, ex);
                layoutSupport.setUnknownLayoutDelegate();//(false);
            }
        }
    }

    /**
     * Replaces the layout delegate and drops the stale layout node reference.
     *
     * @throws Exception if the layout support rejects the delegate
     */
    public void setLayoutSupportDelegate(LayoutSupportDelegate layoutDelegate) throws Exception {
        layoutSupport.setLayoutDelegate(layoutDelegate);
        setLayoutNodeReference(null);
    }

    /** @return the layout support manager of this container (may be null). */
    public LayoutSupportManager getLayoutSupport() {
        return layoutSupport;
    }

    // Ensures a layout support manager exists (lazy creation).
    void checkLayoutSupport() {
        if (layoutSupport == null) {
            layoutSupport = new LayoutSupportManager(this);
        }
    }

    /**
     * Rebuilds the live container from scratch: clears it and re-adds the
     * bean instances of all sub-components in model order. Used when there is
     * no layout support managing the container.
     */
    private void refillContainerInstance() {
        Container cont = getContainerDelegate(getBeanInstance());
        cont.removeAll();
        if (!(cont instanceof ScrollPane)) { // Issue 128797
            cont.setLayout(null); // Issue 77904
        }
        subComponents.stream().forEach((sub) -> {
            cont.add(sub.getBeanInstance());
        });
    }

    /** @return true when layout support exists and is dedicated to this container type. */
    public boolean hasDedicatedLayoutSupport() {
        return layoutSupport != null && layoutSupport.isDedicated();
    }

    /**
     * Resolves the component into which children are actually added
     * (content pane for root-pane containers, reflective container delegate
     * otherwise, the component itself as a last resort).
     *
     * @param container container.
     * @return The JavaBean visual container represented by this
     * RADVisualComponent
     */
    public JComponent getContainerDelegate(Component container) {
        if (container instanceof RootPaneContainer/* && container.getClass().getName().startsWith("javax.swing.")*/) // NOI18N
        {
            return (JComponent) ((RootPaneContainer) container).getContentPane();
        }
        if (container.getClass().equals(JRootPane.class)) {
            return (JComponent) ((JRootPane) container).getContentPane();
        }
        JComponent containerDelegate = (JComponent) container;
        // Do not attempt to find container delegate if the classes
        // don't match. This can happen when ViewConverter was used.
        // Happens for JApplet, for example.
        if (getBeanClass().isAssignableFrom(container.getClass())) {
            Method m = getContainerDelegateMethod();
            if (m != null) {
                try {
                    containerDelegate = (JComponent) m.invoke(container, new Object[0]);
                    if ((containerDelegate == null) && (container instanceof ScrollPane)) {
                        ScrollPane scrollPane = (ScrollPane) container;
                        scrollPane.setViewportView(null); // force recreation of viewport
                        containerDelegate = (JComponent) m.invoke(container, new Object[0]);
                    }
                } catch (Exception ex) {
                    org.openide.ErrorManager.getDefault().notify(org.openide.ErrorManager.INFORMATIONAL, ex);
                }
            }
        }
        return containerDelegate;
    }

    /**
     * Looks up (and caches) the no-arg container-delegate getter declared via
     * the bean descriptor, defaulting to {@code getContentPane} for root-pane
     * beans. Returns null when the bean has no delegate.
     */
    public Method getContainerDelegateMethod() {
        if (containerDelegateGetter == null && !noContainerDelegate) {
            String delegateGetterName = getContainerDelegateGetterName();
            if (delegateGetterName == null
                    && (RootPaneContainer.class.isAssignableFrom(getBeanClass())
                    || JRootPane.class.isAssignableFrom(getBeanClass()))) {
                delegateGetterName = "getContentPane"; // NOI18N
            }
            if (delegateGetterName != null) {
                try {
                    containerDelegateGetter = getBeanClass().getMethod(
                            delegateGetterName, new Class<?>[]{});
                } catch (NoSuchMethodException ex) {
                    org.openide.ErrorManager.getDefault().notify(org.openide.ErrorManager.INFORMATIONAL, ex);
                }
            } else {
                noContainerDelegate = true;
            }
        }
        return containerDelegateGetter;
    }

    // Reads the "containerDelegate" value from the bean descriptor; null when absent or not a String.
    String getContainerDelegateGetterName() {
        Object value = getBeanInfo().getBeanDescriptor().getValue("containerDelegate"); // NOI18N
        if (value instanceof String) {
            return (String) value;
        } else {
            return null;
        }
    }

    public void setLayoutNodeReference(LayoutNode aNode) {
        layoutNode = aNode;
    }

    public LayoutNode getLayoutNodeReference() {
        return layoutNode;
    }

    // True when the layout support says a layout node should be shown for this container.
    boolean shouldHaveLayoutNode() {
        return layoutSupport != null && layoutSupport.shouldHaveNode();
    }

    /**
     * Decides whether a component of the given class may be added to this
     * container: menu containers accept only their supported menu classes,
     * non-menu-bar containers reject menu components, and scroll panes that
     * already hold a view reject further children.
     */
    public boolean canAddComponent(Class<?> compClass) {
        if (isMenuTypeComponent()) {
            // this is a menu container accepting certain types of menus
            Class<?>[] possibleClasses = getPossibleSubmenus(getMenuType(getBeanClass()));
            if (possibleClasses != null) {
                for (Class<?> cls : possibleClasses) {
                    if (cls.isAssignableFrom(compClass)) {
                        return true;
                    }
                }
            }
            return false;
        } else if (getMenuType(compClass) != null && !MenuBar.class.isAssignableFrom(getBeanClass())) {
            // otherwise don't accept menu components
            return false;
        } else if (Component.class.isAssignableFrom(compClass)) {
            // visual component can be added to visual container
            // exception: avoid adding components to scroll pane that already contains something
            if (ScrollPane.class.isAssignableFrom(getBeanClass())
                    && (((ScrollPane) getBeanInstance()).getViewport() != null)
                    && (((ScrollPane) getBeanInstance()).getViewport().getView() != null)) {
                return false;
            }
            return true;
        }
        return false;
    }

    // Only root-pane containers can carry a MenuBar.
    boolean canHaveMenu(Class<?> menuClass) {
        return MenuBar.class.isAssignableFrom(menuClass)
                && RootPaneContainer.class.isAssignableFrom(getBeanClass());
    }

    // Lazily builds the table of menu classes each menu container type accepts.
    private static Class<?>[] getPossibleSubmenus(MenuType menuContainerType) {
        if (supportedMenus == null) {
            supportedMenus = new EnumMap<>(MenuType.class);
            supportedMenus.put(MenuType.JMenu,
                    new Class<?>[]{MenuItem.class, CheckMenuItem.class, RadioMenuItem.class, Menu.class, MenuSeparator.class});
            supportedMenus.put(MenuType.JPopupMenu,
                    new Class<?>[]{MenuItem.class, CheckMenuItem.class, RadioMenuItem.class, Menu.class, MenuSeparator.class});
        }
        return supportedMenus.get(menuContainerType);
    }

    // -----------------------------------------------------------------------------
    // SubComponents Management
    /**
     * @return visual subcomponents (not the menu component)
     */
    public RADVisualComponent<?>[] getSubComponents() {
        RADVisualComponent<?>[] components = new RADVisualComponent<?>[subComponents.size()];
        subComponents.toArray(components);
        return components;
    }

    public RADVisualComponent<?> getSubComponent(int index) {
        return subComponents.get(index);
    }

    // the following methods implement ComponentContainer interface
    /**
     * @return all subcomponents (including the menu component)
     */
    @Override
    public RADVisualComponent<?>[] getSubBeans() {
        int n = subComponents.size();
        RADVisualComponent<?>[] components = new RADVisualComponent<?>[n];
        subComponents.toArray(components);
        return components;
    }

    @Override
    public int getSubBeansCount() {
        return subComponents.size();
    }

    /**
     * Replaces the sub-component list with the given components and reparents
     * them to this container; refills the live container when no layout
     * support manages it.
     */
    @Override
    public void initSubComponents(RADComponent<?>[] initComponents) {
        if (subComponents == null) {
            subComponents = new ArrayList<>(initComponents.length);
        } else {
            subComponents.clear();
        }
        for (int i = 0; i < initComponents.length; i++) {
            RADComponent<?> radComp = initComponents[i];
            subComponents.add((RADVisualComponent<?>) radComp);
            radComp.setParent(this);
        }
        if (layoutSupport == null) {
            refillContainerInstance();
        }
    }

    /**
     * Reorders sub-components according to the permutation {@code perm}
     * (element at index i moves to position perm[i]), preserving each
     * component's stored layout constraints, then re-adds everything to the
     * layout support or refills the live container.
     */
    @Override
    public void reorderSubComponents(int[] perm) {
        RADVisualComponent<?>[] components = new RADVisualComponent<?>[subComponents.size()];
        LayoutConstraints<?>[] constraints;
        if (layoutSupport != null) {
            layoutSupport.removeAll();
            constraints = new LayoutConstraints<?>[subComponents.size()];
        } else {
            constraints = null;
        }
        for (int i = 0; i < perm.length; i++) {
            RADVisualComponent<?> radComp = subComponents.get(i);
            components[perm[i]] = radComp;
            if (constraints != null) {
                constraints[perm[i]] = layoutSupport.getStoredConstraints(radComp);
            }
        }
        subComponents.clear();
        subComponents.addAll(java.util.Arrays.asList(components));
        if (layoutSupport != null) {
            layoutSupport.addComponents(components, constraints, 0);
        } else {
            refillContainerInstance();
        }
    }

    @Override
    public void add(RADComponent<?> comp) {
        add((RADVisualComponent<?>) comp, -1);
    }

    /**
     * Inserts a visual component at the given index (-1 appends). Without
     * layout support the bean instance is also added directly to the live
     * container delegate at the same index.
     */
    public void add(RADVisualComponent<?> radComp, int index) {
        if (index == -1) {
            index = subComponents.size();
        }
        subComponents.add(index, radComp);
        if (layoutSupport == null) {
            Component comp = radComp.getBeanInstance();
            getContainerDelegate(getBeanInstance()).add(comp, index);
        }
        radComp.setParent(this);
        // force constraints properties creation
        radComp.getConstraintsProperties();
    }

    /**
     * Removes a visual component both from the layout support (or the live
     * container delegate) and from the model list, clearing its parent.
     * NOTE(review): if {@code radComp} is not in the list, {@code index} is
     * -1 and the delegate remove(-1) would throw — presumably callers only
     * pass contained components; confirm at call sites.
     */
    @Override
    public void remove(RADComponent<?> radComp) {
        if (radComp instanceof RADVisualComponent<?>) {
            int index = subComponents.indexOf(radComp);
            if (layoutSupport != null) {
                layoutSupport.removeComponent((RADVisualComponent<?>) radComp, index);
            } else {
                getContainerDelegate(getBeanInstance()).remove(index);
            }
            if (subComponents.remove((RADVisualComponent<?>) radComp)) {
                radComp.setParent(null);
            }
        }
    }

    @Override
    public int getIndexOf(RADComponent<?> comp) {
        return subComponents.lastIndexOf(comp);
    }
}
/** * Copyright The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor; import org.apache.hadoop.hbase.replication.ReplicationLoadSink; import org.apache.hadoop.hbase.replication.ReplicationLoadSource; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Strings; /** * This class is used for exporting current state of load on a RegionServer. 
*/ @InterfaceAudience.Public public class ServerLoad { private int stores = 0; private int storefiles = 0; private int storeUncompressedSizeMB = 0; private int storefileSizeMB = 0; private int memstoreSizeMB = 0; private long storefileIndexSizeKB = 0; private long readRequestsCount = 0; private long filteredReadRequestsCount = 0; private long writeRequestsCount = 0; private int rootIndexSizeKB = 0; private int totalStaticIndexSizeKB = 0; private int totalStaticBloomSizeKB = 0; private long totalCompactingKVs = 0; private long currentCompactedKVs = 0; private long reportTime = 0; @InterfaceAudience.Private public ServerLoad(ClusterStatusProtos.ServerLoad serverLoad) { this.serverLoad = serverLoad; this.reportTime = System.currentTimeMillis(); for (ClusterStatusProtos.RegionLoad rl: serverLoad.getRegionLoadsList()) { stores += rl.getStores(); storefiles += rl.getStorefiles(); storeUncompressedSizeMB += rl.getStoreUncompressedSizeMB(); storefileSizeMB += rl.getStorefileSizeMB(); memstoreSizeMB += rl.getMemStoreSizeMB(); storefileIndexSizeKB += rl.getStorefileIndexSizeKB(); readRequestsCount += rl.getReadRequestsCount(); filteredReadRequestsCount += rl.getFilteredReadRequestsCount(); writeRequestsCount += rl.getWriteRequestsCount(); rootIndexSizeKB += rl.getRootIndexSizeKB(); totalStaticIndexSizeKB += rl.getTotalStaticIndexSizeKB(); totalStaticBloomSizeKB += rl.getTotalStaticBloomSizeKB(); totalCompactingKVs += rl.getTotalCompactingKVs(); currentCompactedKVs += rl.getCurrentCompactedKVs(); } } // NOTE: Function name cannot start with "get" because then an OpenDataException is thrown because // HBaseProtos.ServerLoad cannot be converted to an open data type(see HBASE-5967). /* @return the underlying ServerLoad protobuf object */ @InterfaceAudience.Private public ClusterStatusProtos.ServerLoad obtainServerLoadPB() { return serverLoad; } protected ClusterStatusProtos.ServerLoad serverLoad; /* @return number of requests since last report. 
*/ public long getNumberOfRequests() { return serverLoad.getNumberOfRequests(); } public boolean hasNumberOfRequests() { return serverLoad.hasNumberOfRequests(); } /* @return total Number of requests from the start of the region server. */ public long getTotalNumberOfRequests() { return serverLoad.getTotalNumberOfRequests(); } public boolean hasTotalNumberOfRequests() { return serverLoad.hasTotalNumberOfRequests(); } /* @return the amount of used heap, in MB. */ public int getUsedHeapMB() { return serverLoad.getUsedHeapMB(); } public boolean hasUsedHeapMB() { return serverLoad.hasUsedHeapMB(); } /* @return the maximum allowable size of the heap, in MB. */ public int getMaxHeapMB() { return serverLoad.getMaxHeapMB(); } public boolean hasMaxHeapMB() { return serverLoad.hasMaxHeapMB(); } public int getStores() { return stores; } public int getStorefiles() { return storefiles; } public int getStoreUncompressedSizeMB() { return storeUncompressedSizeMB; } /** * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 * Use {@link #getStorefileSizeMB()} instead. */ @Deprecated public int getStorefileSizeInMB() { return storefileSizeMB; } public int getStorefileSizeMB() { return storefileSizeMB; } /** * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 * Use {@link #getMemStoreSizeMB()} instead. */ @Deprecated public int getMemstoreSizeInMB() { return memstoreSizeMB; } public int getMemStoreSizeMB() { return memstoreSizeMB; } /** * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 * Use {@link #getStorefileIndexSizeKB()} instead. 
*/ @Deprecated public int getStorefileIndexSizeInMB() { // Return value divided by 1024 return (int) (getStorefileIndexSizeKB() >> 10); } public long getStorefileIndexSizeKB() { return storefileIndexSizeKB; } public long getReadRequestsCount() { return readRequestsCount; } public long getFilteredReadRequestsCount() { return filteredReadRequestsCount; } public long getWriteRequestsCount() { return writeRequestsCount; } public int getRootIndexSizeKB() { return rootIndexSizeKB; } public int getTotalStaticIndexSizeKB() { return totalStaticIndexSizeKB; } public int getTotalStaticBloomSizeKB() { return totalStaticBloomSizeKB; } public long getTotalCompactingKVs() { return totalCompactingKVs; } public long getCurrentCompactedKVs() { return currentCompactedKVs; } /** * @return the number of regions */ public int getNumberOfRegions() { return serverLoad.getRegionLoadsCount(); } public int getInfoServerPort() { return serverLoad.getInfoServerPort(); } /** * Call directly from client such as hbase shell * @return the list of ReplicationLoadSource */ public List<ReplicationLoadSource> getReplicationLoadSourceList() { return ProtobufUtil.toReplicationLoadSourceList(serverLoad.getReplLoadSourceList()); } /** * Call directly from client such as hbase shell * @return ReplicationLoadSink */ public ReplicationLoadSink getReplicationLoadSink() { if (serverLoad.hasReplLoadSink()) { return ProtobufUtil.toReplicationLoadSink(serverLoad.getReplLoadSink()); } else { return null; } } /** * Originally, this method factored in the effect of requests going to the * server as well. However, this does not interact very well with the current * region rebalancing code, which only factors number of regions. For the * interim, until we can figure out how to make rebalancing use all the info * available, we're just going to make load purely the number of regions. * * @return load factor for this server */ public int getLoad() { // See above comment // int load = numberOfRequests == 0 ? 
1 : numberOfRequests; // load *= numberOfRegions == 0 ? 1 : numberOfRegions; // return load; return getNumberOfRegions(); } /** * @return region load metrics */ public Map<byte[], RegionLoad> getRegionsLoad() { Map<byte[], RegionLoad> regionLoads = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (ClusterStatusProtos.RegionLoad rl : serverLoad.getRegionLoadsList()) { RegionLoad regionLoad = new RegionLoad(rl); regionLoads.put(regionLoad.getName(), regionLoad); } return regionLoads; } /** * Return the RegionServer-level coprocessors * @return string array of loaded RegionServer-level coprocessors */ public String[] getRegionServerCoprocessors() { List<Coprocessor> list = obtainServerLoadPB().getCoprocessorsList(); String [] ret = new String[list.size()]; int i = 0; for (Coprocessor elem : list) { ret[i++] = elem.getName(); } return ret; } /** * Return the RegionServer-level and Region-level coprocessors * @return string array of loaded RegionServer-level and * Region-level coprocessors */ public String[] getRsCoprocessors() { // Need a set to remove duplicates, but since generated Coprocessor class // is not Comparable, make it a Set<String> instead of Set<Coprocessor> TreeSet<String> coprocessSet = new TreeSet<>(); for (Coprocessor coprocessor : obtainServerLoadPB().getCoprocessorsList()) { coprocessSet.add(coprocessor.getName()); } return coprocessSet.toArray(new String[coprocessSet.size()]); } /** * @return number of requests per second received since the last report */ public double getRequestsPerSecond() { return getNumberOfRequests(); } /** * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = Strings.appendKeyValue(new StringBuilder(), "requestsPerSecond", Double.valueOf(getRequestsPerSecond())); Strings.appendKeyValue(sb, "numberOfOnlineRegions", Integer.valueOf(getNumberOfRegions())); sb = Strings.appendKeyValue(sb, "usedHeapMB", Integer.valueOf(this.getUsedHeapMB())); sb = Strings.appendKeyValue(sb, "maxHeapMB", 
Integer.valueOf(getMaxHeapMB())); sb = Strings.appendKeyValue(sb, "numberOfStores", Integer.valueOf(this.stores)); sb = Strings.appendKeyValue(sb, "numberOfStorefiles", Integer.valueOf(this.storefiles)); sb = Strings.appendKeyValue(sb, "storefileUncompressedSizeMB", Integer.valueOf(this.storeUncompressedSizeMB)); sb = Strings.appendKeyValue(sb, "storefileSizeMB", Integer.valueOf(this.storefileSizeMB)); if (this.storeUncompressedSizeMB != 0) { sb = Strings.appendKeyValue( sb, "compressionRatio", String.format("%.4f", (float) this.storefileSizeMB / (float) this.storeUncompressedSizeMB)); } sb = Strings.appendKeyValue(sb, "memstoreSizeMB", Integer.valueOf(this.memstoreSizeMB)); sb = Strings.appendKeyValue(sb, "storefileIndexSizeKB", Long.valueOf(this.storefileIndexSizeKB)); sb = Strings.appendKeyValue(sb, "readRequestsCount", Long.valueOf(this.readRequestsCount)); sb = Strings.appendKeyValue(sb, "filteredReadRequestsCount", Long.valueOf(this.filteredReadRequestsCount)); sb = Strings.appendKeyValue(sb, "writeRequestsCount", Long.valueOf(this.writeRequestsCount)); sb = Strings.appendKeyValue(sb, "rootIndexSizeKB", Integer.valueOf(this.rootIndexSizeKB)); sb = Strings.appendKeyValue(sb, "totalStaticIndexSizeKB", Integer.valueOf(this.totalStaticIndexSizeKB)); sb = Strings.appendKeyValue(sb, "totalStaticBloomSizeKB", Integer.valueOf(this.totalStaticBloomSizeKB)); sb = Strings.appendKeyValue(sb, "totalCompactingKVs", Long.valueOf(this.totalCompactingKVs)); sb = Strings.appendKeyValue(sb, "currentCompactedKVs", Long.valueOf(this.currentCompactedKVs)); float compactionProgressPct = Float.NaN; if (this.totalCompactingKVs > 0) { compactionProgressPct = Float.valueOf((float) this.currentCompactedKVs / this.totalCompactingKVs); } sb = Strings.appendKeyValue(sb, "compactionProgressPct", compactionProgressPct); String[] coprocessorStrings = getRsCoprocessors(); if (coprocessorStrings != null) { sb = Strings.appendKeyValue(sb, "coprocessors", Arrays.toString(coprocessorStrings)); } 
return sb.toString(); } public static final ServerLoad EMPTY_SERVERLOAD = new ServerLoad(ClusterStatusProtos.ServerLoad.newBuilder().build()); public long getReportTime() { return reportTime; } @Override public int hashCode() { return Objects.hashCode(stores, storefiles, storeUncompressedSizeMB, storefileSizeMB, memstoreSizeMB, storefileIndexSizeKB, readRequestsCount, filteredReadRequestsCount, writeRequestsCount, rootIndexSizeKB, totalStaticIndexSizeKB, totalStaticBloomSizeKB, totalCompactingKVs, currentCompactedKVs); } @Override public boolean equals(Object other) { if (other == this) return true; if (other instanceof ServerLoad) { ServerLoad sl = ((ServerLoad) other); return stores == sl.stores && storefiles == sl.storefiles && storeUncompressedSizeMB == sl.storeUncompressedSizeMB && storefileSizeMB == sl.storefileSizeMB && memstoreSizeMB == sl.memstoreSizeMB && storefileIndexSizeKB == sl.storefileIndexSizeKB && readRequestsCount == sl.readRequestsCount && filteredReadRequestsCount == sl.filteredReadRequestsCount && writeRequestsCount == sl.writeRequestsCount && rootIndexSizeKB == sl.rootIndexSizeKB && totalStaticIndexSizeKB == sl.totalStaticIndexSizeKB && totalStaticBloomSizeKB == sl.totalStaticBloomSizeKB && totalCompactingKVs == sl.totalCompactingKVs && currentCompactedKVs == sl.currentCompactedKVs; } return false; } }