gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright 2014 Heinrich Reimer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.heinrichreimersoftware.materialdrawer.structure; import android.content.Context; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.widget.ArrayAdapter; import com.heinrichreimersoftware.materialdrawer.drawable.RoundedAvatarDrawable; /** * Object to be used with {@link com.heinrichreimersoftware.materialdrawer.DrawerAdapter} and {@link com.heinrichreimersoftware.materialdrawer.DrawerView} to display a drawer item. * Can hold an image, a primary text, a secondary text and a listener. */ public class DrawerItem { public static final int ICON = 1; public static final int AVATAR = 2; public static final int SMALL_AVATAR = 3; public static final int SINGLE_LINE = 3; public static final int TWO_LINE = 4; public static final int THREE_LINE = 5; private boolean mIsHeader = false; private long mId = -1; private Drawable mImage; private int mImageMode = -1; private String mTextPrimary; private String mTextSecondary; private int mTextMode = -1; private OnItemClickListener mOnClickListener; private ArrayAdapter<DrawerItem> mAdapter; /** * Sets whether the drawer item is a header * * @param isHeader True if the drawer item should be a header, false otherwise. 
*/ public DrawerItem setIsHeader(boolean isHeader) { mIsHeader = isHeader; notifyDataChanged(); return this; } /** * Gets whether the drawer item is a header * * @return True if the drawer item is a header, false otherwise. */ public boolean isHeader() { return mIsHeader; } /** * Sets whether the drawer item is a divider * * @param isDivider True if the drawer item should be a divider, false otherwise. */ @Deprecated public DrawerItem setIsDivider(boolean isDivider) { setIsHeader(isDivider); return this; } /** * Gets whether the drawer item is a divider * * @return True if the drawer item is a divider, false otherwise. */ @Deprecated public boolean isDivider() { return isHeader(); } /** * Sets an ID the drawer item * * @param id ID to set */ public DrawerItem setId(long id) { mId = id; return this; } /** * Gets the ID of the drawer item * * @return ID of the drawer item */ public long getId() { return mId; } /** * Sets an image with a given image mode to the drawer item * * @param image Image to set * @param imageMode Image mode to set */ public DrawerItem setImage(Drawable image, int imageMode) { mImage = image; setImageMode(imageMode); notifyDataChanged(); return this; } /** * Sets an image to the drawer item * * @param image Image to set */ public DrawerItem setImage(Drawable image) { setImage(image, ICON); notifyDataChanged(); return this; } /** * Sets an image with a given image mode to the drawer item * * @param image Image to set * @param imageMode Image mode to set */ public DrawerItem setImage(Context context, Bitmap image, int imageMode) { return setImage(new BitmapDrawable(context.getResources(), image), imageMode); } /** * Sets an image to the drawer item * * @param image Image to set */ public DrawerItem setImage(Context context, Bitmap image) { return setImage(new BitmapDrawable(context.getResources(), image)); } /** * Sets a rounded image with a given image mode to the drawer item * * @param image Image to set * @param imageMode Image mode to set */ 
public DrawerItem setRoundedImage(BitmapDrawable image, int imageMode) { return setImage(new RoundedAvatarDrawable(image.getBitmap()), imageMode); } /** * Sets a rounded image to the drawer item * * @param image Image to set */ public DrawerItem setRoundedImage(BitmapDrawable image) { return setImage(new RoundedAvatarDrawable(image.getBitmap()), AVATAR); } /** * Sets a rounded image with a given image mode to the drawer item * * @param image Image to set * @param imageMode Image mode to set */ public DrawerItem setRoundedImage(Context context, Bitmap image, int imageMode) { return setImage(new RoundedAvatarDrawable(new BitmapDrawable(context.getResources(), image).getBitmap()), imageMode); } /** * Sets a rounded image to the drawer item * * @param image Image to set */ public DrawerItem setRoundedImage(Context context, Bitmap image) { return setImage(new RoundedAvatarDrawable(new BitmapDrawable(context.getResources(), image).getBitmap()), AVATAR); } /** * Gets the image of the drawer item * * @return Image of the drawer item */ public Drawable getImage() { return mImage; } /** * Gets whether the drawer item has an image set to it * * @return True if the drawer item has an image set to it, false otherwise. 
*/ public boolean hasImage() { return mImage != null; } /** * Removes the image from the drawer item */ public DrawerItem removeImage() { mImage = null; notifyDataChanged(); return this; } /** * Sets an image mode to the drawer item * * @param imageMode Image mode to set */ public DrawerItem setImageMode(int imageMode) { if (imageMode != ICON && imageMode != AVATAR && imageMode != SMALL_AVATAR) { throw new IllegalArgumentException("Image mode must be either ICON or AVATAR."); } mImageMode = imageMode; notifyDataChanged(); return this; } /** * Gets the image mode of the drawer item * * @return Image mode of the drawer item */ public int getImageMode() { return mImageMode; } /** * Gets whether the drawer item has an image mode set to it * * @return True if the drawer item has an image mode set to it, false otherwise. */ public boolean hasImageMode() { return mImageMode > 0; } /** * Resets the image mode from the drawer item */ public DrawerItem resetImageMode() { mImageMode = ICON; notifyDataChanged(); return this; } /** * Sets a primary text to the drawer item * * @param textPrimary Primary text to set */ public DrawerItem setTextPrimary(String textPrimary) { mTextPrimary = textPrimary; notifyDataChanged(); return this; } /** * Gets the primary text of the drawer item * * @return Primary text of the drawer item */ public String getTextPrimary() { return mTextPrimary; } /** * Gets whether the drawer item has a primary text set to it * * @return True if the drawer item has a primary text set to it, false otherwise. 
*/ public boolean hasTextPrimary() { return mTextPrimary != null && !mTextPrimary.equals(""); } /** * Removes the primary text from the drawer item */ public DrawerItem removeTextPrimary() { mTextPrimary = null; notifyDataChanged(); return this; } /** * Sets a secondary text with a given text mode to the drawer item * * @param textSecondary Secondary text to set * @param textMode Text mode to set */ public DrawerItem setTextSecondary(String textSecondary, int textMode) { mTextSecondary = textSecondary; setTextMode(textMode); notifyDataChanged(); return this; } /** * Sets a secondary text to the drawer item * * @param textSecondary Secondary text to set */ public DrawerItem setTextSecondary(String textSecondary) { setTextSecondary(textSecondary, TWO_LINE); return this; } /** * Gets the secondary text of the drawer item * * @return Secondary text of the drawer item */ public String getTextSecondary() { return mTextSecondary; } /** * Gets whether the drawer item has a secondary text set to it * * @return True if the drawer item has a secondary text set to it, false otherwise. */ public boolean hasTextSecondary() { return mTextSecondary != null && !mTextSecondary.equals(""); } /** * Removes the secondary text from the drawer item */ public DrawerItem removeTextSecondary() { mTextSecondary = null; notifyDataChanged(); return this; } /** * Sets a text mode to the drawer item * * @param textMode Text mode to set */ public DrawerItem setTextMode(int textMode) { if (textMode != SINGLE_LINE && textMode != TWO_LINE && textMode != THREE_LINE) { throw new IllegalArgumentException("Image mode must be either SINGLE_LINE, TWO_LINE or THREE_LINE."); } mTextMode = textMode; notifyDataChanged(); return this; } /** * Gets the text mode of the drawer item * * @return Text mode of the drawer item */ public int getTextMode() { return mTextMode; } /** * Gets whether the drawer item has a text mode set to it * * @return True if the drawer item has a text mode set to it, false otherwise. 
*/ public boolean hasTextMode() { return mTextMode > 0; } /** * Resets the text mode from the drawer item */ public DrawerItem resetTextMode() { mTextMode = SINGLE_LINE; notifyDataChanged(); return this; } /** * Sets a click listener to the drawer item * * @param listener Listener to set */ public DrawerItem setOnItemClickListener(OnItemClickListener listener) { mOnClickListener = listener; notifyDataChanged(); return this; } /** * Gets the click listener of the drawer item * * @return Click listener of the drawer item */ public OnItemClickListener getOnItemClickListener() { return mOnClickListener; } /** * Gets whether the drawer item has a click listener set to it * * @return True if the drawer item has a click listener set to it, false otherwise. */ public boolean hasOnItemClickListener() { return mOnClickListener != null; } /** * Removes the click listener from the drawer item */ public DrawerItem removeOnItemClickListener() { mOnClickListener = null; notifyDataChanged(); return this; } /** * Attaches the drawer item to an adapter * * @param adapter Adapter to attach to */ public DrawerItem attachTo(ArrayAdapter<DrawerItem> adapter) { mAdapter = adapter; notifyDataChanged(); return this; } /** * Detaches the drawer item from its adapter */ public DrawerItem detach() { mAdapter = null; return this; } protected void notifyDataChanged() { if (mAdapter != null) { mAdapter.notifyDataSetChanged(); } } public interface OnItemClickListener { void onClick(DrawerItem item, long id, int position); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2beta2/storage.proto package com.google.privacy.dlp.v2beta2; /** * <pre> * General identifier of a data field in a storage service. * </pre> * * Protobuf type {@code google.privacy.dlp.v2beta2.FieldId} */ public final class FieldId extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta2.FieldId) FieldIdOrBuilder { private static final long serialVersionUID = 0L; // Use FieldId.newBuilder() to construct. private FieldId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FieldId() { name_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FieldId( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
com.google.privacy.dlp.v2beta2.DlpStorage.internal_static_google_privacy_dlp_v2beta2_FieldId_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta2.DlpStorage.internal_static_google_privacy_dlp_v2beta2_FieldId_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta2.FieldId.class, com.google.privacy.dlp.v2beta2.FieldId.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); 
} size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2beta2.FieldId)) { return super.equals(obj); } com.google.privacy.dlp.v2beta2.FieldId other = (com.google.privacy.dlp.v2beta2.FieldId) obj; boolean result = true; result = result && getName() .equals(other.getName()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta2.FieldId 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta2.FieldId parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta2.FieldId parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta2.FieldId parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2beta2.FieldId prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * General identifier of a data field in a storage service. * </pre> * * Protobuf type {@code google.privacy.dlp.v2beta2.FieldId} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta2.FieldId) com.google.privacy.dlp.v2beta2.FieldIdOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2beta2.DlpStorage.internal_static_google_privacy_dlp_v2beta2_FieldId_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta2.DlpStorage.internal_static_google_privacy_dlp_v2beta2_FieldId_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta2.FieldId.class, com.google.privacy.dlp.v2beta2.FieldId.Builder.class); } // Construct using com.google.privacy.dlp.v2beta2.FieldId.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2beta2.DlpStorage.internal_static_google_privacy_dlp_v2beta2_FieldId_descriptor; } public com.google.privacy.dlp.v2beta2.FieldId getDefaultInstanceForType() { 
return com.google.privacy.dlp.v2beta2.FieldId.getDefaultInstance(); } public com.google.privacy.dlp.v2beta2.FieldId build() { com.google.privacy.dlp.v2beta2.FieldId result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.privacy.dlp.v2beta2.FieldId buildPartial() { com.google.privacy.dlp.v2beta2.FieldId result = new com.google.privacy.dlp.v2beta2.FieldId(this); result.name_ = name_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2beta2.FieldId) { return mergeFrom((com.google.privacy.dlp.v2beta2.FieldId)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2beta2.FieldId other) { if (other == com.google.privacy.dlp.v2beta2.FieldId.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.privacy.dlp.v2beta2.FieldId parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.privacy.dlp.v2beta2.FieldId) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object name_ = ""; /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <pre> * Name describing the field. * </pre> * * <code>string name = 1;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * Name describing the field. 
* </pre> * * <code>string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta2.FieldId) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta2.FieldId) private static final com.google.privacy.dlp.v2beta2.FieldId DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta2.FieldId(); } public static com.google.privacy.dlp.v2beta2.FieldId getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FieldId> PARSER = new com.google.protobuf.AbstractParser<FieldId>() { public FieldId parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FieldId(input, extensionRegistry); } }; public static com.google.protobuf.Parser<FieldId> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FieldId> getParserForType() { return PARSER; } public com.google.privacy.dlp.v2beta2.FieldId getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.contentful.java.cma.model;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;

/**
 * What constraints are applied to this rule?
 */
public class CMAConstraint {
  private CMAConstraint[] and;
  private CMAConstraint[] or;
  private CMAConstraint[] not;
  private Equals equals;
  private FieldKeyPath[] fieldKeyPaths;

  /**
   * Which constraints have to be all satisfied?
   *
   * @return the and constraints.
   */
  public CMAConstraint[] getAnd() {
    return and;
  }

  /**
   * Set new constraints which all have to be satisfied.
   *
   * @param and constraints to be satisfied.
   * @return this constraint for chaining.
   */
  public CMAConstraint setAnd(CMAConstraint... and) {
    this.and = and;
    return this;
  }

  /**
   * Which array of constraints where only one needs to be satisfied?
   *
   * @return the or constraints.
   */
  public CMAConstraint[] getOr() {
    return or;
  }

  /**
   * Set a new list of constraints where only one needs to be satisfied.
   *
   * @param or constraints of which one needs to be satisfied.
   * @return this constraint for chaining.
   */
  public CMAConstraint setOr(CMAConstraint... or) {
    this.or = or;
    return this;
  }

  /**
   * Which constraints do need to be not satisfied?
   * <p>
   * This effectively negates all the constraints.
   *
   * @return an array of constraints that must not be satisfied.
   */
  public CMAConstraint[] getNot() {
    return not;
  }

  /**
   * Set a new array of constraints which must not be satisfied.
   *
   * @param not a constraint not to be true.
   * @return this constraint for chaining.
   */
  public CMAConstraint setNot(CMAConstraint... not) {
    this.not = not;
    return this;
  }

  /**
   * Which condition has to be satisfied to satisfy this constraint?
   *
   * @return an equals condition.
   */
  public Equals getEquals() {
    return equals;
  }

  /**
   * Update the condition to be satisfied to satisfy this constraint.
   *
   * @param equals the equals constraint.
   * @return this instance for chaining.
   */
  public CMAConstraint setEquals(Equals equals) {
    this.equals = equals;
    return this;
  }

  /**
   * On which fieldKeyPaths does this constraint act?
   *
   * @return an array of fieldKeyPaths to be acted upon.
   */
  public FieldKeyPath[] getFieldKeyPaths() {
    return fieldKeyPaths;
  }

  /**
   * Set the array of fieldKeyPaths to be used.
   *
   * @param fieldKeyPaths the new fieldKeyPaths to be set.
   * @return this instance for chaining.
   */
  public CMAConstraint setFieldKeyPaths(FieldKeyPath... fieldKeyPaths) {
    this.fieldKeyPaths = fieldKeyPaths;
    return this;
  }

  /**
   * @return a human readable string, representing the object.
   */
  @Override public String toString() {
    return "CMAConstraint { "
        + "and = " + Arrays.toString(getAnd()) + ", "
        + "equals = " + getEquals() + ", "
        + "not = " + Arrays.toString(getNot()) + ", "
        + "or = " + Arrays.toString(getOr()) + ", "
        + "fieldKeyPaths = " + Arrays.toString(getFieldKeyPaths()) + " "
        + "}";
  }

  /**
   * Define a path to a field.
   */
  public static class FieldKeyPath {
    private String doc;

    /**
     * @return the doc part of the path.
     */
    public String getDoc() {
      return doc;
    }

    /**
     * Set the path.
     *
     * @param doc the path to the document, like "fields.name.nopenotok".
     * @return this instance for chaining.
     **/
    public FieldKeyPath setDoc(String doc) {
      this.doc = doc;
      return this;
    }

    /**
     * @return a human readable string, representing the object.
     */
    @Override public String toString() {
      return "FieldKeyPath { "
          + "doc = " + getDoc() + " "
          + "}";
    }
  }

  /**
   * Create an equals deciding whether the path to a field constrains a given value.
   * <p>
   * Internally this is a two-element list: index 0 is the {@link FieldKeyPath},
   * index 1 is the value to compare against.
   */
  public static class Equals extends ArrayList<Object> {
    private static final long serialVersionUID = 4011458721151983325L;

    /**
     * @return which path this equals is targeting, or null if not (fully) set.
     */
    public FieldKeyPath getPath() {
      if (size() == 2) {
        final Object path = this.get(0);
        if (path instanceof FieldKeyPath) {
          return (FieldKeyPath) path;
        } else if (path instanceof Map) {
          // A deserialized path arrives as a raw map; convert it once and cache
          // the converted object back into slot 0. The cast is unchecked because
          // the map's type arguments are erased by the deserializer.
          @SuppressWarnings("unchecked")
          Map<String, String> mappedPath = (Map<String, String>) path;
          final FieldKeyPath objectifiedPath = new FieldKeyPath()
              .setDoc(mappedPath.get("doc"));
          setPath(objectifiedPath);
          return objectifiedPath;
        }
        // missing else: fall through to return `null`, not a valid state.
      }
      return null;
    }

    /**
     * Update the fieldKeyPath component.
     *
     * @param fieldKeyPath the new fieldKeyPath to be applied.
     * @return this instance for chaining
     */
    public Equals setPath(FieldKeyPath fieldKeyPath) {
      if (size() == 0) {
        add(fieldKeyPath);
      } else {
        set(0, fieldKeyPath);
      }
      return this;
    }

    /**
     * @return the value to be used for comparison, or null if not (fully) set.
     */
    public String getValue() {
      if (size() == 2) {
        return (String) this.get(1);
      } else {
        return null;
      }
    }

    /**
     * Update the value to be used in comparison.
     *
     * @param value the value.
     * @return this instance for chaining.
     */
    public Equals setValue(String value) {
      switch (size()) {
        case 0:
          // BUGFIX: previously only a null path placeholder was added and the
          // value itself was silently dropped. Add the placeholder AND the value
          // so setValue() works regardless of call order with setPath().
          add(null);
          add(value);
          break;
        case 1:
          add(value);
          break;
        case 2:
          set(1, value);
          break;
        default:
          break;
      }
      return this;
    }

    /**
     * @return Human readable representation of this instance.
     */
    @Override public String toString() {
      // BUGFIX: the previous version relied on `"…" + getPath() != null ? … : …`,
      // which parses as `("…" + getPath()) != null` — always true — so it could
      // NPE on a null path and discarded most of the intended output. The
      // null check is now explicitly parenthesized.
      return "Equals { "
          + "doc = " + (getPath() != null ? getPath().doc : "<null>") + ", "
          + "value = " + getValue() + " "
          + "}";
    }
  }
}
/* * Copyright (c) 2014, Cloudera and Intel, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.oryx.app.pmml; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import com.google.common.base.Preconditions; import org.dmg.pmml.Array; import org.dmg.pmml.DataDictionary; import org.dmg.pmml.DataField; import org.dmg.pmml.DataType; import org.dmg.pmml.Extension; import org.dmg.pmml.FieldName; import org.dmg.pmml.FieldUsageType; import org.dmg.pmml.MiningField; import org.dmg.pmml.MiningSchema; import org.dmg.pmml.OpType; import org.dmg.pmml.PMML; import org.dmg.pmml.TypeDefinitionField; import org.dmg.pmml.Value; import com.cloudera.oryx.app.schema.CategoricalValueEncodings; import com.cloudera.oryx.app.schema.InputSchema; import com.cloudera.oryx.common.text.TextUtils; /** * General app tier PMML-related utility methods. 
*/ public final class AppPMMLUtils { private AppPMMLUtils() {} public static String getExtensionValue(PMML pmml, String name) { for (Extension extension : pmml.getExtensions()) { if (name.equals(extension.getName())) { return extension.getValue(); } } return null; } /** * @param pmml PMML model to query for extensions * @param name name of extension to query * @return content of the extension, parsed as if it were a PMML {@link Array}: * space-separated values, with PMML quoting rules */ public static List<String> getExtensionContent(PMML pmml, String name) { for (Extension extension : pmml.getExtensions()) { if (name.equals(extension.getName())) { List<?> content = extension.getContent(); Preconditions.checkArgument(content.size() <= 1); if (content.isEmpty()) { return Collections.emptyList(); } return Arrays.asList(TextUtils.parsePMMLDelimited(content.get(0).toString())); } } return null; } /** * @param pmml PMML model to add extension to, with no content. It may possibly duplicate * existing extensions. * @param key extension key * @param value extension value */ public static void addExtension(PMML pmml, String key, Object value) { pmml.getExtensions().add(new Extension().setName(key).setValue(value.toString())); } /** * @param pmml PMML model to add extension to, with a single {@code String} content and no value. * The content is encoded as if they were being added to a PMML {@link Array} and are * space-separated with PMML quoting rules * @param key extension key * @param content list of values to add as a {@code String} */ public static void addExtensionContent(PMML pmml, String key, Collection<?> content) { if (content.isEmpty()) { return; } String joined = TextUtils.joinPMMLDelimited(content); pmml.getExtensions().add(new Extension().setName(key).addContent(joined)); } /** * @param values {@code double} value to make into a PMML {@link Array} * @return PMML {@link Array} representation */ public static Array toArray(double... 
values) { List<Double> valueList = new ArrayList<>(values.length); for (double value : values) { valueList.add(value); } String arrayValue = TextUtils.joinPMMLDelimitedNumbers(valueList); return new Array(Array.Type.REAL, arrayValue).setN(valueList.size()); } /** * @param schema {@link InputSchema} whose information should be encoded in PMML * @return a {@link MiningSchema} representing the information contained in an * {@link InputSchema} */ public static MiningSchema buildMiningSchema(InputSchema schema) { return buildMiningSchema(schema, null); } /** * @param schema {@link InputSchema} whose information should be encoded in PMML * @param importances optional feature importances. May be {@code null}, or else the size * of the array must match the number of predictors in the schema, which may be * less than the total number of features. * @return a {@link MiningSchema} representing the information contained in an * {@link InputSchema} */ public static MiningSchema buildMiningSchema(InputSchema schema, double[] importances) { Preconditions.checkArgument( importances == null || (importances.length == schema.getNumPredictors())); List<String> featureNames = schema.getFeatureNames(); List<MiningField> miningFields = new ArrayList<>(); for (int featureIndex = 0; featureIndex < featureNames.size(); featureIndex++) { String featureName = featureNames.get(featureIndex); MiningField field = new MiningField(FieldName.create(featureName)); if (schema.isNumeric(featureName)) { field.setOpType(OpType.CONTINUOUS); field.setUsageType(FieldUsageType.ACTIVE); } else if (schema.isCategorical(featureName)) { field.setOpType(OpType.CATEGORICAL); field.setUsageType(FieldUsageType.ACTIVE); } else { // ID, or ignored field.setUsageType(FieldUsageType.SUPPLEMENTARY); } if (schema.hasTarget() && schema.isTarget(featureName)) { // Override to PREDICTED field.setUsageType(FieldUsageType.PREDICTED); } // Will be active if and only if it's a predictor if (field.getUsageType() == 
FieldUsageType.ACTIVE && importances != null) { int predictorIndex = schema.featureToPredictorIndex(featureIndex); field.setImportance(importances[predictorIndex]); } miningFields.add(field); } return new MiningSchema(miningFields); } /** * @param miningSchema {@link MiningSchema} from a model * @return names of features in order */ public static List<String> getFeatureNames(MiningSchema miningSchema) { List<String> names = new ArrayList<>(); for (MiningField field : miningSchema.getMiningFields()) { names.add(field.getName().getValue()); } return names; } /** * @param miningSchema {@link MiningSchema} from a model * @return index of the {@link FieldUsageType#PREDICTED} feature */ public static Integer findTargetIndex(MiningSchema miningSchema) { List<MiningField> miningFields = miningSchema.getMiningFields(); for (int i = 0; i < miningFields.size(); i++) { if (miningFields.get(i).getUsageType() == FieldUsageType.PREDICTED) { return i; } } return null; } public static DataDictionary buildDataDictionary( InputSchema schema, CategoricalValueEncodings categoricalValueEncodings) { List<String> featureNames = schema.getFeatureNames(); List<DataField> dataFields = new ArrayList<>(); for (int featureIndex = 0; featureIndex < featureNames.size(); featureIndex++) { String featureName = featureNames.get(featureIndex); OpType opType; DataType dataType; if (schema.isNumeric(featureName)) { opType = OpType.CONTINUOUS; dataType = DataType.DOUBLE; } else if (schema.isCategorical(featureName)) { opType = OpType.CATEGORICAL; dataType = DataType.STRING; } else { // Don't know opType = null; dataType = null; } DataField field = new DataField(FieldName.create(featureName), opType, dataType); if (schema.isCategorical(featureName)) { Collection<String> valuesOrderedByEncoding = new TreeMap<>(categoricalValueEncodings.getEncodingValueMap(featureIndex)).values(); for (String value : valuesOrderedByEncoding) { field.getValues().add(new Value(value)); } } dataFields.add(field); } 
DataDictionary dictionary = new DataDictionary(dataFields); dictionary.setNumberOfFields(dataFields.size()); return dictionary; } /** * @param dictionary {@link DataDictionary} from model * @return names of features in order */ public static List<String> getFeatureNames(DataDictionary dictionary) { List<DataField> dataFields = dictionary.getDataFields(); Preconditions.checkArgument(dataFields != null && !dataFields.isEmpty(), "No fields in DataDictionary"); List<String> names = new ArrayList<>(dataFields.size()); for (TypeDefinitionField field : dataFields) { names.add(field.getName().getValue()); } return names; } public static CategoricalValueEncodings buildCategoricalValueEncodings( DataDictionary dictionary) { Map<Integer,Collection<String>> indexToValues = new HashMap<>(); List<DataField> dataFields = dictionary.getDataFields(); for (int featureIndex = 0; featureIndex < dataFields.size(); featureIndex++) { TypeDefinitionField field = dataFields.get(featureIndex); Collection<Value> values = field.getValues(); if (values != null && !values.isEmpty()) { Collection<String> categoricalValues = new ArrayList<>(); for (Value value : values) { categoricalValues.add(value.getValue()); } indexToValues.put(featureIndex, categoricalValues); } } return new CategoricalValueEncodings(indexToValues); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oodt.cas.filemgr.system; import org.apache.http.auth.AuthSchemeProvider; import org.apache.http.client.HttpClient; import org.apache.http.client.HttpRequestRetryHandler; import org.apache.http.client.config.RequestConfig; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.protocol.HttpContext; import org.apache.oodt.cas.cli.CmdLineUtility; import org.apache.oodt.cas.filemgr.datatransfer.DataTransfer; import org.apache.oodt.cas.filemgr.exceptions.FileManagerException; import org.apache.oodt.cas.filemgr.structs.Element; import org.apache.oodt.cas.filemgr.structs.FileTransferStatus; import org.apache.oodt.cas.filemgr.structs.Product; import org.apache.oodt.cas.filemgr.structs.ProductPage; import org.apache.oodt.cas.filemgr.structs.ProductType; import org.apache.oodt.cas.filemgr.structs.Query; import org.apache.oodt.cas.filemgr.structs.Reference; import org.apache.oodt.cas.filemgr.structs.exceptions.CatalogException; import org.apache.oodt.cas.filemgr.structs.exceptions.ConnectionException; import org.apache.oodt.cas.filemgr.structs.exceptions.DataTransferException; import 
org.apache.oodt.cas.filemgr.structs.exceptions.RepositoryManagerException; import org.apache.oodt.cas.filemgr.structs.exceptions.ValidationLayerException; import org.apache.oodt.cas.filemgr.structs.exceptions.VersioningException; import org.apache.oodt.cas.filemgr.structs.query.ComplexQuery; import org.apache.oodt.cas.filemgr.structs.query.QueryResult; import org.apache.oodt.cas.filemgr.util.GenericFileManagerObjectFactory; import org.apache.oodt.cas.filemgr.util.XmlRpcStructFactory; import org.apache.oodt.cas.filemgr.versioning.Versioner; import org.apache.oodt.cas.metadata.Metadata; import org.apache.xmlrpc.XmlRpcClient; import org.apache.xmlrpc.XmlRpcClientException; import org.apache.xmlrpc.XmlRpcException; import org.apache.xmlrpc.XmlRpcTransport; import org.apache.xmlrpc.XmlRpcTransportFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URL; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Vector; import java.util.logging.Level; import java.util.logging.Logger; /** * @author mattmann (Chris Mattmann) * @author bfoster (Brian Foster) * @version $Revision$ * @deprecated replaced by avro-rpc * <p/> * <p> The XML RPC based file manager client. </p> */ @Deprecated public class XmlRpcFileManagerClient implements FileManagerClient { /* our xml rpc client */ private transient XmlRpcClient client = null; /* our log stream */ private static Logger LOG = Logger.getLogger(XmlRpcFileManagerClient.class .getName()); /* file manager url */ private URL fileManagerUrl = null; /* data transferer needed if client is request to move files itself */ private DataTransfer dataTransfer = null; public XmlRpcFileManagerClient(final URL url) throws ConnectionException { this(url, true); } /** * <p> Constructs a new XmlRpcFileManagerClient with the given <code>url</code>. </p> * * @param url The url pointer to the xml rpc file manager service. 
* @param testConnection Whether or not to check if server at given url is alive. */ public XmlRpcFileManagerClient(final URL url, boolean testConnection) throws ConnectionException { // set up the configuration, if there is any if (System.getProperty("org.apache.oodt.cas.filemgr.properties") != null) { String configFile = System .getProperty("org.apache.oodt.cas.filemgr.properties"); LOG.log(Level.INFO, "Loading File Manager Configuration Properties from: [" + configFile + "]"); try { System.getProperties().load( new FileInputStream(new File(configFile))); } catch (Exception e) { LOG.log(Level.INFO, "Error loading configuration properties from: [" + configFile + "]"); } } XmlRpcTransportFactory transportFactory = new XmlRpcTransportFactory() { public XmlRpcTransport createTransport() throws XmlRpcClientException { HttpRequestRetryHandler myRetryHandler = new HttpRequestRetryHandler() { public boolean retryRequest( IOException exception, int count, HttpContext context){ if (count < Integer .getInteger( "org.apache.oodt.cas.filemgr.system.xmlrpc.connection.retries", 3)) { try { Thread .sleep(Integer .getInteger( "org.apache.oodt.cas.filemgr.system.xmlrpc.connection.retry.interval.seconds", 0) * 1000); return true; } catch (Exception ignored) { } } return false; } }; RequestConfig config = RequestConfig.custom() .setSocketTimeout(Integer .getInteger( "org.apache.oodt.cas.filemgr.system.xmlrpc.connectionTimeout.minutes", 20) * 60 * 1000) .setConnectTimeout(Integer .getInteger( "org.apache.oodt.cas.filemgr.system.xmlrpc.requestTimeout.minutes", 60) * 60 * 1000) .build(); Registry<AuthSchemeProvider> r = RegistryBuilder.<AuthSchemeProvider>create().build(); HttpClient client = HttpClients.custom().setRetryHandler(myRetryHandler).setDefaultAuthSchemeRegistry(r).setDefaultRequestConfig(config).build(); CommonsXmlRpcTransport transport = new CommonsXmlRpcTransport(url, client); transport .setConnectionTimeout(Integer .getInteger( 
"org.apache.oodt.cas.filemgr.system.xmlrpc.connectionTimeout.minutes",
                        20) * 60 * 1000);
            // Request timeout is likewise configurable in minutes (default 60).
            transport.setTimeout(Integer.getInteger(
                        "org.apache.oodt.cas.filemgr.system.xmlrpc.requestTimeout.minutes",
                        60) * 60 * 1000);
            return transport;
        }

        public void setProperty(String arg0, Object arg1) {
            // no-op: this transport factory exposes no settable properties
        }
    };

    // Build the XML-RPC client over the custom transport and remember the server URL.
    client = new XmlRpcClient(url, transportFactory);
    fileManagerUrl = url;

    // Fail fast if the caller asked for a connection check and the server doesn't answer.
    if (testConnection && !isAlive()) {
        throw new ConnectionException("Exception connecting to filemgr: ["
            + this.fileManagerUrl + "]");
    }
}

/**
 * Asks the server to reload its configuration and policy files.
 *
 * @return true on success; RPC/IO failures are logged at WARNING and reported as false
 */
public boolean refreshConfigAndPolicy() {
    boolean success;
    Vector<Object> argList = new Vector<Object>();
    try {
        success = (Boolean) client.execute("filemgr.refreshConfigAndPolicy",
            argList);
    } catch (XmlRpcException e) {
        LOG.log(Level.WARNING, "XmlRpcException when connecting to filemgr: ["
            + this.fileManagerUrl + "]");
        success = false;
    } catch (IOException e) {
        LOG.log(Level.WARNING, "IOException when connecting to filemgr: ["
            + this.fileManagerUrl + "]");
        success = false;
    }
    return success;
}

/**
 * Pings the file manager server.
 *
 * @return true if the server answered {@code filemgr.isAlive}; false on any RPC/IO
 *         failure (logged at WARNING, never thrown)
 */
public boolean isAlive() {
    boolean connected;
    Vector<Object> argList = new Vector<Object>();
    try {
        connected = (Boolean) client.execute("filemgr.isAlive", argList);
    } catch (XmlRpcException e) {
        LOG.log(Level.WARNING, "XmlRpcException when connecting to filemgr: ["
            + this.fileManagerUrl + "]");
        connected = false;
    } catch (IOException e) {
        LOG.log(Level.WARNING, "IOException when connecting to filemgr: ["
            + this.fileManagerUrl + "]");
        connected = false;
    }
    return connected;
}

/**
 * Notifies the server that the given product is being transferred.
 *
 * @param product product whose transfer is in progress (marshalled via XmlRpcStructFactory)
 * @return server-reported success flag
 * @throws DataTransferException wrapping any RPC or IO failure
 */
public boolean transferringProduct(Product product) throws DataTransferException {
    Vector<Object> argList = new Vector<Object>();
    Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
    argList.add(productHash);
    boolean success;
    try {
        success = (Boolean) client.execute("filemgr.transferringProduct",
            argList);
    } catch (XmlRpcException e) {
        throw new DataTransferException(e);
    } catch (IOException e) {
        throw new DataTransferException(e);
    }
    return success;
}

/**
 * Clears the server-side transfer status for the given product.
 *
 * @return server-reported success flag
 * @throws DataTransferException wrapping any RPC or IO failure
 */
public boolean removeProductTransferStatus(Product product) throws DataTransferException {
    Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
    Vector<Object> argList = new Vector<Object>();
    argList.add(productHash);
    boolean success;
    try {
        success = (Boolean) client.execute(
            "filemgr.removeProductTransferStatus", argList);
    } catch (XmlRpcException e) {
        throw new DataTransferException(e);
    } catch (IOException e) {
        throw new DataTransferException(e);
    }
    return success;
}

/**
 * @return true if the server reports the product's transfer as complete
 * @throws DataTransferException wrapping any RPC or IO failure
 */
public boolean isTransferComplete(Product product) throws DataTransferException {
    Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
    Vector<Object> argList = new Vector<Object>();
    argList.add(productHash);
    boolean success;
    try {
        success = (Boolean) client.execute("filemgr.isTransferComplete",
            argList);
    } catch (XmlRpcException e) {
        throw new DataTransferException(e);
    } catch (IOException e) {
        throw new DataTransferException(e);
    }
    return success;
}

/**
 * Asks the server to move a product to a new path.
 *
 * @param product product to move
 * @param newPath destination path on the server
 * @return server-reported success flag
 * @throws DataTransferException wrapping any RPC or IO failure
 */
public boolean moveProduct(Product product, String newPath) throws DataTransferException {
    Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
    Vector<Object> argList = new Vector<Object>();
    argList.add(productHash);
    argList.add(newPath);
    boolean success;
    try {
        success = (Boolean) client.execute("filemgr.moveProduct", argList);
    } catch (XmlRpcException e) {
        throw new DataTransferException(e);
    } catch (IOException e) {
        throw new DataTransferException(e);
    }
    return success;
}

/**
 * Updates an existing product's entry in the catalog.
 *
 * @return server-reported success flag
 * @throws CatalogException wrapping any RPC or IO failure
 */
public boolean modifyProduct(Product product) throws CatalogException {
    Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
    Vector<Object> argList = new Vector<Object>();
    argList.add(productHash);
    boolean success;
    try {
        success = (Boolean) client.execute("filemgr.modifyProduct", argList);
    } catch (XmlRpcException e) {
        throw new CatalogException(e);
    } catch (IOException e) {
        throw new CatalogException(e);
    }
    return success;
}

/** Removes the product's entry from the catalog (body continues on the next source line). */
public boolean removeProduct(Product product) throws
CatalogException { Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); Vector<Object> argList = new Vector<Object>(); argList.add(productHash); boolean success; try { success = (Boolean) client.execute("filemgr.removeProduct", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return success; } @SuppressWarnings("unchecked") public FileTransferStatus getCurrentFileTransfer() throws DataTransferException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> statusHash; FileTransferStatus status = null; try { statusHash = (Map<String, Object>) client.execute( "filemgr.getCurrentFileTransfer", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } if (statusHash != null) { status = XmlRpcStructFactory .getFileTransferStatusFromXmlRpc(statusHash); } return status; } @SuppressWarnings("unchecked") public List<FileTransferStatus> getCurrentFileTransfers() throws DataTransferException { Vector<Object> argList = new Vector<Object>(); Vector<Map<String, Object>> statusVector; List<FileTransferStatus> statuses = null; try { statusVector = (Vector<Map<String, Object>>) client.execute( "filemgr.getCurrentFileTransfers", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } if (statusVector != null) { statuses = XmlRpcStructFactory .getFileTransferStatusesFromXmlRpc(statusVector); } return statuses; } public double getProductPctTransferred(Product product) throws DataTransferException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); Double pct; try { pct = (Double) client.execute("filemgr.getProductPctTransferred", argList); } catch (XmlRpcException e) { throw new 
DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } if (pct != null) { return pct; } return -1.0; } public double getRefPctTransferred(Reference reference) throws DataTransferException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> refHash = XmlRpcStructFactory .getXmlRpcReference(reference); argList.add(refHash); Double pct; try { pct = (Double) client.execute("filemgr.getRefPctTransferred", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } if (pct != null) { return pct; } return -1.0; } @SuppressWarnings("unchecked") public ProductPage pagedQuery(Query query, ProductType type, int pageNum) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> queryHash = XmlRpcStructFactory .getXmlRpcQuery(query); Map<String, Object> typeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(queryHash); argList.add(typeHash); argList.add(pageNum); Map<String, Object> pageHash; try { pageHash = (Map<String, Object>) client.execute( "filemgr.pagedQuery", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return XmlRpcStructFactory.getProductPageFromXmlRpc(pageHash); } @SuppressWarnings("unchecked") public ProductPage getFirstPage(ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); ProductPage page = null; Map<String, Object> pageHash; try { pageHash = (Map<String, Object>) client.execute( "filemgr.getFirstPage", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (pageHash != null) { page = XmlRpcStructFactory.getProductPageFromXmlRpc(pageHash); } return page; } @SuppressWarnings("unchecked") public ProductPage 
getLastPage(ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); ProductPage page = null; Map<String, Object> pageHash; try { pageHash = (Map<String, Object>) client.execute( "filemgr.getLastPage", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (pageHash != null) { page = XmlRpcStructFactory.getProductPageFromXmlRpc(pageHash); } return page; } @SuppressWarnings("unchecked") public ProductPage getNextPage(ProductType type, ProductPage currPage) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); argList.add(XmlRpcStructFactory.getXmlRpcProductPage(currPage)); ProductPage page = null; Map<String, Object> pageHash; try { pageHash = (Map<String, Object>) client.execute( "filemgr.getNextPage", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (pageHash != null) { page = XmlRpcStructFactory.getProductPageFromXmlRpc(pageHash); } return page; } @SuppressWarnings("unchecked") public ProductPage getPrevPage(ProductType type, ProductPage currPage) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); argList.add(XmlRpcStructFactory.getXmlRpcProductPage(currPage)); ProductPage page = null; Map<String, Object> pageHash; try { pageHash = (Map<String, Object>) client.execute( "filemgr.getPrevPage", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (pageHash != null) { page = XmlRpcStructFactory.getProductPageFromXmlRpc(pageHash); } return page; } public String addProductType(ProductType type) throws RepositoryManagerException { String productTypeId; Vector<Object> argList = 
new Vector<Object>(); Map<String, Object> typeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(typeHash); try { productTypeId = (String) client.execute("filemgr.addProductType", argList); } catch (XmlRpcException e) { throw new RepositoryManagerException(e); } catch (IOException e) { throw new RepositoryManagerException(e); } return productTypeId; } public boolean hasProduct(String productName) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(productName); boolean hasProduct; try { hasProduct = (Boolean) client.execute("filemgr.hasProduct", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return hasProduct; } public int getNumProducts(ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); Integer numProducts; try { numProducts = (Integer) client.execute("filemgr.getNumProducts", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return numProducts; } @SuppressWarnings("unchecked") public List<Product> getTopNProducts(int n) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(n); Vector<Map<String, Object>> topNProducts; try { topNProducts = (Vector<Map<String, Object>>) client.execute( "filemgr.getTopNProducts", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return XmlRpcStructFactory .getProductListFromXmlRpc(topNProducts); } @SuppressWarnings("unchecked") public List<Product> getTopNProducts(int n, ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(n); Map<String, Object> productTypeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(productTypeHash); Vector<Map<String, 
Object>> topNProducts; try { topNProducts = (Vector<Map<String, Object>>) client.execute( "filemgr.getTopNProducts", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return XmlRpcStructFactory .getProductListFromXmlRpc(topNProducts); } public void setProductTransferStatus(Product product) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); try { client.execute("filemgr.setProductTransferStatus", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } } public void addProductReferences(Product product) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); try { client.execute("filemgr.addProductReferences", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } } public void addMetadata(Product product, Metadata metadata) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProduct(product)); argList.add(metadata.getHashTable()); try { client.execute("filemgr.addMetadata", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } } public boolean updateMetadata(Product product, Metadata met) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProduct(product)); argList.add(met.getHashTable()); boolean result; try { result = (Boolean) client.execute("filemgr.updateMetadata", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new 
CatalogException(e); } return result; } public String catalogProduct(Product product) throws CatalogException { Vector<Object> argList = new Vector<Object>(); argList.add(XmlRpcStructFactory.getXmlRpcProduct(product)); String productId; try { productId = (String) client.execute("filemgr.catalogProduct", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } return productId; } @SuppressWarnings("unchecked") public Metadata getMetadata(Product product) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); Map<String, Object> metadata; try { metadata = (Map<String, Object>) client.execute( "filemgr.getMetadata", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } Metadata m = new Metadata(); m.addMetadata(metadata); return m; } @SuppressWarnings({ "unchecked", "rawtypes" }) public Metadata getReducedMetadata(Product product, List<?> elements) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); argList.add(new Vector(elements)); Map<String, Object> metadata; try { metadata = (Map<String, Object>) client.execute( "filemgr.getReducedMetadata", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } Metadata m = new Metadata(); m.addMetadata(metadata); return m; } public boolean removeFile(String filePath) throws DataTransferException { Vector<Object> argList = new Vector<Object>(); argList.add(filePath); boolean success; try { success = (Boolean) client.execute("filemgr.removeFile", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new 
DataTransferException(e); } return success; } public byte[] retrieveFile(String filePath, int offset, int numBytes) throws DataTransferException { Vector<Object> argList = new Vector<Object>(); argList.add(filePath); argList.add(offset); argList.add(numBytes); try { return (byte[]) client.execute("filemgr.retrieveFile", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } } public void transferFile(String filePath, byte[] fileData, int offset, int numBytes) throws DataTransferException { Vector<Object> argList = new Vector<Object>(); argList.add(filePath); argList.add(fileData); argList.add(offset); argList.add(numBytes); try { client.execute("filemgr.transferFile", argList); } catch (XmlRpcException e) { throw new DataTransferException(e); } catch (IOException e) { throw new DataTransferException(e); } } @SuppressWarnings("unchecked") public List<Product> getProductsByProductType(ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productTypeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(productTypeHash); Vector<Map<String, Object>> productVector; try { productVector = (Vector<Map<String, Object>>) client.execute( "filemgr.getProductsByProductType", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (productVector == null) { return null; } else { return XmlRpcStructFactory.getProductListFromXmlRpc(productVector); } } @SuppressWarnings("unchecked") public List<Element> getElementsByProductType(ProductType type) throws ValidationLayerException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productTypeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(productTypeHash); Vector<Map<String, Object>> elementVector; try { elementVector = (Vector<Map<String, Object>>) client.execute( 
"filemgr.getElementsByProductType", argList); } catch (XmlRpcException e) { throw new ValidationLayerException(e); } catch (IOException e) { throw new ValidationLayerException(e); } if (elementVector == null) { return null; } else { return XmlRpcStructFactory.getElementListFromXmlRpc(elementVector); } } @SuppressWarnings("unchecked") public Element getElementById(String elementId) throws ValidationLayerException { Vector<Object> argList = new Vector<Object>(); argList.add(elementId); Hashtable<String, Object> elementHash; try { elementHash = (Hashtable<String, Object>) client.execute( "filemgr.getElementById", argList); } catch (XmlRpcException e) { throw new ValidationLayerException(e); } catch (IOException e) { throw new ValidationLayerException(e); } if (elementHash == null) { return null; } else { return XmlRpcStructFactory.getElementFromXmlRpc(elementHash); } } @SuppressWarnings("unchecked") public Element getElementByName(String elementName) throws ValidationLayerException { Vector<Object> argList = new Vector<Object>(); argList.add(elementName); Hashtable<String, Object> elementHash; try { elementHash = (Hashtable<String, Object>) client.execute( "filemgr.getElementByName", argList); } catch (XmlRpcException e) { throw new ValidationLayerException(e); } catch (IOException e) { throw new ValidationLayerException(e); } if (elementHash == null) { return null; } else { return XmlRpcStructFactory.getElementFromXmlRpc(elementHash); } } @SuppressWarnings("unchecked") public Element getElementByName(String elementName, ProductType type) throws ValidationLayerException { Vector<Object> argList = new Vector<Object>(); argList.add(elementName); argList.add(XmlRpcStructFactory.getXmlRpcProductType(type)); Hashtable<String, Object> elementHash; try { elementHash = (Hashtable<String, Object>) client.execute( "filemgr.getElementByName", argList); } catch (XmlRpcException e) { throw new ValidationLayerException(e); } catch (IOException e) { throw new 
ValidationLayerException(e); } if (elementHash == null) { return null; } else { return XmlRpcStructFactory.getElementFromXmlRpc(elementHash); } } public List<QueryResult> complexQuery(ComplexQuery complexQuery) throws CatalogException { try { Map<String, Object> complexQueryHash = XmlRpcStructFactory .getXmlRpcComplexQuery(complexQuery); Vector<Object> argList = new Vector<Object>(); argList.add(complexQueryHash); @SuppressWarnings("unchecked") Vector<Map<String, Object>> queryResultHashVector = (Vector<Map<String, Object>>) client .execute("filemgr.complexQuery", argList); return XmlRpcStructFactory .getQueryResultsFromXmlRpc(queryResultHashVector); } catch (Exception e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e); } } @SuppressWarnings("unchecked") public List<Product> query(Query query, ProductType type) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> queryHash = XmlRpcStructFactory .getXmlRpcQuery(query); Map<String, Object> typeHash = XmlRpcStructFactory .getXmlRpcProductType(type); argList.add(queryHash); argList.add(typeHash); Vector<Map<String, Object>> productVector; try { productVector = (Vector<Map<String, Object>>) client.execute( "filemgr.query", argList); } catch (XmlRpcException e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e); } catch (IOException e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e); } if (productVector == null) { return null; } else { return XmlRpcStructFactory.getProductListFromXmlRpc(productVector); } } @SuppressWarnings("unchecked") public ProductType getProductTypeByName(String productTypeName) throws RepositoryManagerException { Hashtable<String, Object> productTypeHash; Vector<Object> argList = new Vector<Object>(); argList.add(productTypeName); try { productTypeHash = (Hashtable<String, Object>) client.execute( "filemgr.getProductTypeByName", argList); } catch (XmlRpcException e) { throw new 
RepositoryManagerException(e.getLocalizedMessage()); } catch (IOException e) { throw new RepositoryManagerException(e); } if (productTypeHash == null) { return null; } else { return XmlRpcStructFactory .getProductTypeFromXmlRpc(productTypeHash); } } @SuppressWarnings("unchecked") public ProductType getProductTypeById(String productTypeId) throws RepositoryManagerException { Hashtable<String, Object> productTypeHash; Vector<Object> argList = new Vector<Object>(); argList.add(productTypeId); try { productTypeHash = (Hashtable<String, Object>) client.execute( "filemgr.getProductTypeById", argList); } catch (XmlRpcException e) { throw new RepositoryManagerException(e); } catch (IOException e) { throw new RepositoryManagerException(e); } if (productTypeHash == null) { return null; } else { return XmlRpcStructFactory .getProductTypeFromXmlRpc(productTypeHash); } } @SuppressWarnings("unchecked") public List<ProductType> getProductTypes() throws RepositoryManagerException { Vector<Object> argList = new Vector<Object>(); Vector<Map<String, Object>> productTypeVector; try { productTypeVector = (Vector<Map<String, Object>>) client .execute("filemgr.getProductTypes", argList); } catch (XmlRpcException e) { throw new RepositoryManagerException(e); } catch (IOException e) { throw new RepositoryManagerException(e); } if (productTypeVector == null) { return null; } else { return XmlRpcStructFactory .getProductTypeListFromXmlRpc(productTypeVector); } } @SuppressWarnings("unchecked") public List<Reference> getProductReferences(Product product) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Vector<Map<String, Object>> productReferenceVector; Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); try { productReferenceVector = (Vector<Map<String, Object>>) client .execute("filemgr.getProductReferences", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw 
new CatalogException(e); } if (productReferenceVector == null) { return null; } else { return XmlRpcStructFactory .getReferencesFromXmlRpc(productReferenceVector); } } @SuppressWarnings("unchecked") public Product getProductById(String productId) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash; argList.add(productId); try { productHash = (Map<String, Object>) client.execute( "filemgr.getProductById", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (productHash == null) { return null; } else { return XmlRpcStructFactory.getProductFromXmlRpc(productHash); } } @SuppressWarnings("unchecked") public Product getProductByName(String productName) throws CatalogException { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash; argList.add(productName); try { productHash = (Map<String, Object>) client.execute( "filemgr.getProductByName", argList); } catch (XmlRpcException e) { throw new CatalogException(e); } catch (IOException e) { throw new CatalogException(e); } if (productHash == null) { return null; } else { return XmlRpcStructFactory.getProductFromXmlRpc(productHash); } } public String ingestProduct(Product product, Metadata metadata, boolean clientTransfer) throws VersioningException, XmlRpcException, FileManagerException { try { // ingest product Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); argList.add(metadata.getHashTable()); argList.add(clientTransfer); String productId = (String) client.execute("filemgr.ingestProduct", argList); if (clientTransfer) { LOG.log(Level.FINEST, "File Manager Client: clientTransfer enabled: " + "transfering product [" + product.getProductName() + "]"); // we need to transfer the product ourselves // make sure we have the product ID if (productId == null) { throw 
new Exception("Request to ingest product: " + product.getProductName() + " but no product ID returned from File " + "Manager ingest"); } if (dataTransfer == null) { throw new Exception("Request to ingest product: [" + product.getProductName() + "] using client transfer, but no " + "dataTransferer specified!"); } product.setProductId(productId); if (!Boolean.getBoolean("org.apache.oodt.cas.filemgr.serverside.versioning")) { // version the product Versioner versioner = GenericFileManagerObjectFactory .getVersionerFromClassName(product.getProductType() .getVersioner()); if (versioner != null) { versioner.createDataStoreReferences(product, metadata); } // add the newly versioned references to the data store try { addProductReferences(product); } catch (CatalogException e) { LOG .log( Level.SEVERE, "ingestProduct: RepositoryManagerException " + "when adding Product References for Product : " + product.getProductName() + " to RepositoryManager: Message: " + e); throw e; } } else { product.setProductReferences(getProductReferences(product)); } // now transfer the product try { dataTransfer.transferProduct(product); // now update the product's transfer status in the data // store product.setTransferStatus(Product.STATUS_RECEIVED); try { setProductTransferStatus(product); } catch (CatalogException e) { LOG .log( Level.SEVERE, "ingestProduct: RepositoryManagerException " + "when updating product transfer status for Product: " + product.getProductName() + " Message: " + e); throw e; } } catch (Exception e) { LOG.log(Level.SEVERE, "ingestProduct: DataTransferException when transfering Product: " + product.getProductName() + ": Message: " + e); throw new DataTransferException(e); } } return productId; // error versioning file } catch (VersioningException e) { LOG.log(Level.SEVERE, e.getMessage()); LOG.log(Level.SEVERE, "ingestProduct: VersioningException when versioning Product: " + product.getProductName() + " with Versioner " + product.getProductType().getVersioner() + ": 
Message: " + e); throw new VersioningException(e); } catch (XmlRpcException e2) { LOG.log(Level.SEVERE, "Failed to ingest product [ name:" + product.getProductName() + "] :" + e2.getMessage() + " -- rolling back ingest"); try { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); client.execute("filemgr.removeProduct", argList); } catch (Exception e1) { LOG.log(Level.SEVERE, "Failed to rollback ingest of product [" + product + "] : " + e2.getMessage()); } throw e2; } catch (Exception e) { LOG.log(Level.SEVERE, "Failed to ingest product [ id: " + product.getProductId() + "/ name:" + product.getProductName() + "] :" + e + " -- rolling back ingest"); try { Vector<Object> argList = new Vector<Object>(); Map<String, Object> productHash = XmlRpcStructFactory .getXmlRpcProduct(product); argList.add(productHash); client.execute("filemgr.removeProduct", argList); } catch (Exception e1) { LOG.log(Level.SEVERE, "Failed to rollback ingest of product [" + product + "] : " + e); } throw new FileManagerException("Failed to ingest product [" + product + "] : " + e); } } @SuppressWarnings("unchecked") public Metadata getCatalogValues(Metadata metadata, ProductType productType) throws XmlRpcException, IOException { Vector<Object> args = new Vector<Object>(); args.add(metadata.getHashTable()); args.add(XmlRpcStructFactory.getXmlRpcProductType(productType)); Metadata m = new Metadata(); m.addMetadata((Map<String, Object>) this.client.execute( "filemgr.getCatalogValues", args)); return m; } @SuppressWarnings("unchecked") public Metadata getOrigValues(Metadata metadata, ProductType productType) throws XmlRpcException, IOException { Vector<Object> args = new Vector<Object>(); args.add(metadata.getHashTable()); args.add(XmlRpcStructFactory.getXmlRpcProductType(productType)); Metadata m = new Metadata(); m.addMetadata((Map<String, Object>) this.client.execute( "filemgr.getOrigValues", 
args)); return m; } @SuppressWarnings("unchecked") public Query getCatalogQuery(Query query, ProductType productType) throws XmlRpcException, IOException { Vector<Object> args = new Vector<Object>(); args.add(XmlRpcStructFactory.getXmlRpcQuery(query)); args.add(XmlRpcStructFactory.getXmlRpcProductType(productType)); return XmlRpcStructFactory .getQueryFromXmlRpc((Hashtable<String, Object>) this.client .execute("filemgr.getCatalogQuery", args)); } public static void main(String[] args) { CmdLineUtility cmdLineUtility = new CmdLineUtility(); cmdLineUtility.run(args); } /** * @return Returns the fileManagerUrl. */ public URL getFileManagerUrl() { return fileManagerUrl; } /** * @param fileManagerUrl The fileManagerUrl to set. */ public void setFileManagerUrl(URL fileManagerUrl) { this.fileManagerUrl = fileManagerUrl; // reset the client this.client = new XmlRpcClient(fileManagerUrl); } /** * @return Returns the dataTransfer. */ public DataTransfer getDataTransfer() { return dataTransfer; } /** * @param dataTransfer The dataTransfer to set. */ public void setDataTransfer(DataTransfer dataTransfer) { this.dataTransfer = dataTransfer; this.dataTransfer.setFileManagerUrl(this.fileManagerUrl); } @Override public void close() throws IOException { } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.direct; import static com.google.common.base.Preconditions.checkState; import com.google.auto.value.AutoValue; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.Lists; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.apache.beam.runners.core.KeyedWorkItem; import org.apache.beam.runners.core.KeyedWorkItems; import org.apache.beam.runners.core.StateNamespace; import org.apache.beam.runners.core.StateNamespaces; import org.apache.beam.runners.core.StateNamespaces.WindowNamespace; import org.apache.beam.runners.core.StateTag; import org.apache.beam.runners.core.StateTags; import org.apache.beam.runners.core.TimerInternals.TimerData; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.ParDoMultiOverrideFactory.StatefulParDo; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.DoFn; import 
org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.reflect.DoFnSignature; import org.apache.beam.sdk.transforms.reflect.DoFnSignature.StateDeclaration; import org.apache.beam.sdk.transforms.reflect.DoFnSignatures; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.util.state.StateSpec; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.TaggedPValue; import org.apache.beam.sdk.values.TupleTag; /** A {@link TransformEvaluatorFactory} for stateful {@link ParDo}. */ final class StatefulParDoEvaluatorFactory<K, InputT, OutputT> implements TransformEvaluatorFactory { private final LoadingCache<AppliedPTransformOutputKeyAndWindow<K, InputT, OutputT>, Runnable> cleanupRegistry; private final ParDoEvaluatorFactory<KV<K, InputT>, OutputT> delegateFactory; StatefulParDoEvaluatorFactory(EvaluationContext evaluationContext) { this.delegateFactory = new ParDoEvaluatorFactory<>(evaluationContext); this.cleanupRegistry = CacheBuilder.newBuilder() .weakValues() .build(new CleanupSchedulingLoader(evaluationContext)); } @Override public <T> TransformEvaluator<T> forApplication( AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) throws Exception { @SuppressWarnings({"unchecked", "rawtypes"}) TransformEvaluator<T> evaluator = (TransformEvaluator<T>) createEvaluator((AppliedPTransform) application, (CommittedBundle) inputBundle); return evaluator; } @Override public void cleanup() throws Exception { delegateFactory.cleanup(); } @SuppressWarnings({"unchecked", "rawtypes"}) private TransformEvaluator<KeyedWorkItem<K, KV<K, InputT>>> createEvaluator( AppliedPTransform< PCollection<? 
extends KeyedWorkItem<K, KV<K, InputT>>>, PCollectionTuple, StatefulParDo<K, InputT, OutputT>> application, CommittedBundle<KeyedWorkItem<K, KV<K, InputT>>> inputBundle) throws Exception { final DoFn<KV<K, InputT>, OutputT> doFn = application.getTransform().getUnderlyingParDo().getFn(); final DoFnSignature signature = DoFnSignatures.getSignature(doFn.getClass()); // If the DoFn is stateful, schedule state clearing. // It is semantically correct to schedule any number of redundant clear tasks; the // cache is used to limit the number of tasks to avoid performance degradation. if (signature.stateDeclarations().size() > 0) { for (final WindowedValue<?> element : inputBundle.getElements()) { for (final BoundedWindow window : element.getWindows()) { cleanupRegistry.get( AppliedPTransformOutputKeyAndWindow.create( application, (StructuralKey<K>) inputBundle.getKey(), window)); } } } DoFnLifecycleManagerRemovingTransformEvaluator<KV<K, InputT>> delegateEvaluator = delegateFactory.createEvaluator( (AppliedPTransform) application, inputBundle.getKey(), doFn, application.getTransform().getUnderlyingParDo().getSideInputs(), application.getTransform().getUnderlyingParDo().getMainOutputTag(), application.getTransform().getUnderlyingParDo().getSideOutputTags().getAll()); return new StatefulParDoEvaluator<>(delegateEvaluator); } private class CleanupSchedulingLoader extends CacheLoader<AppliedPTransformOutputKeyAndWindow<K, InputT, OutputT>, Runnable> { private final EvaluationContext evaluationContext; public CleanupSchedulingLoader(EvaluationContext evaluationContext) { this.evaluationContext = evaluationContext; } @Override public Runnable load( final AppliedPTransformOutputKeyAndWindow<K, InputT, OutputT> transformOutputWindow) { String stepName = evaluationContext.getStepName(transformOutputWindow.getTransform()); Map<TupleTag<?>, PCollection<?>> taggedValues = new HashMap<>(); for (TaggedPValue pv : transformOutputWindow.getTransform().getOutputs()) { 
taggedValues.put(pv.getTag(), (PCollection<?>) pv.getValue()); } PCollection<?> pc = taggedValues .get( transformOutputWindow .getTransform() .getTransform() .getUnderlyingParDo() .getMainOutputTag()); WindowingStrategy<?, ?> windowingStrategy = pc.getWindowingStrategy(); BoundedWindow window = transformOutputWindow.getWindow(); final DoFn<?, ?> doFn = transformOutputWindow.getTransform().getTransform().getUnderlyingParDo().getFn(); final DoFnSignature signature = DoFnSignatures.getSignature(doFn.getClass()); final DirectStepContext stepContext = evaluationContext .getExecutionContext( transformOutputWindow.getTransform(), transformOutputWindow.getKey()) .getOrCreateStepContext(stepName, stepName); final StateNamespace namespace = StateNamespaces.window( (Coder<BoundedWindow>) windowingStrategy.getWindowFn().windowCoder(), window); Runnable cleanup = new Runnable() { @Override public void run() { for (StateDeclaration stateDecl : signature.stateDeclarations().values()) { StateTag<Object, ?> tag; try { tag = StateTags.tagForSpec(stateDecl.id(), (StateSpec) stateDecl.field().get(doFn)); } catch (IllegalAccessException e) { throw new RuntimeException( String.format( "Error accessing %s for %s", StateSpec.class.getName(), doFn.getClass().getName()), e); } stepContext.stateInternals().state(namespace, tag).clear(); } cleanupRegistry.invalidate(transformOutputWindow); } }; evaluationContext.scheduleAfterWindowExpiration( transformOutputWindow.getTransform(), window, windowingStrategy, cleanup); return cleanup; } } @AutoValue abstract static class AppliedPTransformOutputKeyAndWindow<K, InputT, OutputT> { abstract AppliedPTransform< PCollection<? extends KeyedWorkItem<K, KV<K, InputT>>>, PCollectionTuple, StatefulParDo<K, InputT, OutputT>> getTransform(); abstract StructuralKey<K> getKey(); abstract BoundedWindow getWindow(); static <K, InputT, OutputT> AppliedPTransformOutputKeyAndWindow<K, InputT, OutputT> create( AppliedPTransform< PCollection<? 
extends KeyedWorkItem<K, KV<K, InputT>>>, PCollectionTuple, StatefulParDo<K, InputT, OutputT>> transform, StructuralKey<K> key, BoundedWindow w) { return new AutoValue_StatefulParDoEvaluatorFactory_AppliedPTransformOutputKeyAndWindow<>( transform, key, w); } } private static class StatefulParDoEvaluator<K, InputT> implements TransformEvaluator<KeyedWorkItem<K, KV<K, InputT>>> { private final DoFnLifecycleManagerRemovingTransformEvaluator<KV<K, InputT>> delegateEvaluator; public StatefulParDoEvaluator( DoFnLifecycleManagerRemovingTransformEvaluator<KV<K, InputT>> delegateEvaluator) { this.delegateEvaluator = delegateEvaluator; } @Override public void processElement(WindowedValue<KeyedWorkItem<K, KV<K, InputT>>> gbkResult) throws Exception { for (WindowedValue<KV<K, InputT>> windowedValue : gbkResult.getValue().elementsIterable()) { delegateEvaluator.processElement(windowedValue); } for (TimerData timer : gbkResult.getValue().timersIterable()) { checkState( timer.getNamespace() instanceof WindowNamespace, "Expected Timer %s to be in a %s, but got %s", timer, WindowNamespace.class.getSimpleName(), timer.getNamespace().getClass().getName()); WindowNamespace<?> windowNamespace = (WindowNamespace) timer.getNamespace(); BoundedWindow timerWindow = windowNamespace.getWindow(); delegateEvaluator.onTimer(timer, timerWindow); } } @Override public TransformResult<KeyedWorkItem<K, KV<K, InputT>>> finishBundle() throws Exception { TransformResult<KV<K, InputT>> delegateResult = delegateEvaluator.finishBundle(); StepTransformResult.Builder<KeyedWorkItem<K, KV<K, InputT>>> regroupedResult = StepTransformResult.<KeyedWorkItem<K, KV<K, InputT>>>withHold( delegateResult.getTransform(), delegateResult.getWatermarkHold()) .withTimerUpdate(delegateResult.getTimerUpdate()) .withState(delegateResult.getState()) .withAggregatorChanges(delegateResult.getAggregatorChanges()) .withMetricUpdates(delegateResult.getLogicalMetricUpdates()) 
.addOutput(Lists.newArrayList(delegateResult.getOutputBundles())); // The delegate may have pushed back unprocessed elements across multiple keys and windows. // Since processing is single-threaded per key and window, we don't need to regroup the // outputs, but just make a bunch of singletons for (WindowedValue<?> untypedUnprocessed : delegateResult.getUnprocessedElements()) { WindowedValue<KV<K, InputT>> windowedKv = (WindowedValue<KV<K, InputT>>) untypedUnprocessed; WindowedValue<KeyedWorkItem<K, KV<K, InputT>>> pushedBack = windowedKv.withValue( KeyedWorkItems.elementsWorkItem( windowedKv.getValue().getKey(), Collections.singleton(windowedKv))); regroupedResult.addUnprocessedElements(pushedBack); } return regroupedResult.build(); } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.synthetics.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/synthetics-2017-10-11/DescribeCanaries" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeCanariesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * A token that indicates that there is more data available. You can use this token in a subsequent operation to * retrieve the next set of results. * </p> */ private String nextToken; /** * <p> * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. * </p> */ private Integer maxResults; /** * <p> * Use this parameter to return only canaries that match the names that you specify here. You can specify as many as * five canary names. * </p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the request * fails with a 403 response. 
* </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html"> * Limiting a user to viewing specific canaries</a>. * </p> */ private java.util.List<String> names; /** * <p> * A token that indicates that there is more data available. You can use this token in a subsequent operation to * retrieve the next set of results. * </p> * * @param nextToken * A token that indicates that there is more data available. You can use this token in a subsequent operation * to retrieve the next set of results. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * A token that indicates that there is more data available. You can use this token in a subsequent operation to * retrieve the next set of results. * </p> * * @return A token that indicates that there is more data available. You can use this token in a subsequent * operation to retrieve the next set of results. */ public String getNextToken() { return this.nextToken; } /** * <p> * A token that indicates that there is more data available. You can use this token in a subsequent operation to * retrieve the next set of results. * </p> * * @param nextToken * A token that indicates that there is more data available. You can use this token in a subsequent operation * to retrieve the next set of results. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeCanariesRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * <p> * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. 
* </p> * * @param maxResults * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * <p> * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. * </p> * * @return Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. */ public Integer getMaxResults() { return this.maxResults; } /** * <p> * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. * </p> * * @param maxResults * Specify this parameter to limit how many canaries are returned each time you use the * <code>DescribeCanaries</code> operation. If you omit this parameter, the default of 100 is used. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeCanariesRequest withMaxResults(Integer maxResults) { setMaxResults(maxResults); return this; } /** * <p> * Use this parameter to return only canaries that match the names that you specify here. You can specify as many as * five canary names. * </p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the request * fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. 
For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html"> * Limiting a user to viewing specific canaries</a>. * </p> * * @return Use this parameter to return only canaries that match the names that you specify here. You can specify as * many as five canary names.</p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the * request fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html" * > Limiting a user to viewing specific canaries</a>. */ public java.util.List<String> getNames() { return names; } /** * <p> * Use this parameter to return only canaries that match the names that you specify here. You can specify as many as * five canary names. * </p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the request * fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html"> * Limiting a user to viewing specific canaries</a>. * </p> * * @param names * Use this parameter to return only canaries that match the names that you specify here. 
You can specify as * many as five canary names.</p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the * request fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html" * > Limiting a user to viewing specific canaries</a>. */ public void setNames(java.util.Collection<String> names) { if (names == null) { this.names = null; return; } this.names = new java.util.ArrayList<String>(names); } /** * <p> * Use this parameter to return only canaries that match the names that you specify here. You can specify as many as * five canary names. * </p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the request * fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html"> * Limiting a user to viewing specific canaries</a>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setNames(java.util.Collection)} or {@link #withNames(java.util.Collection)} if you want to override the * existing values. * </p> * * @param names * Use this parameter to return only canaries that match the names that you specify here. 
You can specify as * many as five canary names.</p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the * request fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html" * > Limiting a user to viewing specific canaries</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeCanariesRequest withNames(String... names) { if (this.names == null) { setNames(new java.util.ArrayList<String>(names.length)); } for (String ele : names) { this.names.add(ele); } return this; } /** * <p> * Use this parameter to return only canaries that match the names that you specify here. You can specify as many as * five canary names. * </p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the request * fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html"> * Limiting a user to viewing specific canaries</a>. * </p> * * @param names * Use this parameter to return only canaries that match the names that you specify here. 
You can specify as * many as five canary names.</p> * <p> * If you specify this parameter, the operation is successful only if you have authorization to view all the * canaries that you specify in your request. If you do not have permission to view any of the canaries, the * request fails with a 403 response. * </p> * <p> * You are required to use this parameter if you are logged on to a user or role that has an IAM policy that * restricts which canaries that you are allowed to view. For more information, see <a href= * "https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Restricted.html" * > Limiting a user to viewing specific canaries</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeCanariesRequest withNames(java.util.Collection<String> names) { setNames(names); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()).append(","); if (getNames() != null) sb.append("Names: ").append(getNames()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeCanariesRequest == false) return false; DescribeCanariesRequest other = (DescribeCanariesRequest) obj; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; if (other.getNames() == null ^ this.getNames() == null) return false; if (other.getNames() != null && other.getNames().equals(this.getNames()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode()); hashCode = prime * hashCode + ((getNames() == null) ? 0 : getNames().hashCode()); return hashCode; } @Override public DescribeCanariesRequest clone() { return (DescribeCanariesRequest) super.clone(); } }
/****************************************************************
 * Licensed to the AOS Community (AOS) under one or more        *
 * contributor license agreements.  See the NOTICE file         *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The AOS licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/
package aos.data.yaml.collections;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

import junit.framework.TestCase;

import org.junit.Ignore;
import org.yaml.snakeyaml.Yaml;

import aos.data.yaml.Util;

/**
 * Tests SnakeYAML round-tripping of a bean ({@link MapBean2}) that holds maps with
 * typed keys and values: {@code Map<Developer2, Color>} and
 * {@code Map<Color, Developer2>}. The {@code Developer2} class must be properly
 * recognised on both dump and load, including the {@code SuperMan} subclass.
 * <p>
 * Dump output is compared against "etalon" (reference) YAML files under
 * {@code examples/}, so these tests are sensitive to map insertion order
 * (hence {@link LinkedHashMap}) and to the exact SnakeYAML emitter format.
 * <p>
 * NOTE(review): {@code @Ignore} is a JUnit 4 annotation, while this class extends the
 * JUnit 3 {@link TestCase}; a JUnit 3 runner will ignore the annotation and still run
 * the tests — confirm which runner is intended.
 */
@Ignore
public class TypeSafeMap2Test extends TestCase {

    /** Dumps a bean with both typed maps and compares against examples/map-bean-12.yaml. */
    public void testDumpMap() {
        MapBean2 bean = new MapBean2();
        Map<Developer2, Color> data = new LinkedHashMap<Developer2, Color>();
        data.put(new Developer2("Andy", "tester"), Color.BLACK);
        data.put(new Developer2("Lisa", "owner"), Color.RED);
        bean.setData(data);
        Map<Color, Developer2> developers = new LinkedHashMap<Color, Developer2>();
        developers.put(Color.WHITE, new Developer2("Fred", "creator"));
        developers.put(Color.BLACK, new Developer2("John", "committer"));
        bean.setDevelopers(developers);
        Yaml yaml = new Yaml();
        // dumpAsMap emits the bean as a plain mapping (no root type tag).
        String output = yaml.dumpAsMap(bean);
        // System.out.println(output);
        String etalon = Util.getLocalResource("examples/map-bean-12.yaml");
        assertEquals(etalon, output);
    }

    /**
     * Same as {@link #testDumpMap()} but with {@link SuperMan} subclass instances mixed
     * into both maps (etalon: examples/map-bean-13.yaml), then loads the etalon back
     * and checks the subclass instances survive the round trip.
     */
    public void testMap2() {
        MapBean2 bean = new MapBean2();
        Map<Developer2, Color> data = new LinkedHashMap<Developer2, Color>();
        data.put(new Developer2("Andy", "tester"), Color.BLACK);
        data.put(new SuperMan("Bill", "cleaner", false), Color.BLACK);
        data.put(new Developer2("Lisa", "owner"), Color.RED);
        bean.setData(data);
        Map<Color, Developer2> developers = new LinkedHashMap<Color, Developer2>();
        developers.put(Color.WHITE, new Developer2("Fred", "creator"));
        developers.put(Color.RED, new SuperMan("Jason", "contributor", true));
        developers.put(Color.BLACK, new Developer2("John", "committer"));
        bean.setDevelopers(developers);
        Yaml yaml = new Yaml();
        String output = yaml.dumpAsMap(bean);
        // System.out.println(output);
        String etalon = Util.getLocalResource("examples/map-bean-13.yaml");
        assertEquals(etalon, output);
        // load
        Yaml beanLoader = new Yaml();
        MapBean2 parsed = beanLoader.loadAs(etalon, MapBean2.class);
        assertNotNull(parsed);
        Map<Developer2, Color> parsedData = parsed.getData();
        assertEquals(3, parsedData.size());
        // Lookup works because Developer2 equality/hashing is based on toString().
        assertTrue(parsedData.containsKey(new SuperMan("Bill", "cleaner", false)));
        assertEquals(Color.BLACK, parsedData.get(new SuperMan("Bill", "cleaner", false)));
        //
        Map<Color, Developer2> parsedDevelopers = parsed.getDevelopers();
        assertEquals(3, parsedDevelopers.size());
        assertEquals(new SuperMan("Jason", "contributor", true), parsedDevelopers.get(Color.RED));
    }

    /** Loads examples/map-bean-12.yaml and verifies keys, values and iteration order. */
    public void testLoadMap() {
        String output = Util.getLocalResource("examples/map-bean-12.yaml");
        // System.out.println(output);
        Yaml beanLoader = new Yaml();
        MapBean2 parsed = beanLoader.loadAs(output, MapBean2.class);
        assertNotNull(parsed);
        Map<Developer2, Color> data = parsed.getData();
        assertEquals(2, data.size());
        // Iteration order assertions rely on the loaded map preserving document order.
        Iterator<Developer2> iter = data.keySet().iterator();
        Developer2 first = iter.next();
        assertEquals("Andy", first.getName());
        assertEquals("tester", first.getRole());
        assertEquals(Color.BLACK, data.get(first));
        Developer2 second = iter.next();
        assertEquals("Lisa", second.getName());
        assertEquals("owner", second.getRole());
        assertEquals(Color.RED, data.get(second));
        //
        Map<Color, Developer2> developers = parsed.getDevelopers();
        assertEquals(2, developers.size());
        Iterator<Color> iter2 = developers.keySet().iterator();
        Color firstColor = iter2.next();
        assertEquals(Color.WHITE, firstColor);
        Developer2 dev1 = developers.get(firstColor);
        assertEquals("Fred", dev1.getName());
        assertEquals("creator", dev1.getRole());
        Color secondColor = iter2.next();
        assertEquals(Color.BLACK, secondColor);
        Developer2 dev2 = developers.get(secondColor);
        assertEquals("John", dev2.getName());
        assertEquals("committer", dev2.getRole());
    }

    /** Enum used both as map key and map value in the fixtures. */
    public static enum Color {
        WHITE, BLACK, RED;
    }

    /** Bean under test: two typed maps plus a defaulted name field. */
    public static class MapBean2 {
        private Map<Developer2, Color> data;
        private String name;
        private Map<Color, Developer2> developers;

        public MapBean2() {
            // Default value; presumably present in the etalon YAML files — verify there.
            name = "Bean123";
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public Map<Color, Developer2> getDevelopers() {
            return developers;
        }

        public void setDevelopers(Map<Color, Developer2> developers) {
            this.developers = developers;
        }

        public Map<Developer2, Color> getData() {
            return data;
        }

        public void setData(Map<Developer2, Color> data) {
            this.data = data;
        }
    }

    /**
     * Simple bean used as a map key and value. Equality and hashing delegate to
     * {@link #toString()}, which lets {@link SuperMan} instances compare distinctly.
     */
    public static class Developer2 implements Comparable<Developer2> {
        private String name;
        private String role;

        // No-arg constructor kept for bean-style instantiation — presumably used by
        // SnakeYAML during loading; the two-arg constructor is test-internal only.
        public Developer2() {
        }

        private Developer2(String name, String role) {
            this.name = name;
            this.role = role;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getRole() {
            return role;
        }

        public void setRole(String role) {
            this.role = role;
        }

        // Ordering by name only — intentionally not consistent with equals(),
        // which also considers role via toString().
        public int compareTo(Developer2 o) {
            return name.compareTo(o.name);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof Developer2) {
                return toString().equals(obj.toString());
            } else {
                return false;
            }
        }

        @Override
        public int hashCode() {
            return toString().hashCode();
        }

        @Override
        public String toString() {
            return "Developer " + name + " " + role;
        }
    }

    /** Subclass exercised to check that subtype info survives dump/load. */
    public static class SuperMan extends Developer2 {
        private boolean smart;

        public SuperMan() {
            super();
        }

        private SuperMan(String name, String role, boolean smart) {
            super(name, role);
            this.smart = smart;
        }

        public boolean isSmart() {
            return smart;
        }

        public void setSmart(boolean smart) {
            this.smart = smart;
        }

        // Prefix makes SuperMan("X","Y") unequal to Developer2("X","Y").
        @Override
        public String toString() {
            return "Super" + super.toString();
        }
    }
}
package model.docBot;

import adam.IAdam;
import exceptions.UnknownTypeException;
import exceptions.UnknownUserException;
import model.GridPlane;

/**
 * This class controls the robot and the environment the robot is in,
 * by increasing or decreasing the field values corresponding to the specified user and type.
 * <p>
 * The grid is addressed by (user index, type index); a special "depot" row holds the
 * container stock per type. Movement is performed as axis-aligned legs with 90-degree
 * turns, and the robot's believed position is tracked in {@link DocBotEnvironment}.
 * @author Jochen Joswig
 *
 */
public class SimpleBinaryPilot implements IPilot{
	// Collaborators are injected and never replaced after construction.
	protected final IAdam robot;
	protected final GridPlane grid;
	protected final DocBotEnvironment environment;

	public SimpleBinaryPilot(GridPlane grid, IAdam robot, DocBotEnvironment environment){
		this.grid = grid;
		this.robot = robot;
		this.environment = environment;
	}

	/**
	 * Use this method to increase the amount of containers associated with a given user and type by 1.
	 * Drives to the depot, grabs a container, delivers it to the (user, type) cell, and
	 * updates the grid count. On an unknown user/type the error is logged and false returned.
	 * @param type the container type to fetch from the depot
	 * @param user the user whose cell receives the container
	 * @return true when the container was delivered and the grid updated, false on unknown user/type
	 */
	@Override
	public boolean increment(String user, String type){
		try{
			int userDelta = this.grid.getUserIndex(user) - this.environment.getBotUserPosition();
			int typeDelta = this.grid.getTypeIndex(type) - this.environment.getBotTypePosition();
			//if the robot is already at the right place it does not need to move so we can skip that.
			// NOTE(review): with '&&' the trip to the depot is skipped whenever EITHER delta is
			// zero, not only when both are; per the comment's stated intent this looks like it
			// should be '(userDelta != 0 || typeDelta != 0)'. Also note the deltas compare
			// against the DESTINATION cell although the guarded move targets the DEPOT — confirm
			// the intended condition before changing it.
			if(userDelta != 0 && typeDelta != 0){
				this.moveRobotToDepot(type);
			}
			// Busy-waits assume grab()/drop() report failure transiently and eventually
			// succeed — TODO confirm against the IAdam contract.
			while(!this.robot.grab()){}
			this.moveRobotToDestination(user, type);
			while(!this.robot.drop()){}
			this.grid.increment(user, type);
			return true;
		}catch(UnknownUserException e){
			System.err.println("Unknown user: " + user);
		}catch(UnknownTypeException e){
			System.err.println("Unknown type: "+ type);
		}
		return false;
	}

	/**
	 * Use this method to decrease the amount of containers associated with a given user and type by 1.
	 * Drives to the (user, type) cell, grabs a container, returns it to the depot, and
	 * updates the grid count. On an unknown user/type the error is logged and false returned.
	 * @param type the container type being returned to the depot
	 * @param user the user whose cell loses a container
	 * @return true when the container was returned and the grid updated, false on unknown user/type
	 */
	@Override
	public boolean decrement(String user, String type){
		try{
			int userDelta = this.grid.getUserIndex(user) - this.environment.getBotUserPosition();
			int typeDelta = this.grid.getTypeIndex(type) - this.environment.getBotTypePosition();
			//if the robot is already at the right place it does not need to move so we can skip that.
			// NOTE(review): same operator concern as in increment(): '&&' skips the move when
			// either delta is zero; "already at the right place" implies BOTH deltas are zero,
			// i.e. the condition should likely be '(userDelta != 0 || typeDelta != 0)'.
			if(userDelta != 0 && typeDelta != 0){
				this.moveRobotToDestination(user, type);
			}
			while(!this.robot.grab()){}
			this.moveRobotToDepot(type);
			while(!this.robot.drop()){}
			this.grid.decrement(user, type);
			return true;
		}catch(UnknownUserException e){
			System.err.println("Unknown user: " + user);
		}catch(UnknownTypeException e){
			System.err.println("Unknown type: "+ type);
		}
		return false;
	}

	/**
	 * Use this method to send the robot to the depot of a given type.
	 * It will then calculate the way there.
	 * Once it reached the depot it will grab an item out of that depot and stop.
	 * Navigation is two axis-aligned legs (type axis, then user axis) with an extra
	 * half-square shuffle at the end to center on the depot cell; the believed position
	 * in the environment is updated on success.
	 * @param type
	 */
	private void moveRobotToDepot(String type){
		//horizontal movement
		try{
			double typeDelta = (this.grid.getTypeIndex(type) - this.environment.getBotTypePosition());
			// Turn to face the depot column; sign of the delta decides left vs right.
			if(typeDelta < 0){
				while(!this.robot.turnRight(90)){}
			} else {
				while(!this.robot.turnLeft(90)){}
			}
			// Convert the cell delta into a distance: square width plus the robot's
			// own footprint per cell.
			typeDelta = typeDelta * this.environment.getSquareWidth() + typeDelta * this.environment.getMaxDocBotMeasurements();
			while(!this.robot.moveForward(Math.abs(typeDelta) + (this.environment.getSquareWidth()/2) + (this.environment.getMaxDocBotMeasurements()/2))){}
			//vertical movement
			double userDelta = (this.grid.getUserIndex("depot") - this.environment.getBotUserPosition());
			// The turn direction depends on which way we are currently facing
			// (set by the horizontal leg) combined with the vertical delta's sign.
			if(typeDelta < 0 && userDelta < 0){
				while(!this.robot.turnRight(90)){}
			}else if(typeDelta < 0 && userDelta >= 0){
				while(!this.robot.turnLeft(90)){}
			}else if(typeDelta >= 0 && userDelta < 0){
				while(!this.robot.turnLeft(90)){}
			}else if(typeDelta >= 0 && userDelta >= 0){
				while(!this.robot.turnRight(90)){}
			}
			userDelta = userDelta * this.environment.getSquareHeight() + userDelta * this.environment.getMaxDocBotMeasurements();
			while(!this.robot.moveForward(Math.abs(userDelta))){}
			// Final half-square shuffle to center the robot on the depot cell.
			if(typeDelta < 0){
				while(!this.robot.turnRight(90)){}
			} else {
				while(!this.robot.turnLeft(90)){}
			}
			while(!this.robot.moveForward((this.environment.getMaxDocBotMeasurements() / 2) + (this.environment.getSquareWidth() / 2))){}
			if(typeDelta < 0){
				while(!this.robot.turnRight(90)){}
			} else {
				while(!this.robot.turnLeft(90)){}
			}
			// Record the believed position only after all legs completed.
			this.environment.setBotUserPosition(this.grid.getUserIndex("depot"));
			this.environment.setBotTypePosition(this.grid.getTypeIndex(type));
		}catch(UnknownUserException e){
			System.err.println("The depot is unknown.");
		}catch(UnknownTypeException e){
			System.err.println("Unknown type: "+ type);
		}
	}

	/**
	 * Use this method to move the robot from the depot to the desired location, given by user and type.
	 * Mirrors {@link #moveRobotToDepot(String)} but targets the (user, type) cell; note
	 * the final turns use the opposite sign convention (typeDelta &gt; 0) from the depot path.
	 * @param user
	 * @param type
	 */
	private void moveRobotToDestination(String user, String type){
		try{
			//horizontal movement
			double typeDelta = (this.grid.getTypeIndex(type) - this.environment.getBotTypePosition());
			if(typeDelta < 0){
				while(!this.robot.turnRight(90)){}
			} else {
				while(!this.robot.turnLeft(90)){};
			}
			typeDelta = typeDelta * this.environment.getSquareWidth() + typeDelta * this.environment.getMaxDocBotMeasurements();
			while(!this.robot.moveForward(Math.abs(typeDelta) + (this.environment.getSquareWidth()/2) + (this.environment.getMaxDocBotMeasurements()/2))){};
			//vertical movement
			double userDelta = (this.grid.getUserIndex(user) - this.environment.getBotUserPosition());
			if(typeDelta < 0 && userDelta < 0){
				while(!this.robot.turnRight(90)){}
			}else if(typeDelta < 0 && userDelta >= 0){
				while(!this.robot.turnLeft(90)){}
			}else if(typeDelta >= 0 && userDelta < 0){
				while(!this.robot.turnLeft(90)){}
			}else if(typeDelta >= 0 && userDelta >= 0){
				while(!this.robot.turnRight(90)){}
			}
			userDelta = userDelta * this.environment.getSquareHeight() + userDelta * this.environment.getMaxDocBotMeasurements();
			while(!this.robot.moveForward(Math.abs(userDelta))){}
			if(typeDelta > 0){
				while(!this.robot.turnLeft(90)){}
			} else {
				while(!this.robot.turnRight(90)){}
			}
			while(!this.robot.moveForward((this.environment.getMaxDocBotMeasurements() / 2) + (this.environment.getSquareWidth() / 2))){}
			//TODO add if to check which way to turn is right (eventually not needed)
			if(typeDelta > 0){
				while(!this.robot.turnRight(90)){}
			} else {
				while(!this.robot.turnLeft(90)){}
			}
			this.environment.setBotUserPosition(this.grid.getUserIndex(user));
			this.environment.setBotTypePosition(this.grid.getTypeIndex(type));
		}catch(UnknownUserException e){
			System.err.println("Unknown user: " + user);
		}catch(UnknownTypeException e){
			System.err.println("Unknown type: "+ type);
		}
	}

	/** Returns the grid this pilot operates on. */
	public GridPlane getGridPlane(){
		return this.grid;
	}

	/** Debug helper: prints the believed robot position followed by the grid contents. */
	public void printGrid(){
		System.out.println(this.environment.getBotTypePosition() + " " + this.environment.getBotUserPosition());
		this.grid.printGridPlane();
	}
}
/*

 Copyright 1999-2003  The Apache Software Foundation

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.

 */
package org.apache.batik.parser;

import org.w3c.dom.Element;
import org.w3c.dom.svg.SVGLength;

/**
 * This class provides methods to convert SVG length and coordinate to
 * float in user units.
 * <p>
 * All conversions are driven by a {@link Context} that supplies the px-to-mm
 * ratio, font metrics and viewport size needed to resolve relative units.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a>
 * @version $Id$
 */
public abstract class UnitProcessor {

    /**
     * This constant represents horizontal lengths.
     */
    public final static short HORIZONTAL_LENGTH = 2;

    /**
     * This constant represents vertical lengths.
     */
    public final static short VERTICAL_LENGTH = 1;

    /**
     * This constant represents other lengths.
     */
    public final static short OTHER_LENGTH = 0;

    /**
     * No instance of this class is required.
     */
    protected UnitProcessor() {
    }

    /**
     * Returns the specified value with the specified direction in
     * objectBoundingBox units.
     *
     * @param s the value
     * @param attr the attribute name that represents the value
     *             (NOTE(review): currently unused by this method — kept for API symmetry)
     * @param d the direction of the value
     * @param ctx the context used to resolve relative value
     */
    public static float svgToObjectBoundingBox(String s,
                                               String attr,
                                               short d,
                                               Context ctx) throws ParseException {
        // Parse the raw attribute text into (value, unit) via a UnitResolver,
        // then delegate to the numeric overload.
        LengthParser lengthParser = new LengthParser();
        UnitResolver ur = new UnitResolver();
        lengthParser.setLengthHandler(ur);
        lengthParser.parse(s);
        return svgToObjectBoundingBox(ur.value, ur.unit, d, ctx);
    }

    /**
     * Returns the specified value with the specified direction in
     * objectBoundingBox units.
     *
     * @param value the value
     * @param type the type of the value
     * @param d the direction of the value
     * @param ctx the context used to resolve relative value
     */
    public static float svgToObjectBoundingBox(float value,
                                               short type,
                                               short d,
                                               Context ctx) {
        switch (type) {
        case SVGLength.SVG_LENGTHTYPE_NUMBER:
            // as is
            return value;
        case SVGLength.SVG_LENGTHTYPE_PERCENTAGE:
            // If a percentage value is used, it is converted to a
            // 'bounding box' space coordinate by division by 100
            return value / 100f;
        case SVGLength.SVG_LENGTHTYPE_PX:
        case SVGLength.SVG_LENGTHTYPE_MM:
        case SVGLength.SVG_LENGTHTYPE_CM:
        case SVGLength.SVG_LENGTHTYPE_IN:
        case SVGLength.SVG_LENGTHTYPE_PT:
        case SVGLength.SVG_LENGTHTYPE_PC:
        case SVGLength.SVG_LENGTHTYPE_EMS:
        case SVGLength.SVG_LENGTHTYPE_EXS:
            // <!> FIXME: resolve units in userSpace but consider them
            // in the objectBoundingBox coordinate system
            return svgToUserSpace(value, type, d, ctx);
        default:
            throw new IllegalArgumentException("Length has unknown type");
        }
    }

    /////////////////////////////////////////////////////////////////////////
    // SVG methods - userSpace
    /////////////////////////////////////////////////////////////////////////

    /**
     * Returns the specified coordinate with the specified direction
     * in user units.
     *
     * @param s the 'other' coordinate
     * @param attr the attribute name that represents the length
     *             (NOTE(review): currently unused by this method — kept for API symmetry)
     * @param d the direction of the coordinate
     * @param ctx the context used to resolve relative value
     */
    public static float svgToUserSpace(String s,
                                       String attr,
                                       short d,
                                       Context ctx) throws ParseException {
        LengthParser lengthParser = new LengthParser();
        UnitResolver ur = new UnitResolver();
        lengthParser.setLengthHandler(ur);
        lengthParser.parse(s);
        return svgToUserSpace(ur.value, ur.unit, d, ctx);
    }

    /**
     * Converts the specified value of the specified type and
     * direction to user units.
     * <p>
     * Absolute units are resolved through the context's px-size-in-mm ratio
     * (see {@link Context#getPixelUnitToMillimeter()}); relative units (em, ex,
     * percentage) are resolved against font metrics or the viewport.
     *
     * @param v the value to convert
     * @param type the type of the value
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context used to resolve relative value
     */
    public static float svgToUserSpace(float v,
                                       short type,
                                       short d,
                                       Context ctx) {
        switch (type) {
        case SVGLength.SVG_LENGTHTYPE_NUMBER:
        case SVGLength.SVG_LENGTHTYPE_PX:
            return v;
        case SVGLength.SVG_LENGTHTYPE_MM:
            // mm -> px: divide by (mm per px).
            return (v / ctx.getPixelUnitToMillimeter());
        case SVGLength.SVG_LENGTHTYPE_CM:
            // cm -> mm (x10) -> px.
            return (v * 10f / ctx.getPixelUnitToMillimeter());
        case SVGLength.SVG_LENGTHTYPE_IN:
            // 1 in = 25.4 mm.
            return (v * 25.4f / ctx.getPixelUnitToMillimeter());
        case SVGLength.SVG_LENGTHTYPE_PT:
            // 72 pt = 1 in = 25.4 mm.
            return (v * 25.4f / (72f * ctx.getPixelUnitToMillimeter()));
        case SVGLength.SVG_LENGTHTYPE_PC:
            // 6 pc = 1 in = 25.4 mm.
            return (v * 25.4f / (6f * ctx.getPixelUnitToMillimeter()));
        case SVGLength.SVG_LENGTHTYPE_EMS:
            return emsToPixels(v, d, ctx);
        case SVGLength.SVG_LENGTHTYPE_EXS:
            return exsToPixels(v, d, ctx);
        case SVGLength.SVG_LENGTHTYPE_PERCENTAGE:
            return percentagesToPixels(v, d, ctx);
        default:
            throw new IllegalArgumentException("Length has unknown type");
        }
    }

    /**
     * Converts the specified value of the specified type and
     * direction to SVG units.
     * <p>
     * Exact inverse of {@link #svgToUserSpace(float, short, short, Context)}
     * case by case.
     *
     * @param v the value to convert
     * @param type the type of the value
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context used to resolve relative value
     */
    public static float userSpaceToSVG(float v,
                                       short type,
                                       short d,
                                       Context ctx) {
        switch (type) {
        case SVGLength.SVG_LENGTHTYPE_NUMBER:
        case SVGLength.SVG_LENGTHTYPE_PX:
            return v;
        case SVGLength.SVG_LENGTHTYPE_MM:
            return (v * ctx.getPixelUnitToMillimeter());
        case SVGLength.SVG_LENGTHTYPE_CM:
            return (v * ctx.getPixelUnitToMillimeter() / 10f);
        case SVGLength.SVG_LENGTHTYPE_IN:
            return (v * ctx.getPixelUnitToMillimeter() / 25.4f);
        case SVGLength.SVG_LENGTHTYPE_PT:
            return (v * (72f * ctx.getPixelUnitToMillimeter()) / 25.4f);
        case SVGLength.SVG_LENGTHTYPE_PC:
            return (v * (6f * ctx.getPixelUnitToMillimeter()) / 25.4f);
        case SVGLength.SVG_LENGTHTYPE_EMS:
            return pixelsToEms(v, d, ctx);
        case SVGLength.SVG_LENGTHTYPE_EXS:
            return pixelsToExs(v, d, ctx);
        case SVGLength.SVG_LENGTHTYPE_PERCENTAGE:
            return pixelsToPercentages(v, d, ctx);
        default:
            throw new IllegalArgumentException("Length has unknown type");
        }
    }

    /////////////////////////////////////////////////////////////////////////
    // Utilities methods for relative length
    /////////////////////////////////////////////////////////////////////////

    /**
     * Converts percentages to user units.
     *
     * @param v the percentage to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float percentagesToPixels(float v, short d, Context ctx) {
        if (d == HORIZONTAL_LENGTH) {
            float w = ctx.getViewportWidth();
            return w * v / 100f;
        } else if (d == VERTICAL_LENGTH) {
            float h = ctx.getViewportHeight();
            return h * v / 100f;
        } else {
            // "Other" lengths resolve against the normalized viewport diagonal
            // sqrt(w^2 + h^2) / sqrt(2), per the SVG specification.
            double w = ctx.getViewportWidth();
            double h = ctx.getViewportHeight();
            double vpp = Math.sqrt(w * w + h * h) / Math.sqrt(2);
            return (float)(vpp * v / 100d);
        }
    }

    /**
     * Converts user units to percentages relative to the viewport.
     *
     * @param v the value to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float pixelsToPercentages(float v, short d, Context ctx) {
        if (d == HORIZONTAL_LENGTH) {
            float w = ctx.getViewportWidth();
            return v * 100f / w;
        } else if (d == VERTICAL_LENGTH) {
            float h = ctx.getViewportHeight();
            return v * 100f / h;
        } else {
            double w = ctx.getViewportWidth();
            double h = ctx.getViewportHeight();
            double vpp = Math.sqrt(w * w + h * h) / Math.sqrt(2);
            return (float)(v * 100d / vpp);
        }
    }

    /**
     * Converts user units to ems units.
     *
     * @param v the value to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float pixelsToEms(float v, short d, Context ctx) {
        return v / ctx.getFontSize();
    }

    /**
     * Converts ems units to user units.
     *
     * @param v the value to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float emsToPixels(float v, short d, Context ctx) {
        return v * ctx.getFontSize();
    }

    /**
     * Converts user units to exs units.
     * Inverse of {@link #exsToPixels(float, short, Context)}: the context's
     * x-height is combined with the font size to yield the ex size in pixels.
     *
     * @param v the value to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float pixelsToExs(float v, short d, Context ctx) {
        float xh = ctx.getXHeight();
        return v / xh / ctx.getFontSize();
    }

    /**
     * Converts exs units to user units.
     *
     * @param v the value to convert
     * @param d HORIZONTAL_LENGTH, VERTICAL_LENGTH, or OTHER_LENGTH
     * @param ctx the context
     */
    protected static float exsToPixels(float v, short d, Context ctx) {
        float xh = ctx.getXHeight();
        return v * xh * ctx.getFontSize();
    }

    /**
     * A LengthHandler that converts units: it records the parsed numeric value
     * and unit type reported by a {@link LengthParser} for later conversion.
     */
    public static class UnitResolver implements LengthHandler {

        /**
         * The length value.
         */
        public float value;

        /**
         * The length type. Defaults to a plain number when the parser reports
         * no explicit unit.
         */
        public short unit = SVGLength.SVG_LENGTHTYPE_NUMBER;

        /**
         * Implements {@link LengthHandler#startLength()}.
         */
        public void startLength() throws ParseException {
        }

        /**
         * Implements {@link LengthHandler#lengthValue(float)}.
         */
        public void lengthValue(float v) throws ParseException {
            this.value = v;
        }

        /**
         * Implements {@link LengthHandler#em()}.
         */
        public void em() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_EMS;
        }

        /**
         * Implements {@link LengthHandler#ex()}.
         */
        public void ex() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_EXS;
        }

        /**
         * Implements {@link LengthHandler#in()}.
         */
        public void in() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_IN;
        }

        /**
         * Implements {@link LengthHandler#cm()}.
         */
        public void cm() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_CM;
        }

        /**
         * Implements {@link LengthHandler#mm()}.
         */
        public void mm() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_MM;
        }

        /**
         * Implements {@link LengthHandler#pc()}.
         */
        public void pc() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_PC;
        }

        /**
         * Implements {@link LengthHandler#pt()}.
         */
        public void pt() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_PT;
        }

        /**
         * Implements {@link LengthHandler#px()}.
         */
        public void px() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_PX;
        }

        /**
         * Implements {@link LengthHandler#percentage()}.
         */
        public void percentage() throws ParseException {
            this.unit = SVGLength.SVG_LENGTHTYPE_PERCENTAGE;
        }

        /**
         * Implements {@link LengthHandler#endLength()}.
         */
        public void endLength() throws ParseException {
        }
    }

    /**
     * Holds the informations needed to compute the units.
     */
    public interface Context {

        /**
         * Returns the element.
         */
        Element getElement();

        /**
         * Returns the size of a px CSS unit in millimeters.
         */
        float getPixelUnitToMillimeter();

        /**
         * Returns the size of a px CSS unit in millimeters.
         * This will be removed after next release.
         * @see #getPixelUnitToMillimeter()
         */
        float getPixelToMM();

        /**
         * Returns the font-size value.
         */
        float getFontSize();

        /**
         * Returns the x-height value.
         */
        float getXHeight();

        /**
         * Returns the viewport width used to compute units.
         */
        float getViewportWidth();

        /**
         * Returns the viewport height used to compute units.
         */
        float getViewportHeight();
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.popup; import com.intellij.CommonBundle; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.IdeTooltipManager; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.actionSystem.ex.ActionUtil; import com.intellij.openapi.actionSystem.impl.ActionMenu; import com.intellij.openapi.actionSystem.impl.Utils; import com.intellij.openapi.application.ex.ApplicationEx; import com.intellij.openapi.application.ex.ApplicationManagerEx; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.CaretModel; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.VisualPosition; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.ui.popup.*; import com.intellij.openapi.ui.popup.util.BaseListPopupStep; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.Key; import com.intellij.openapi.wm.WindowManager; import com.intellij.openapi.wm.ex.WindowManagerEx; import com.intellij.openapi.wm.impl.IdeFrameImpl; 
import com.intellij.ui.ColorUtil; import com.intellij.ui.FocusTrackback; import com.intellij.ui.HintHint; import com.intellij.ui.awt.RelativePoint; import com.intellij.ui.components.panels.NonOpaquePanel; import com.intellij.ui.popup.list.IconListPopupRenderer; import com.intellij.ui.popup.list.ListPopupImpl; import com.intellij.ui.popup.mock.MockConfirmation; import com.intellij.ui.popup.tree.TreePopupImpl; import com.intellij.util.IconUtil; import com.intellij.util.ObjectUtils; import com.intellij.util.PlatformIcons; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.containers.WeakHashMap; import com.intellij.util.ui.StatusText; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.EmptyBorder; import javax.swing.event.HyperlinkListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import static com.intellij.openapi.actionSystem.Presentation.*; public class PopupFactoryImpl extends JBPopupFactory { /** * Allows to get an editor position for which a popup with auxiliary information might be shown. * <p/> * Primary intention for this key is to hint popup position for the non-caret location. 
*/ public static final Key<VisualPosition> ANCHOR_POPUP_POSITION = Key.create("popup.anchor.position"); private static final Logger LOG = Logger.getInstance("#com.intellij.ui.popup.PopupFactoryImpl"); private final Map<Disposable, List<Balloon>> myStorage = new WeakHashMap<>(); @NotNull @Override public ListPopup createConfirmation(String title, final Runnable onYes, int defaultOptionIndex) { return createConfirmation(title, CommonBundle.getYesButtonText(), CommonBundle.getNoButtonText(), onYes, defaultOptionIndex); } @NotNull @Override public ListPopup createConfirmation(String title, final String yesText, String noText, final Runnable onYes, int defaultOptionIndex) { return createConfirmation(title, yesText, noText, onYes, EmptyRunnable.getInstance(), defaultOptionIndex); } @NotNull @Override public JBPopup createMessage(String text) { return createListPopup(new BaseListPopupStep<>(null, new String[]{text})); } @Override public Balloon getParentBalloonFor(@Nullable Component c) { if (c == null) return null; Component eachParent = c; while (eachParent != null) { if (eachParent instanceof JComponent) { Object balloon = ((JComponent)eachParent).getClientProperty(Balloon.KEY); if (balloon instanceof Balloon) { return (Balloon)balloon; } } eachParent = eachParent.getParent(); } return null; } @NotNull @Override public ListPopup createConfirmation(String title, final String yesText, String noText, final Runnable onYes, final Runnable onNo, int defaultOptionIndex) { final BaseListPopupStep<String> step = new BaseListPopupStep<String>(title, yesText, noText) { @Override public PopupStep onChosen(String selectedValue, final boolean finalChoice) { return doFinalStep(selectedValue.equals(yesText) ? 
onYes : onNo); } @Override public void canceled() { onNo.run(); } @Override public boolean isMnemonicsNavigationEnabled() { return true; } }; step.setDefaultOptionIndex(defaultOptionIndex); final ApplicationEx app = ApplicationManagerEx.getApplicationEx(); return app == null || !app.isUnitTestMode() ? new ListPopupImpl(step) : new MockConfirmation(step, yesText); } private static ListPopup createActionGroupPopup(final String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics, final Runnable disposeCallback, final int maxRowCount) { return createActionGroupPopup(title, actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics, disposeCallback, maxRowCount, null, null); } public ListPopup createActionGroupPopup(final String title, final ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean showDisabledActions, boolean honorActionMnemonics, final Runnable disposeCallback, final int maxRowCount) { return createActionGroupPopup(title, actionGroup, dataContext, showNumbers, showDisabledActions, honorActionMnemonics, disposeCallback, maxRowCount, null); } @NotNull public ListPopup createActionGroupPopup(String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, ActionSelectionAid aid, boolean showDisabledActions, Runnable disposeCallback, int maxRowCount, Condition<AnAction> preselectActionCondition, @Nullable String actionPlace) { return new ActionGroupPopup(title, actionGroup, dataContext, aid == ActionSelectionAid.ALPHA_NUMBERING || aid == ActionSelectionAid.NUMBERING, aid == ActionSelectionAid.ALPHA_NUMBERING, showDisabledActions, aid == ActionSelectionAid.MNEMONICS, disposeCallback, maxRowCount, preselectActionCondition, actionPlace); } private static ListPopup createActionGroupPopup(String title, @NotNull ActionGroup actionGroup, @NotNull 
DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics, Runnable disposeCallback, int maxRowCount, Condition<AnAction> preselectActionCondition, @Nullable String actionPlace) { return new ActionGroupPopup(title, actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics, disposeCallback, maxRowCount, preselectActionCondition, actionPlace); } public static class ActionGroupPopup extends ListPopupImpl { private final Runnable myDisposeCallback; private final Component myComponent; private final String myActionPlace; private IconHoverListener myIconsHoverListener; public ActionGroupPopup(final String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics, final Runnable disposeCallback, final int maxRowCount, final Condition<AnAction> preselectActionCondition, @Nullable final String actionPlace) { this(title, actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics, disposeCallback, maxRowCount, preselectActionCondition, actionPlace, false); } public ActionGroupPopup(final String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics, final Runnable disposeCallback, final int maxRowCount, final Condition<AnAction> preselectActionCondition, @Nullable final String actionPlace, boolean autoSelection) { this(null, createStep(title, actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics, preselectActionCondition, actionPlace, autoSelection), disposeCallback, dataContext, actionPlace, maxRowCount); } protected ActionGroupPopup(@Nullable WizardPopup aParent, @NotNull ListPopupStep step, @Nullable Runnable disposeCallback, 
@NotNull DataContext dataContext, @Nullable String actionPlace, int maxRowCount) { super(aParent, step, maxRowCount); myDisposeCallback = disposeCallback; myComponent = PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext); myActionPlace = actionPlace == null ? ActionPlaces.UNKNOWN : actionPlace; registerAction("handleActionToggle1", KeyEvent.VK_SPACE, 0, new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { handleToggleAction(); } }); addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(ListSelectionEvent e) { final JList list = (JList)e.getSource(); final ActionItem actionItem = (ActionItem)list.getSelectedValue(); if (actionItem == null) return; Presentation presentation = updateActionItem(actionItem); ActionMenu.showDescriptionInStatusBar(true, myComponent, presentation.getDescription()); } }); } @NotNull private Presentation updateActionItem(@NotNull ActionItem actionItem) { AnAction action = actionItem.getAction(); Presentation presentation = new Presentation(); presentation.setDescription(action.getTemplatePresentation().getDescription()); final AnActionEvent actionEvent = new AnActionEvent(null, DataManager.getInstance().getDataContext(myComponent), myActionPlace, presentation, ActionManager.getInstance(), 0); actionEvent.setInjectedContext(action.isInInjectedContext()); ActionUtil.performDumbAwareUpdate(LaterInvocator.isInModalContext(), action, actionEvent, false); return presentation; } private static ListPopupStep createStep(String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics, Condition<AnAction> preselectActionCondition, @Nullable String actionPlace, boolean autoSelection) { final Component component = PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext); LOG.assertTrue(component != null, "dataContext has no component for new ListPopupStep"); final 
ActionStepBuilder builder = new ActionStepBuilder(dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics); if (actionPlace != null) { builder.setActionPlace(actionPlace); } builder.buildGroup(actionGroup); final List<ActionItem> items = builder.getItems(); return new ActionPopupStep(items, title, component, showNumbers || honorActionMnemonics && itemsHaveMnemonics(items), preselectActionCondition, autoSelection, showDisabledActions); } @Override public void dispose() { if (myDisposeCallback != null) { myDisposeCallback.run(); } getList().removeMouseMotionListener(myIconsHoverListener); getList().removeListSelectionListener(myIconsHoverListener); ActionMenu.showDescriptionInStatusBar(true, myComponent, null); super.dispose(); } @Override public void handleSelect(boolean handleFinalChoices, InputEvent e) { final Object selectedValue = getList().getSelectedValue(); final ActionPopupStep actionPopupStep = ObjectUtils.tryCast(getListStep(), ActionPopupStep.class); if (actionPopupStep != null) { KeepingPopupOpenAction dontClosePopupAction = getActionByClass(selectedValue, actionPopupStep, KeepingPopupOpenAction.class); if (dontClosePopupAction != null) { actionPopupStep.performAction((AnAction)dontClosePopupAction, e != null ? 
e.getModifiers() : 0, e); for (ActionItem item : actionPopupStep.myItems) { updateActionItem(item); } getList().repaint(); return; } } super.handleSelect(handleFinalChoices, e); } protected void handleToggleAction() { final Object[] selectedValues = getList().getSelectedValues(); ListPopupStep<Object> listStep = getListStep(); final ActionPopupStep actionPopupStep = ObjectUtils.tryCast(listStep, ActionPopupStep.class); if (actionPopupStep == null) return; List<ToggleAction> filtered = ContainerUtil.mapNotNull(selectedValues, o -> getActionByClass(o, actionPopupStep, ToggleAction.class)); for (ToggleAction action : filtered) { actionPopupStep.performAction(action, 0); } for (ActionItem item : actionPopupStep.myItems) { updateActionItem(item); } getList().repaint(); } public void installOnHoverIconsSupport(@NotNull IconListPopupRenderer iconListPopupRenderer) { //OnHover icons listener should be installed once assert myIconsHoverListener == null; myIconsHoverListener = new IconHoverListener(iconListPopupRenderer); } @Override protected boolean beforeShow() { getList().addMouseMotionListener(myIconsHoverListener); getList().addListSelectionListener(myIconsHoverListener); return super.beforeShow(); } @Nullable private static <T> T getActionByClass(@Nullable Object value, @NotNull ActionPopupStep actionPopupStep, @NotNull Class<T> actionClass) { ActionItem item = value instanceof ActionItem ? (ActionItem)value : null; if (item == null) return null; if (!actionPopupStep.isSelectable(item)) return null; return actionClass.isInstance(item.getAction()) ? 
actionClass.cast(item.getAction()) : null; } private class IconHoverListener extends MouseMotionAdapter implements ListSelectionListener { @NotNull private IconListPopupRenderer myRenderer; public IconHoverListener(@NotNull IconListPopupRenderer renderer) { myRenderer = renderer; } @Override public void mouseMoved(MouseEvent e) { Point point = e.getPoint(); int index = getList().locationToIndex(point); Rectangle bounds = getList().getCellBounds(index, index); Object selectedValue = getList().getSelectedValue(); if (selectedValue instanceof ActionItem) { ((ActionItem)selectedValue).setIconHovered(myRenderer.isIconAt(point)); } if (bounds != null) { getList().repaint(bounds); } } @Override public void valueChanged(ListSelectionEvent e) { if (!e.getValueIsAdjusting()) { int selected = getSelectedIndex(); int unselected = e.getFirstIndex() == selected ? e.getLastIndex() : e.getFirstIndex(); Object elementAt = getList().getModel().getElementAt(unselected); if (elementAt instanceof ActionItem) { ActionItem actionItem = (ActionItem)elementAt; actionItem.setIconHovered(false); getList().repaint(); } } } } } @NotNull @Override public ListPopup createActionGroupPopup(final String title, @NotNull final ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean showDisabledActions, boolean honorActionMnemonics, final Runnable disposeCallback, final int maxRowCount, final Condition<AnAction> preselectActionCondition) { return createActionGroupPopup(title, actionGroup, dataContext, showNumbers, true, showDisabledActions, honorActionMnemonics, disposeCallback, maxRowCount, preselectActionCondition, null); } @NotNull @Override public ListPopup createActionGroupPopup(String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, ActionSelectionAid selectionAidMethod, boolean showDisabledActions) { return createActionGroupPopup(title, actionGroup, dataContext, selectionAidMethod == ActionSelectionAid.NUMBERING || selectionAidMethod == 
ActionSelectionAid.ALPHA_NUMBERING, selectionAidMethod == ActionSelectionAid.ALPHA_NUMBERING, showDisabledActions, selectionAidMethod == ActionSelectionAid.MNEMONICS, null, -1); } @NotNull @Override public ListPopup createActionGroupPopup(String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, ActionSelectionAid selectionAidMethod, boolean showDisabledActions, @Nullable String actionPlace) { return createActionGroupPopup(title, actionGroup, dataContext, selectionAidMethod, showDisabledActions, null, -1, null, actionPlace); } @NotNull @Override public ListPopup createActionGroupPopup(String title, @NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, ActionSelectionAid selectionAidMethod, boolean showDisabledActions, Runnable disposeCallback, int maxRowCount) { return createActionGroupPopup(title, actionGroup, dataContext, selectionAidMethod, showDisabledActions, disposeCallback, maxRowCount, null, null); } @NotNull @Override public ListPopupStep createActionsStep(@NotNull final ActionGroup actionGroup, @NotNull DataContext dataContext, final boolean showNumbers, final boolean showDisabledActions, final String title, final Component component, final boolean honorActionMnemonics) { return createActionsStep(actionGroup, dataContext, showNumbers, showDisabledActions, title, component, honorActionMnemonics, 0, false); } private static ListPopupStep createActionsStep(@NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, String title, Component component, boolean honorActionMnemonics, final int defaultOptionIndex, final boolean autoSelectionEnabled) { final List<ActionItem> items = makeActionItemsFromActionGroup(actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics); return new ActionPopupStep(items, title, component, showNumbers || honorActionMnemonics && itemsHaveMnemonics(items), action -> 
defaultOptionIndex >= 0 && defaultOptionIndex < items.size() && items.get(defaultOptionIndex).getAction().equals(action), autoSelectionEnabled, showDisabledActions); } @NotNull private static List<ActionItem> makeActionItemsFromActionGroup(@NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, boolean honorActionMnemonics) { final ActionStepBuilder builder = new ActionStepBuilder(dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics); builder.buildGroup(actionGroup); return builder.getItems(); } @NotNull private static ListPopupStep createActionsStep(@NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean useAlphaAsNumbers, boolean showDisabledActions, String title, Component component, boolean honorActionMnemonics, Condition<AnAction> preselectActionCondition, boolean autoSelectionEnabled) { final List<ActionItem> items = makeActionItemsFromActionGroup(actionGroup, dataContext, showNumbers, useAlphaAsNumbers, showDisabledActions, honorActionMnemonics); return new ActionPopupStep(items, title, component, showNumbers || honorActionMnemonics && itemsHaveMnemonics(items), preselectActionCondition, autoSelectionEnabled, showDisabledActions); } @NotNull @Override public ListPopupStep createActionsStep(@NotNull ActionGroup actionGroup, @NotNull DataContext dataContext, boolean showNumbers, boolean showDisabledActions, String title, Component component, boolean honorActionMnemonics, int defaultOptionIndex, final boolean autoSelectionEnabled) { return createActionsStep(actionGroup, dataContext, showNumbers, true, showDisabledActions, title, component, honorActionMnemonics, defaultOptionIndex, autoSelectionEnabled); } private static boolean itemsHaveMnemonics(final List<ActionItem> items) { for (ActionItem item : items) { if (item.getAction().getTemplatePresentation().getMnemonic() != 0) return true; } return 
false; } @NotNull @Override public ListPopup createWizardStep(@NotNull PopupStep step) { return new ListPopupImpl((ListPopupStep)step); } @NotNull @Override public ListPopup createListPopup(@NotNull ListPopupStep step) { return new ListPopupImpl(step); } @NotNull @Override public ListPopup createListPopup(@NotNull ListPopupStep step, int maxRowCount) { return new ListPopupImpl(step, maxRowCount); } @NotNull @Override public TreePopup createTree(JBPopup parent, @NotNull TreePopupStep aStep, Object parentValue) { return new TreePopupImpl(parent, aStep, parentValue); } @NotNull @Override public TreePopup createTree(@NotNull TreePopupStep aStep) { return new TreePopupImpl(aStep); } @NotNull @Override public ComponentPopupBuilder createComponentPopupBuilder(@NotNull JComponent content, JComponent prefferableFocusComponent) { return new ComponentPopupBuilderImpl(content, prefferableFocusComponent); } @NotNull @Override public RelativePoint guessBestPopupLocation(@NotNull DataContext dataContext) { Component component = PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext); JComponent focusOwner = component instanceof JComponent ? (JComponent)component : null; if (focusOwner == null) { Project project = CommonDataKeys.PROJECT.getData(dataContext); IdeFrameImpl frame = project == null ? null : ((WindowManagerEx)WindowManager.getInstance()).getFrame(project); focusOwner = frame == null ? 
null : frame.getRootPane(); if (focusOwner == null) { throw new IllegalArgumentException("focusOwner cannot be null"); } } final Point point = PlatformDataKeys.CONTEXT_MENU_POINT.getData(dataContext); if (point != null) { return new RelativePoint(focusOwner, point); } Editor editor = CommonDataKeys.EDITOR.getData(dataContext); if (editor != null && focusOwner == editor.getContentComponent()) { return guessBestPopupLocation(editor); } else { return guessBestPopupLocation(focusOwner); } } @NotNull @Override public RelativePoint guessBestPopupLocation(@NotNull final JComponent component) { Point popupMenuPoint = null; final Rectangle visibleRect = component.getVisibleRect(); if (component instanceof JList) { // JList JList list = (JList)component; int firstVisibleIndex = list.getFirstVisibleIndex(); int lastVisibleIndex = list.getLastVisibleIndex(); int[] selectedIndices = list.getSelectedIndices(); for (int index : selectedIndices) { if (firstVisibleIndex <= index && index <= lastVisibleIndex) { Rectangle cellBounds = list.getCellBounds(index, index); popupMenuPoint = new Point(visibleRect.x + visibleRect.width / 4, cellBounds.y + cellBounds.height); break; } } } else if (component instanceof JTree) { // JTree JTree tree = (JTree)component; int[] selectionRows = tree.getSelectionRows(); if (selectionRows != null) { Arrays.sort(selectionRows); for (int i = 0; i < selectionRows.length; i++) { int row = selectionRows[i]; Rectangle rowBounds = tree.getRowBounds(row); if (visibleRect.contains(rowBounds)) { popupMenuPoint = new Point(rowBounds.x + 2, rowBounds.y + rowBounds.height - 1); break; } } if (popupMenuPoint == null) {//All selected rows are out of visible rect Point visibleCenter = new Point(visibleRect.x + visibleRect.width / 2, visibleRect.y + visibleRect.height / 2); double minDistance = Double.POSITIVE_INFINITY; int bestRow = -1; Point rowCenter; double distance; for (int i = 0; i < selectionRows.length; i++) { int row = selectionRows[i]; Rectangle rowBounds = 
tree.getRowBounds(row); rowCenter = new Point(rowBounds.x + rowBounds.width / 2, rowBounds.y + rowBounds.height / 2); distance = visibleCenter.distance(rowCenter); if (minDistance > distance) { minDistance = distance; bestRow = row; } } if (bestRow != -1) { Rectangle rowBounds = tree.getRowBounds(bestRow); tree.scrollRectToVisible( new Rectangle(rowBounds.x, rowBounds.y, Math.min(visibleRect.width, rowBounds.width), rowBounds.height)); popupMenuPoint = new Point(rowBounds.x + 2, rowBounds.y + rowBounds.height - 1); } } } } else if (component instanceof JTable) { JTable table = (JTable)component; int column = table.getColumnModel().getSelectionModel().getLeadSelectionIndex(); int row = Math.max(table.getSelectionModel().getLeadSelectionIndex(), table.getSelectionModel().getAnchorSelectionIndex()); Rectangle rect = table.getCellRect(row, column, false); if (!visibleRect.intersects(rect)) { table.scrollRectToVisible(rect); } popupMenuPoint = new Point(rect.x, rect.y + rect.height); } else if (component instanceof PopupOwner) { popupMenuPoint = ((PopupOwner)component).getBestPopupPosition(); } if (popupMenuPoint == null) { popupMenuPoint = new Point(visibleRect.x + visibleRect.width / 2, visibleRect.y + visibleRect.height / 2); } return new RelativePoint(component, popupMenuPoint); } @Override public boolean isBestPopupLocationVisible(@NotNull Editor editor) { return getVisibleBestPopupLocation(editor) != null; } @NotNull @Override public RelativePoint guessBestPopupLocation(@NotNull Editor editor) { Point p = getVisibleBestPopupLocation(editor); if (p == null) { final Rectangle visibleArea = editor.getScrollingModel().getVisibleArea(); p = new Point(visibleArea.x + visibleArea.width / 3, visibleArea.y + visibleArea.height / 2); } return new RelativePoint(editor.getContentComponent(), p); } @Nullable private static Point getVisibleBestPopupLocation(@NotNull Editor editor) { VisualPosition visualPosition = editor.getUserData(ANCHOR_POPUP_POSITION); if (visualPosition == 
null) { CaretModel caretModel = editor.getCaretModel(); if (caretModel.isUpToDate()) { visualPosition = caretModel.getVisualPosition(); } else { visualPosition = editor.offsetToVisualPosition(caretModel.getOffset()); } } Point p = editor.visualPositionToXY(new VisualPosition(visualPosition.line + 1, visualPosition.column)); final Rectangle visibleArea = editor.getScrollingModel().getVisibleArea(); return !visibleArea.contains(p) && !visibleArea.contains(p.x, p.y - editor.getLineHeight()) ? null : p; } @Override public Point getCenterOf(JComponent container, JComponent content) { return AbstractPopup.getCenterOf(container, content); } public static class ActionItem implements ShortcutProvider { private final AnAction myAction; private String myText; private final boolean myIsEnabled; @Nullable private ActionStepBuilder.IconWrapper myIcon; private final boolean myPrependWithSeparator; private final String mySeparatorText; private final String myDescription; private ActionItem(@NotNull AnAction action, @NotNull String text, @Nullable String description, boolean enabled, @Nullable ActionStepBuilder.IconWrapper icon, final boolean prependWithSeparator, String separatorText) { myAction = action; myText = text; myIsEnabled = enabled; myIcon = icon; myPrependWithSeparator = prependWithSeparator; mySeparatorText = separatorText; myDescription = description; myAction.getTemplatePresentation().addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { if (evt.getPropertyName() == PROP_ICON || evt.getPropertyName() == PROP_HOVERED_ICON) { updateIcons(); } else if (evt.getPropertyName() == PROP_TEXT) { myText = myAction.getTemplatePresentation().getText(); } } }); } private void updateIcons() { // we can't set icons if it hasn't existed before, because alignment will be destroyed; use IconWrapper with null icon instead of null if (myIcon == null) return; Presentation presentation = myAction.getTemplatePresentation(); 
myIcon.setIcons(presentation.getIcon(), presentation.getHoveredIcon()); } @NotNull public AnAction getAction() { return myAction; } @NotNull public String getText() { return myText; } @Nullable public ActionStepBuilder.IconWrapper getIcon() { return myIcon; } public boolean isPrependWithSeparator() { return myPrependWithSeparator; } public String getSeparatorText() { return mySeparatorText; } public boolean isEnabled() { return myIsEnabled; } public String getDescription() { return myDescription; } @Nullable @Override public ShortcutSet getShortcut() { return myAction.getShortcutSet(); } @Override public String toString() { return myText; } public void setIconHovered(boolean isHovered) { if (myIcon != null) { myIcon.setHovered(isHovered); } } } private static class ActionPopupStep implements ListPopupStepEx<ActionItem>, MnemonicNavigationFilter<ActionItem>, SpeedSearchFilter<ActionItem> { private final List<ActionItem> myItems; private final String myTitle; private final Component myContext; private final boolean myEnableMnemonics; private final int myDefaultOptionIndex; private final boolean myAutoSelectionEnabled; private final boolean myShowDisabledActions; private Runnable myFinalRunnable; @Nullable private final Condition<AnAction> myPreselectActionCondition; private ActionPopupStep(@NotNull final List<ActionItem> items, final String title, Component context, boolean enableMnemonics, @Nullable Condition<AnAction> preselectActionCondition, final boolean autoSelection, boolean showDisabledActions) { myItems = items; myTitle = title; myContext = context; myEnableMnemonics = enableMnemonics; myDefaultOptionIndex = getDefaultOptionIndexFromSelectCondition(preselectActionCondition, items); myPreselectActionCondition = preselectActionCondition; myAutoSelectionEnabled = autoSelection; myShowDisabledActions = showDisabledActions; } private static int getDefaultOptionIndexFromSelectCondition(@Nullable Condition<AnAction> preselectActionCondition, @NotNull 
List<ActionItem> items) {
    // Index of the first item whose action matches the preselect condition;
    // falls back to 0 (the first item) when no condition is given or nothing matches.
    int defaultOptionIndex = 0;
    if (preselectActionCondition != null) {
      for (int i = 0; i < items.size(); i++) {
        final AnAction action = items.get(i).getAction();
        if (preselectActionCondition.value(action)) {
          defaultOptionIndex = i;
          break;
        }
      }
    }
    return defaultOptionIndex;
  }

  @Override
  @NotNull
  public List<ActionItem> getValues() {
    return myItems;
  }

  @Override
  public boolean isSelectable(final ActionItem value) {
    return value.isEnabled();
  }

  @Override
  public int getMnemonicPos(final ActionItem value) {
    // A mnemonic may be marked with the UI mnemonic character, '&' or '_';
    // return the position of whichever marker is found first (checked in that
    // order), or -1 when the item text carries no mnemonic marker.
    final String text = getTextFor(value);
    int i = text.indexOf(UIUtil.MNEMONIC);
    if (i < 0) {
      i = text.indexOf('&');
    }
    if (i < 0) {
      i = text.indexOf('_');
    }
    return i;
  }

  @Override
  public Icon getIconFor(final ActionItem aValue) {
    return aValue.getIcon();
  }

  @Override
  @NotNull
  public String getTextFor(final ActionItem value) {
    return value.getText();
  }

  @Nullable
  @Override
  public String getTooltipTextFor(ActionItem value) {
    return value.getDescription();
  }

  @Override
  public void setEmptyText(@NotNull StatusText emptyText) {
    // Intentionally empty: this step does not customize the empty-list text.
  }

  @Override
  public ListSeparator getSeparatorAbove(final ActionItem value) {
    return value.isPrependWithSeparator() ? new ListSeparator(value.getSeparatorText()) : null;
  }

  @Override
  public int getDefaultOptionIndex() {
    return myDefaultOptionIndex;
  }

  @Override
  public String getTitle() {
    return myTitle;
  }

  @Override
  public PopupStep onChosen(final ActionItem actionChoice, final boolean finalChoice) {
    return onChosen(actionChoice, finalChoice, 0);
  }

  @Override
  public PopupStep onChosen(ActionItem actionChoice, boolean finalChoice, final int eventModifiers) {
    // Disabled items cannot be chosen; treat the selection as a no-op final choice.
    if (!actionChoice.isEnabled()) return FINAL_CHOICE;
    final AnAction action = actionChoice.getAction();
    DataManager mgr = DataManager.getInstance();
    final DataContext dataContext = myContext != null ?
                                    mgr.getDataContext(myContext) : mgr.getDataContext();
    if (action instanceof ActionGroup && (!finalChoice || !((ActionGroup)action).canBePerformed(dataContext))) {
      // A group that is not directly performable opens as a sub-step (sub-menu).
      return createActionsStep((ActionGroup)action, dataContext, myEnableMnemonics, true, myShowDisabledActions,
                               null, myContext, false, myPreselectActionCondition, false);
    }
    else {
      // Defer the actual action execution until after the popup has closed.
      myFinalRunnable = () -> performAction(action, eventModifiers);
      return FINAL_CHOICE;
    }
  }

  public void performAction(@NotNull AnAction action, int modifiers) {
    performAction(action, modifiers, null);
  }

  public void performAction(@NotNull AnAction action, int modifiers, InputEvent inputEvent) {
    final DataManager mgr = DataManager.getInstance();
    final DataContext dataContext = myContext != null ? mgr.getDataContext(myContext) : mgr.getDataContext();
    final AnActionEvent event = new AnActionEvent(inputEvent, dataContext, ActionPlaces.UNKNOWN,
                                                  action.getTemplatePresentation().clone(),
                                                  ActionManager.getInstance(), modifiers);
    event.setInjectedContext(action.isInInjectedContext());
    // Re-check the enabled state (dumb-mode aware) immediately before performing.
    if (ActionUtil.lastUpdateAndCheckDumb(action, event, false)) {
      ActionUtil.performActionDumbAware(action, event);
    }
  }

  @Override
  public Runnable getFinalRunnable() {
    return myFinalRunnable;
  }

  @Override
  public boolean hasSubstep(final ActionItem selectedValue) {
    return selectedValue != null && selectedValue.isEnabled() && selectedValue.getAction() instanceof ActionGroup;
  }

  @Override
  public void canceled() {
    // Nothing to clean up when the popup is cancelled.
  }

  @Override
  public boolean isMnemonicsNavigationEnabled() {
    return myEnableMnemonics;
  }

  @Override
  public MnemonicNavigationFilter<ActionItem> getMnemonicNavigationFilter() {
    return this;
  }

  @Override
  public boolean canBeHidden(final ActionItem value) {
    return true;
  }

  @Override
  public String getIndexedString(final ActionItem value) {
    return getTextFor(value);
  }

  @Override
  public boolean isSpeedSearchEnabled() {
    return true;
  }

  @Override
  public boolean isAutoSelectionEnabled() {
    return myAutoSelectionEnabled;
  }

  @Override
  public SpeedSearchFilter<ActionItem>
getSpeedSearchFilter() { return this; } } @Override @NotNull public List<JBPopup> getChildPopups(@NotNull final Component component) { return FocusTrackback.getChildPopups(component); } @Override public boolean isPopupActive() { return IdeEventQueue.getInstance().isPopupActive(); } private static class ActionStepBuilder { private final List<ActionItem> myListModel; private final DataContext myDataContext; private final boolean myShowNumbers; private final boolean myUseAlphaAsNumbers; private final boolean myShowDisabled; private final HashMap<AnAction, Presentation> myAction2presentation; private int myCurrentNumber; private boolean myPrependWithSeparator; private String mySeparatorText; private final boolean myHonorActionMnemonics; private IconWrapper myEmptyIcon; private int myMaxIconWidth = -1; private int myMaxIconHeight = -1; @NotNull private String myActionPlace; private ActionStepBuilder(@NotNull DataContext dataContext, final boolean showNumbers, final boolean useAlphaAsNumbers, final boolean showDisabled, final boolean honorActionMnemonics) { myUseAlphaAsNumbers = useAlphaAsNumbers; myListModel = new ArrayList<>(); myDataContext = dataContext; myShowNumbers = showNumbers; myShowDisabled = showDisabled; myAction2presentation = new HashMap<>(); myCurrentNumber = 0; myPrependWithSeparator = false; mySeparatorText = null; myHonorActionMnemonics = honorActionMnemonics; myActionPlace = ActionPlaces.UNKNOWN; } public void setActionPlace(@NotNull String actionPlace) { myActionPlace = actionPlace; } @NotNull public List<ActionItem> getItems() { return myListModel; } public void buildGroup(@NotNull ActionGroup actionGroup) { calcMaxIconSize(actionGroup); myEmptyIcon = myMaxIconHeight != -1 && myMaxIconWidth != -1 ? 
createWrapper(null) : null; appendActionsFromGroup(actionGroup); if (myListModel.isEmpty()) { myListModel.add(new ActionItem(Utils.EMPTY_MENU_FILLER, Utils.NOTHING_HERE, null, false, null, false, null)); } } private void calcMaxIconSize(final ActionGroup actionGroup) { AnAction[] actions = actionGroup.getChildren(createActionEvent(actionGroup)); for (AnAction action : actions) { if (action == null) continue; if (action instanceof ActionGroup) { final ActionGroup group = (ActionGroup)action; if (!group.isPopup()) { calcMaxIconSize(group); continue; } } Icon icon = action.getTemplatePresentation().getIcon(); if (icon == null && action instanceof Toggleable) icon = PlatformIcons.CHECK_ICON; if (icon != null) { final int width = icon.getIconWidth(); final int height = icon.getIconHeight(); if (myMaxIconWidth < width) { myMaxIconWidth = width; } if (myMaxIconHeight < height) { myMaxIconHeight = height; } } } } @NotNull private AnActionEvent createActionEvent(@NotNull AnAction actionGroup) { final AnActionEvent actionEvent = new AnActionEvent(null, myDataContext, myActionPlace, getPresentation(actionGroup), ActionManager.getInstance(), 0); actionEvent.setInjectedContext(actionGroup.isInInjectedContext()); return actionEvent; } private void appendActionsFromGroup(@NotNull ActionGroup actionGroup) { AnAction[] actions = actionGroup.getChildren(createActionEvent(actionGroup)); for (AnAction action : actions) { if (action == null) { LOG.error("null action in group " + actionGroup); continue; } if (action instanceof Separator) { myPrependWithSeparator = true; mySeparatorText = ((Separator)action).getText(); } else { if (action instanceof ActionGroup) { ActionGroup group = (ActionGroup)action; if (group.isPopup()) { appendAction(group); } else { appendActionsFromGroup(group); } } else { appendAction(action); } } } } private void appendAction(@NotNull AnAction action) { Presentation presentation = getPresentation(action); AnActionEvent event = createActionEvent(action); 
ActionUtil.performDumbAwareUpdate(LaterInvocator.isInModalContext(), action, event, true); if ((myShowDisabled || presentation.isEnabled()) && presentation.isVisible()) { String text = presentation.getText(); if (myShowNumbers) { if (myCurrentNumber < 9) { text = "&" + (myCurrentNumber + 1) + ". " + text; } else if (myCurrentNumber == 9) { text = "&" + 0 + ". " + text; } else if (myUseAlphaAsNumbers) { text = "&" + (char)('A' + myCurrentNumber - 10) + ". " + text; } myCurrentNumber++; } else if (myHonorActionMnemonics) { text = Presentation.restoreTextWithMnemonic(text, action.getTemplatePresentation().getMnemonic()); } Icon icon = presentation.isEnabled() ? presentation.getIcon() : IconLoader.getDisabledIcon(presentation.getIcon()); IconWrapper iconWrapper; if (icon == null && presentation.getHoveredIcon() == null) { @NonNls final String actionId = ActionManager.getInstance().getId(action); if (actionId != null && actionId.startsWith("QuickList.")) { iconWrapper = createWrapper(AllIcons.Actions.QuickList); } else if (action instanceof Toggleable) { boolean toggled = Boolean.TRUE.equals(presentation.getClientProperty(Toggleable.SELECTED_PROPERTY)); iconWrapper = toggled ? createWrapper(PlatformIcons.CHECK_ICON) : myEmptyIcon; } else { iconWrapper = myEmptyIcon; } } else { iconWrapper = new IconWrapper(icon, presentation.getHoveredIcon(), myMaxIconWidth, myMaxIconHeight); } boolean prependSeparator = (!myListModel.isEmpty() || mySeparatorText != null) && myPrependWithSeparator; assert text != null : action + " has no presentation"; myListModel.add( new ActionItem(action, text, (String)presentation.getClientProperty(JComponent.TOOL_TIP_TEXT_KEY), presentation.isEnabled(), iconWrapper, prependSeparator, mySeparatorText)); myPrependWithSeparator = false; mySeparatorText = null; } } /** * Adjusts icon size to maximum, so that icons with different sizes were aligned correctly. 
*/ public static class IconWrapper extends IconUtil.IconSizeWrapper { @Nullable private Icon myIcon; @Nullable private Icon myHoverIcon; private boolean isHovered; public IconWrapper(@Nullable Icon icon, @Nullable Icon hoverIcon, int width, int height) { super(null, width, height); setIcons(icon, hoverIcon); } @Override public void paintIcon(Component c, Graphics g, int x, int y) { paintIcon(myHoverIcon != null && isHovered ? myHoverIcon : myIcon, c, g, x, y); } public boolean isHovered() { return isHovered; } public void setHovered(boolean hovered) { isHovered = hovered; } public void setIcons(@Nullable Icon icon, @Nullable Icon hoveredIcon) { myIcon = icon; myHoverIcon = hoveredIcon; } } @NotNull public IconWrapper createWrapper(@Nullable Icon icon) { return new IconWrapper(icon, null, myMaxIconWidth, myMaxIconHeight); } private Presentation getPresentation(@NotNull AnAction action) { Presentation presentation = myAction2presentation.get(action); if (presentation == null) { presentation = action.getTemplatePresentation().clone(); myAction2presentation.put(action, presentation); } return presentation; } } @NotNull @Override public BalloonBuilder createBalloonBuilder(@NotNull final JComponent content) { return new BalloonPopupBuilderImpl(myStorage, content); } @NotNull @Override public BalloonBuilder createDialogBalloonBuilder(@NotNull JComponent content, String title) { final BalloonPopupBuilderImpl builder = new BalloonPopupBuilderImpl(myStorage, content); final Color bg = UIManager.getColor("Panel.background"); final Color borderOriginal = Color.darkGray; final Color border = ColorUtil.toAlpha(borderOriginal, 75); builder .setDialogMode(true) .setTitle(title) .setAnimationCycle(200) .setFillColor(bg).setBorderColor(border).setHideOnClickOutside(false) .setHideOnKeyOutside(false) .setHideOnAction(false) .setCloseButtonEnabled(true) .setShadow(true); return builder; } @NotNull @Override public BalloonBuilder createHtmlTextBalloonBuilder(@NotNull final String 
htmlContent, @Nullable final Icon icon, final Color fillColor, @Nullable final HyperlinkListener listener) { JEditorPane text = IdeTooltipManager.initPane(htmlContent, new HintHint().setAwtTooltip(true), null); if (listener != null) { text.addHyperlinkListener(listener); } text.setEditable(false); NonOpaquePanel.setTransparent(text); text.setBorder(null); JLabel label = new JLabel(); final JPanel content = new NonOpaquePanel(new BorderLayout((int)(label.getIconTextGap() * 1.5), (int)(label.getIconTextGap() * 1.5))); final NonOpaquePanel textWrapper = new NonOpaquePanel(new GridBagLayout()); JScrollPane scrolledText = new JScrollPane(text); scrolledText.setBackground(fillColor); scrolledText.getViewport().setBackground(fillColor); scrolledText.getViewport().setBorder(null); scrolledText.setBorder(null); textWrapper.add(scrolledText); content.add(textWrapper, BorderLayout.CENTER); final NonOpaquePanel north = new NonOpaquePanel(new BorderLayout()); north.add(new JLabel(icon), BorderLayout.NORTH); content.add(north, BorderLayout.WEST); content.setBorder(new EmptyBorder(2, 4, 2, 4)); final BalloonBuilder builder = createBalloonBuilder(content); builder.setFillColor(fillColor); return builder; } @NotNull @Override public BalloonBuilder createHtmlTextBalloonBuilder(@NotNull String htmlContent, MessageType messageType, @Nullable HyperlinkListener listener) { return createHtmlTextBalloonBuilder(htmlContent, messageType.getDefaultIcon(), messageType.getPopupBackground(), listener); } }
/*
 * Copyright 2007 Sun Microsystems, Inc.
 *
 * This file is part of jVoiceBridge.
 *
 * jVoiceBridge is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation and distributed hereunder
 * to you.
 *
 * jVoiceBridge is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 * Sun designates this particular file as subject to the "Classpath"
 * exception as provided by Sun in the License file that accompanied this
 * code.
 */
package com.sun.voip.server;

import com.sun.voip.CallParticipant;
import com.sun.voip.CallEvent;
import com.sun.voip.CallEventListener;
import com.sun.voip.CallState;
import com.sun.voip.Logger;
import com.sun.voip.TreatmentDoneListener;
import com.sun.voip.TreatmentManager;

import java.io.IOException;

import java.net.InetSocketAddress;
import java.net.SocketException;

import java.util.NoSuchElementException;
import java.util.Vector;

import java.text.ParseException;

/**
 * Common code for handling incoming and outgoing calls.
 *
 * Each call gets its own CallHandler thread.  The class also maintains a
 * static registry of all active calls so that individual calls can be
 * located by id (see {@link #findCall}) and controlled or terminated.
 * Static mutators that walk the registry synchronize on {@code activeCalls}.
 */
public abstract class CallHandler extends Thread {
    protected ConferenceManager conferenceManager; // conference this call belongs to
    protected ConferenceMember member;             // our membership in the conference
    protected MemberSender memberSender;           // sends RTP to the call endpoint
    protected MemberReceiver memberReceiver;       // receives RTP from the call endpoint

    protected CallSetupAgent csa;                  // drives call signaling / state

    protected CallParticipant cp;                  // parameters describing this call

    protected boolean done = false;                // set once cancelRequest() has run

    protected String reasonCallEnded;

    protected boolean suppressStatus;              // suppress status reports when ending

    /*
     * maintain a list of active calls so individual calls
     * can be terminated.
     */
    protected static Vector activeCalls = new Vector();

    private Vector callEventListeners = new Vector();

    /*
     * One receiver per conference is ideal for scaling.
     * In order for this to work, we must be able to distinguish
     * calls based on the source address in packets we receive
     * from each call.  The SIP port exchange only involves destination
     * ports.  For most implementations (Cisco Gateway, Cisco IP Phone,
     * conference bridge software) the source and destination ports
     * are the same.  Sip-Communicator is one exception.
     *
     * So when we see a sip URI for a target phone number, we create
     * a separate conference receiver specifically for that call.
     */
    private static boolean oneReceiverPerConference = true;

    private static int duplicateCallLimit = 100;

    private static boolean enablePSTNCalls = true;

    /*
     * Used by the OutgoingCallHandler to handle two party calls.
     * This is here so that dtmf keys can be forwarded with two party calls.
     */
    protected CallHandler otherCall;

    /** Controls whether status reports are suppressed when this call ends. */
    public void suppressStatus(boolean suppressStatus) {
        this.suppressStatus = suppressStatus;
    }

    /*
     * Mostly for debugging
     */
    /** Returns a multi-line, human-readable dump of this call's state. */
    public String getCallState() {
        String s = "\n" + cp.toString();

        if (member != null) {
            s += " ConferenceId: " + member.getConferenceManager().getId() + "\n";
            s += "\tStarted " + member.getTimeStarted() + "\n";
        } else {
            s += "\n";
        }

        if (csa != null) {
            s += "\tState = " + csa.getCallState() + "\n";
        } else {
            s += "\tNo Call Setup Agent" + "\n";
        }

        s += "\tIsDistributedBridge " + cp.isDistributedBridge() + "\n";

        if (cp.getCallTimeout() == 0) {
            s += "\tNo timeout\n";
        } else {
            s += "\tCall timeout in " + (cp.getCallTimeout() / 1000) + " seconds\n";
        }

        if (member != null) {
            s += " " + member.getMemberState();
        }
        return s;
    }

    /** Concatenates {@link #getCallState} for every active call. */
    public static String getCallStateForAllCalls() {
        String s = "";

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);
                s += call.getCallState() + "\n";
            }
        }
        return s;
    }

    /** Debug dump of the mix descriptors for every active call. */
    public static String getAllMixDescriptors() {
        String s = "";

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);
                s += "MixDescriptors for " + call + "\n";
                s += call.getMember().getMixDescriptors() + "\n";
            }
        }
        return s;
    }

    /** Like {@link #getAllMixDescriptors} but in abbreviated form. */
    public static String getAllAbbreviatedMixDescriptors() {
        String s = "";

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);
                s += "MixDescriptors for " + call + "\n";
                s += call.getMember().getAbbreviatedMixDescriptors() + "\n";
            }
        }
        return s;
    }

    /*
     * Overridden by OutgoingCallHandler.  There is no request handler
     * for incoming calls.
     */
    public abstract CallEventListener getRequestHandler();

    /** Returns the participant descriptor for this call. */
    public CallParticipant getCallParticipant() {
        return cp;
    }

    /*
     * Used to switch a call from one conference to another.
     */
    public void setConferenceManager(ConferenceManager conferenceManager) {
        this.conferenceManager = conferenceManager;
    }

    public ConferenceManager getConferenceManager() {
        return conferenceManager;
    }

    public ConferenceMember getMember() {
        return member;
    }

    public MemberSender getMemberSender() {
        return memberSender;
    }

    public MemberReceiver getMemberReceiver() {
        return memberReceiver;
    }

    /*
     * This method is called by a CallSetupAgent once the endpoint
     * address is known.  The endpoint address is the address from which
     * we expect to receive RTP packets and to which we will send RTP packets.
     */
    public void setEndpointAddress(InetSocketAddress isa, byte mediaPayload,
            byte receivePayload, byte telephoneEventPayload) {

        setEndpointAddress(isa, mediaPayload, receivePayload, telephoneEventPayload, null);
    }

    public void setEndpointAddress(InetSocketAddress isa, byte mediaPayload,
            byte receivePayload, byte telephoneEventPayload, InetSocketAddress rtcpAddress) {

        member.initialize(this, isa, mediaPayload, receivePayload,
            telephoneEventPayload, rtcpAddress);
    }

    /*
     * true if call is established
     */
    public boolean isCallEstablished() {
        if (done || csa == null) {
            return false;
        }

        return csa.isCallEstablished();
    }

    /*
     * true is call is ending
     */
    public boolean isCallEnding() {
        // No setup agent (or already done) is treated as "ending".
        if (done || csa == null) {
            return true;
        }

        return csa.isCallEnding();
    }

    public void addCallEventListener(CallEventListener listener) {
        synchronized (callEventListeners) {
            callEventListeners.add(listener);
        }
    }

    public void removeCallEventListener(CallEventListener listener) {
        synchronized (callEventListeners) {
            callEventListeners.remove(listener);
        }
    }

    /**
     * Fills in call/conference identification and current call state on the
     * event, then delivers it synchronously to every registered listener.
     */
    public void sendCallEventNotification(CallEvent callEvent) {
        if (cp.getCallId() != null) {
            callEvent.setCallId(cp.getCallId());
        } else {
            callEvent.setCallId("CallIdNotInitialized");
        }

        callEvent.setConferenceId(cp.getConferenceId());

        callEvent.setCallInfo(cp.getCallOwner());

        if (csa != null) {
            callEvent.setCallState(csa.getCallState());
        } else {
            callEvent.setCallState(new CallState(CallState.UNINITIALIZED));
        }

        synchronized (callEventListeners) {
            for (int i = 0; i < callEventListeners.size(); i++) {
                CallEventListener listener = (CallEventListener)
                    callEventListeners.elementAt(i);

                listener.callEventNotification(callEvent);
            }
        }
    }

    /*
     * The subclasses must override this.
     */
    public abstract void callEventNotification(CallEvent callEvent);

    /**
     * Send indication when speaker starts or stops speaking.
     */
    // NOTE(review): unsynchronized static counter — concurrent updates could race.
    public static int totalSpeaking;

    public void speakingChanged(boolean isSpeaking) {
        if (isSpeaking) {
            totalSpeaking++;

            CallEvent callEvent = new CallEvent(CallEvent.STARTED_SPEAKING);
            callEvent.setStartedSpeaking();
            sendCallEventNotification(callEvent);
        } else {
            totalSpeaking--;

            CallEvent callEvent = new CallEvent(CallEvent.STOPPED_SPEAKING);
            callEvent.setStoppedSpeaking();
            sendCallEventNotification(callEvent);
        }
    }

    public static int getTotalSpeaking() {
        return totalSpeaking;
    }

    /**
     * Send indication when a dtmf key is pressed.
     *
     * On an established call the keys are reported to listeners (if dtmf
     * detection is on) and forwarded either to the other leg of a two-party
     * call or echoed back to our own sender.
     */
    public void dtmfKeys(String dtmfKeys) {
        //if (Logger.logLevel >= Logger.LOG_MOREINFO) {
            Logger.println(cp + " got dtmf keys " + dtmfKeys + " "
                + cp.dtmfDetection());
        //}

        if (isCallEstablished()) {
            if (cp.dtmfDetection()) {
                member.stopTreatment(null);

                CallEvent callEvent = new CallEvent(CallEvent.DTMF_KEY);
                callEvent.setDtmfKey(dtmfKeys);
                sendCallEventNotification(callEvent);
            }

            if (otherCall != null) {
                Logger.println("Call " + cp + " forwarding dtmf key "
                    + dtmfKeys + " to " + otherCall);

                otherCall.getMemberSender().setDtmfKeyToSend(dtmfKeys);
            } else {
                getMemberSender().setDtmfKeyToSend(dtmfKeys);
            }
        } else {
            if (Logger.logLevel >= Logger.LOG_MOREINFO) {
                Logger.println(cp + " Call not established, ignoring dtmf");
            }

            // A key press before establishment cancels the answer treatment.
            stopCallAnsweredTreatment();
        }
    }

    public void stopCallAnsweredTreatment() {
        if (done || csa == null) {
            return;
        }

        csa.stopCallAnsweredTreatment();
    }

    public void stopCallEstablishedTreatment() {
        if (done || csa == null) {
            return;
        }

        csa.stopCallEstablishedTreatment();
    }

    /*
     * terminate a call.
     */
    public void cancelRequest(String reason) {
        if (done) {
            return;   // idempotent: a call is only cancelled once
        }

        done = true;

        Logger.println(cp + " Cancel request " + reason);

        if (csa != null) {
            csa.cancelRequest(reason);
        }
    }

    /*
     * Add a treatment for the caller
     */
    public void addTreatment(TreatmentManager treatmentManager) {
        member.addTreatment(treatmentManager);
    }

    /*
     * unique call identifier incremented for each new call.
     */
    private static int callNumber = 0;

    /**
     * Generates a new unique call id:  a monotonically increasing number,
     * suffixed with the bridge location when known, retried until it does
     * not collide with any active call.
     */
    public static synchronized String getNewCallId() {
        String s;

        do {
            callNumber++;

            s = String.valueOf(callNumber);

            String location = Bridge.getBridgeLocation();

            if (location.equalsIgnoreCase("Unknown") == false) {
                s += "_" + Bridge.getBridgeLocation();
            }
        } while (CallHandler.findCall(s) != null);

        return s;
    }

    /**
     * Find the new call of a call migration.
     */
    public static CallHandler findMigratingCall(String callId) {
        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId) && cp.migrateCall()) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        Logger.println("findMigratingCall:  found " + callId);
                    }
                    return call;
                }
            }
        }
        return null;
    }

    /**
     * Find a call by callId.
     *
     * Calls are kept in the activeCalls list and uniquely identified
     * by &lt;callId&gt;::&lt;name&gt;@&lt;phoneNumber&gt; for a phone call.
     *
     * This method searches for a call with the callId.
     */
    public static CallHandler findCall(String callId) {
        if (Logger.logLevel >= Logger.LOG_DETAIL) {
            Logger.println("findCall:  looking for " + callId + ", "
                + activeCalls.size() + " active calls");
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (Logger.logLevel >= Logger.LOG_DETAIL) {
                    Logger.println("findCall:  looking for " + callId
                        + " got " + cp.getCallId());
                }

                if (match(cp, callId)) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        Logger.println("findCall:  found " + callId);
                    }
                    return call;
                }
            }
        }
        return null;
    }

    /**
     * True when callId identifies the participant.  Exact call-id match
     * always counts; when short names are allowed the participant's name
     * (spaces mapped to '_'), phone number, or the user part of a sip: URI
     * are also accepted.
     */
    private static boolean match(CallParticipant cp, String callId) {
        if (cp.getCallId().equals(callId)) {
            return true;
        }

        if (ConferenceManager.allowShortNames() == false) {
            return false;
        }

        String name = cp.getName();

        if (name != null) {
            if (name.equals(callId)) {
                return true;
            }

            name = name.replaceAll(" ", "_");

            if (name.equals(callId)) {
                return true;
            }
        }

        String number = cp.getPhoneNumber();

        if (number == null) {
            return false;
        }

        if (number.equals(callId)) {
            return true;
        }

        if (number.indexOf("sip:") == 0) {
            // Compare the user part between "sip:" and '@'.
            int ix = number.indexOf("@");

            if (ix >= 0) {
                number = number.substring(4, ix);

                if (number.equals(callId)) {
                    return true;
                }
            }
        }

        return false;
    }

    /*
     * Add call to list of active calls
     */
    public void addCall(CallHandler callHandler) {
        synchronized(activeCalls) {
            activeCalls.add(callHandler);   // add to list of active calls
        }
    }

    /*
     * Remove call from list of active calls
     */
    public void removeCall(CallHandler callHandler) {
        synchronized(activeCalls) {
            activeCalls.remove(callHandler);  // remove call from list

            Logger.println("");
            Logger.println("calls still in progress:  " + activeCalls.size());
            Logger.println("");
        }
    }

    /*
     * End all calls.
     */
    public static void shutdown() {
        shutdown(0);
    }

    /**
     * Ends all calls.  With a non-zero delay a shutdown announcement is
     * played to every call first, then all calls are hung up after the
     * delay (or once announcements finish).
     */
    public static void shutdown(int delaySeconds) {
        if (delaySeconds == 0) {
            /*
             * Quick shutdown right now!
             */
            hangup("0", "System shutdown");
            return;
        }

        /*
         * Notify the active calls that the MC bridge is shutting down
         */
        long start = System.currentTimeMillis();

        Logger.println("Shutting down in " + delaySeconds + " seconds");

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                String id = call.getCallParticipant().getCallId();

                try {
                    playTreatmentToCall(id, "joinBELL.au;shutdown.au;tts:"
                        + delaySeconds + ";seconds.au");
                } catch (IOException e) {
                    Logger.println("Can't play shutdown treatment to call "
                        + id + " " + e.getMessage());
                }
            }

            /*
             * Wait at most a minute in case something is severely broken.
             */
            // NOTE(review): this is a busy-wait (no sleep) holding the
            // activeCalls lock -- confirm whether that is intentional.
            while (System.currentTimeMillis() - start < 60000) {
                boolean hasTreatments = false;

                for (int i = 0; i < activeCalls.size(); i++) {
                    CallHandler call = (CallHandler)activeCalls.elementAt(i);

                    hasTreatments = call.getMember().hasTreatments();

                    if (hasTreatments) {
                        break;
                    }
                }

                if (hasTreatments == false) {
                    break;  // no treatments left to play
                }
            }
        }

        if (delaySeconds != 0) {
            // Sleep for whatever remains of the requested delay.
            int sleepTime = (int)((delaySeconds * 1000)
                - (System.currentTimeMillis() - start));

            if (sleepTime > 0) {
                try {
                    Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                }
            }
        }

        hangup("0", "System shutdown");
    }

    /*
     * Cancel a specified call.  If callId is 0, all calls are cancelled.
     */
    public static void hangup(String callId, String reason) {
        Vector callsToCancel = new Vector();

        synchronized(activeCalls) {
            /*
             * Make a list of all the calls we want to cancel, then cancel them.
             * We have to cancel them while not synchronized or
             * we could deadlock.
             */
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (callId.equals("0") || match(cp, callId)) {
                    callsToCancel.add(call);
                }
            }
        }

        cancel(callsToCancel, reason, false);
    }

    /** Cancels every call owned by {@code ownerId}. */
    public static void hangupOwner(String ownerId, String reason) {
        Vector callsToCancel = new Vector();

        synchronized(activeCalls) {
            /*
             * Make a list of all the calls we want to cancel, then cancel them.
             * We have to cancel them while not synchronized or
             * we could deadlock.
             */
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (cp.getCallOwner().equals(ownerId)) {
                    callsToCancel.add(call);
                }
            }
        }

        cancel(callsToCancel, reason, false);
    }

    public static void suspendBridge() {
        // Cancels every active call with status reporting suppressed.
        cancel(activeCalls, "bridge suspended", true);
    }

    // Drains the given list, cancelling each call outside the activeCalls lock.
    private static void cancel(Vector callsToCancel, String reason,
            boolean suppressStatus) {

        while (callsToCancel.size() > 0) {
            CallHandler call = (CallHandler)callsToCancel.remove(0);

            call.suppressStatus(suppressStatus);
            call.cancelRequest(reason);
        }
    }

    public String getReasonCallEnded() {
        return reasonCallEnded;
    }

    /*
     * Set cnThresh for the speech detector for a conference member.
     */
    public static void setCnThresh(String callId, int cnThresh) {
        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    MemberReceiver memberReceiver = call.getMemberReceiver();

                    if (memberReceiver != null) {
                        memberReceiver.setCnThresh(cnThresh);
                    }
                }
            }
        }
    }

    /*
     * force packets to be dropped for debugging.
     */
    public static void setDropPackets(String callId, int dropPackets) {
        if (callId == null) {
            return;
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    MemberReceiver memberReceiver = call.getMemberReceiver();

                    if (memberReceiver != null) {
                        memberReceiver.setDropPackets(dropPackets);
                    }
                }
            }
        }
    }

    /**
     * Mute or unmute a conference member.
     */
    public void setMuted(boolean isMuted) {
        MemberReceiver memberReceiver = getMemberReceiver();

        if (memberReceiver != null) {
            memberReceiver.setMuted(isMuted);
        }
    }

    /** Mutes or unmutes every member whose id matches {@code callId}. */
    public static void setMuted(String callId, boolean isMuted) {
        if (callId == null) {
            return;
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        String s = "";

                        if (isMuted == false) {
                            s = "un";
                        }

                        Logger.println(cp.getCallId() + ":  " + s + "muted");
                    }

                    MemberReceiver memberReceiver = call.getMemberReceiver();

                    if (memberReceiver != null) {
                        memberReceiver.setMuted(isMuted);
                    }
                }
            }
        }
    }

    /** Applies new remote SDP to this call's setup agent. */
    public void setRemoteMediaInfo(String sdp) throws ParseException {
        csa.setRemoteMediaInfo(sdp);
    }

    /**
     * Applies new remote SDP to the first matching call.
     *
     * @throws ParseException if no active call matches {@code callId}
     */
    public static void setRemoteMediaInfo(String callId, String sdp)
            throws ParseException {

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    call.setRemoteMediaInfo(sdp);
                    return;
                }
            }
        }

        throw new ParseException("Invalid callId:  " + callId, 0);
    }

    /*
     * Say the number of calls in the conference
     */
    public int getNumberOfCalls() {
        return conferenceManager.getNumberOfMembers();
    }

    /**
     * Mute or unmute member in a whisperGroup
     */
    public static void setMuteWhisperGroup(String callId, boolean isMuted) {
        if (callId == null) {
            return;
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        String s = "";

                        if (isMuted == false) {
                            s = "un";
                        }

                        Logger.println(cp.getCallId() + ":  " + s + "muted");
                    }

                    MemberReceiver memberReceiver = call.getMemberReceiver();

                    if (memberReceiver != null) {
                        memberReceiver.setMuteWhisperGroup(isMuted);
                    }
                }
            }
        }
    }

    /**
     * Mute or unmute a conference from a particular call.
     */
    public static void setConferenceMuted(String callId, boolean isMuted) {
        if (callId == null) {
            return;
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        String s = "";

                        if (isMuted == false) {
                            s = "un";
                        }

                        Logger.println(cp.getCallId() + ":  conference " + s + "muted");
                    }

                    ConferenceMember member = call.getMember();

                    if (member != null) {
                        member.setConferenceMuted(isMuted);
                    }
                }
            }
        }
    }

    /**
     * Mute or unmute the main conference from a particular call.
     */
    public static void setConferenceSilenced(String callId, boolean isSilenced) {
        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        String s = "";

                        if (isSilenced == false) {
                            s = "un";
                        }

                        // NOTE(review): "silenceMainonference" looks like a typo
                        // in this log text; left unchanged to preserve output.
                        Logger.println(cp.getCallId() + ":  silenceMainonference " + s + "muted");
                    }

                    ConferenceMember member = call.getMember();

                    if (member != null) {
                        member.setConferenceSilenced(isSilenced);
                    }
                }
            }
        }
    }

    /*
     * Set powerThresholdLimit for the speech detector for a member.
     */
    public static void setPowerThresholdLimit(String callId,
            double powerThresholdLimit) {

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    MemberReceiver memberReceiver = call.getMemberReceiver();

                    if (memberReceiver != null) {
                        memberReceiver.setPowerThresholdLimit(
                            powerThresholdLimit);
                    }
                }
            }
        }
    }

    /**
     * set dtmfSuppression flag
     */
    private static boolean dtmfSuppression = true;  // global default

    /**
     * Sets the dtmf suppression flag.  A callId of "0" sets the global
     * default; otherwise the flag is set on the matching call only.
     *
     * @throws NoSuchElementException if no active call matches {@code callId}
     */
    public static void setDtmfSuppression(String callId,
            boolean dtmfSuppression) throws NoSuchElementException {

        if (callId.equals("0")) {
            CallHandler.dtmfSuppression = dtmfSuppression;
            return;
        }

        CallHandler callHandler = findCall(callId);

        if (callHandler == null) {
            throw new NoSuchElementException("Invalid callId specified:  "
                + callId);
        }

        callHandler.getCallParticipant().setDtmfSuppression(dtmfSuppression);
    }

    /**
     * Set flag to do voice detection while muted
     */
    public static void setVoiceDetectionWhileMuted(String callId,
            boolean voiceDetectionWhileMuted) {

        if (callId == null) {
            return;
        }

        synchronized(activeCalls) {
            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (match(cp, callId)) {
                    cp.setVoiceDetectionWhileMuted(voiceDetectionWhileMuted);

                    if (Logger.logLevel >= Logger.LOG_DETAIL) {
                        Logger.println(cp.getCallId()
                            + " voice detection while muted is "
                            + voiceDetectionWhileMuted);
                    }
                }
            }
        }
    }

    /**
     * Get global dtmfSuppression flag
     */
    public static boolean dtmfSuppression() {
        return dtmfSuppression;
    }

    /**
     * Marks whether a call may be recorded.
     *
     * @throws NoSuchElementException if no active call matches {@code callId}
     */
    public static void setDoNotRecord(String callId, boolean doNotRecord)
            throws NoSuchElementException {

        CallHandler callHandler = findCall(callId);

        if (callHandler == null) {
            throw new NoSuchElementException("Invalid callId specified:  "
                + callId);
        }

        if (Logger.logLevel >= Logger.LOG_DETAIL) {
            String s = "";

            if (doNotRecord == true) {
                s = "NOT";
            }

            Logger.println(callHandler + ":  " + s + " okay to record");
        }

        callHandler.getMemberReceiver().setDoNotRecord(doNotRecord);
    }

    /**
     * Record data sent to or from a member
     */
    public static void recordMember(String callId, boolean enabled,
            String recordingFile, String recordingType, boolean fromMember)
            throws NoSuchElementException, IOException {

        CallHandler callHandler = findCall(callId);

        if (callHandler == null) {
            throw new NoSuchElementException("Invalid callId specified:  "
                + callId);
        }

        if (fromMember) {
            callHandler.getMemberReceiver().setRecordFromMember(enabled,
                recordingFile, recordingType);
        } else {
            callHandler.getMemberSender().setRecordToMember(enabled,
                recordingFile, recordingType);
        }
    }

    /**
     * Play a treatment to a member
     */
    public static void playTreatmentToCall(String callId, String treatment)
            throws NoSuchElementException, IOException {

        playTreatmentToCall(callId, treatment, (TreatmentDoneListener) null);
    }

    public static void playTreatmentToCall(String callId, String treatment,
            double[] volume) throws NoSuchElementException, IOException {
        // NOTE(review): this overload is an empty no-op -- the volume variant
        // appears to be unimplemented; confirm whether callers expect it to work.
    }

    /**
     * Plays a treatment to an ESTABLISHED call, optionally notifying the
     * listener when the treatment completes.
     *
     * @throws NoSuchElementException if no active call matches {@code callId}
     * @throws IOException if the call is not established
     */
    public static void playTreatmentToCall(String callId, String treatment,
            TreatmentDoneListener treatmentDoneListener)
            throws NoSuchElementException, IOException {

        CallHandler callHandler = findCall(callId);

        if (callHandler == null) {
            throw new NoSuchElementException("Invalid callId specified:  "
                + callId);
        }

        if (callHandler.isCallEstablished() == false) {
            throw new IOException("Call is not ESTABLISHED:  " + callId);
        }

        callHandler.playTreatmentToCall(treatment, treatmentDoneListener);
    }

    public TreatmentManager playTreatmentToCall(String treatment)
            throws IOException {

        return playTreatmentToCall(treatment, (TreatmentDoneListener) null);
    }

    /**
     * Creates a TreatmentManager for this call using the conference's media
     * settings, registers the optional done-listener, and starts it playing.
     */
    public TreatmentManager playTreatmentToCall(String treatment,
            TreatmentDoneListener treatmentDoneListener) throws IOException {

        if (Logger.logLevel >= Logger.LOG_MOREINFO) {
            Logger.println("Playing treatment " + treatment + " to "
                + cp.getCallId());
        }

        TreatmentManager treatmentManager = new TreatmentManager(treatment, 0,
            conferenceManager.getMediaInfo().getSampleRate(),
            conferenceManager.getMediaInfo().getChannels());

        if (treatmentDoneListener != null) {
            treatmentManager.addTreatmentDoneListener(treatmentDoneListener);
        }

        addTreatment(treatmentManager);
        return treatmentManager;
    }

    /**
     * get the IP address and port used to receive packets for this call.
     */
    public InetSocketAddress getReceiveAddress() {
        return memberReceiver.getReceiveAddress();
    }

    /**
     * get the IP address and port used to send packets to this call.
     */
    public InetSocketAddress getSendAddress() {
        return memberSender.getSendAddress();
    }

    /**
     * Determine if this is the first member to join the conference.
     * This is called to determine if a special audio treatment
     * should be played.
     */
    public boolean isFirstMember() {
        return conferenceManager.isFirstMember();
    }

    /**
     * For debugging...
     *
     * True when more than duplicateCallLimit active calls already use the
     * given phone number.
     */
    public static boolean tooManyDuplicateCalls(String phoneNumber) {
        synchronized(activeCalls) {
            int n = 0;

            for (int i = 0; i < activeCalls.size(); i++) {
                CallHandler call = (CallHandler)activeCalls.elementAt(i);

                CallParticipant cp = call.getCallParticipant();

                if (cp.getPhoneNumber().equals(phoneNumber)) {
                    n++;
                }
            }

            if (n > duplicateCallLimit) {
                return true;
            }

            return false;
        }
    }

    public static void setDuplicateCallLimit(int duplicateCallLimit) {
        CallHandler.duplicateCallLimit = duplicateCallLimit;
    }

    public static int getDuplicateCallLimit() {
        return duplicateCallLimit;
    }

    public static void enablePSTNCalls(boolean enablePSTNCalls) {
        CallHandler.enablePSTNCalls = enablePSTNCalls;
    }

    public static boolean enablePSTNCalls() {
        return enablePSTNCalls;
    }

    /**
     * String representation of this Caller
     * @return the string representation of this Caller
     */
    public String toString() {
        return cp.toString();
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.security.tests.java.security; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.security.DigestException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.Provider; import java.security.Security; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class MessageDigest2Test extends junit.framework.TestCase { private static final String MESSAGEDIGEST_ID = "MessageDigest."; private Map<Provider, List<String>> digestAlgs = new HashMap<Provider, List<String>>(); private static final byte[] AR1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 }; private static final byte[] AR2 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 }; private static final String MESSAGE = "abc"; private static final byte[] MESSAGE_DIGEST = { -87, -103, 62, 54, 71, 6, -127, 106, -70, 62, 37, 113, 120, 80, -62, 108, -100, -48, -40, -99, }; private static final byte[] MESSAGE_DIGEST_63_As = { 3, -16, -97, 91, 21, -118, 122, -116, -38, -39, 32, -67, -36, 41, -72, 28, 24, -91, 81, -11, }; private static 
final byte[] MESSAGE_DIGEST_64_As = { 0, -104, -70, -126, 75, 92, 22, 66, 123, -41, -95, 18, 42, 90, 68, 42, 37, -20, 100, 77, }; private static final byte[] MESSAGE_DIGEST_65_As = { 17, 101, 83, 38, -57, 8, -41, 3, 25, -66, 38, 16, -24, -91, 125, -102, 91, -107, -99, 59, }; /** * java.security.MessageDigest#MessageDigest(java.lang.String) */ public void test_constructor() { for (List<String> algorithms : digestAlgs.values()) { for (String algorithm : algorithms) { MessageDigestStub md = new MessageDigestStub(algorithm); assertEquals(algorithm, md.getAlgorithm()); assertEquals(0, md.getDigestLength()); assertNull(md.getProvider()); } } } /** * java.security.MessageDigest#clone() */ public void test_clone() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest d1 = MessageDigest.getInstance(algorithm, e.getKey().getName()); for (byte b = 0; b < 84; b++) { d1.update(b); } MessageDigest d2 = (MessageDigest) d1.clone(); d1.update((byte) 1); d2.update((byte) 1); assertTrue("cloned hash differs from original for algorithm " + algorithm, MessageDigest.isEqual(d1.digest(), d2.digest())); } } } private static final byte[] SHA_DATA_2 = { 70, -54, 124, 120, -29, 57, 56, 119, -108, -54, -97, -76, -97, -50, -63, -73, 2, 85, -53, -79, }; private void testSerializationSHA_DATA_2(MessageDigest sha) throws Exception { sha.reset(); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream output = new DataOutputStream(out); // Made up data output.writeUTF("tests.api.java.security.MessageDigestTest$InitializerFieldsTest3"); output.writeInt(0); // class modifiers output.writeUTF("java.io.Serializable"); // interfaces // Fields output.writeUTF("sub_toBeNotSerialized"); // name output.writeInt(9); // modifiers output.writeUTF("Ljava/lang/String;"); // signature output.writeUTF("sub_toBeNotSerialized2"); // name output.writeInt(9); // modifiers output.writeUTF("Ljava/lang/String;"); // 
signature output.writeUTF("sub_toBeSerialized"); // name output.writeInt(1); // modifiers output.writeUTF("Ljava/lang/String;"); // signature output.writeUTF("sub_toBeSerialized3"); // name output.writeInt(1); // modifiers output.writeUTF("Ljava/lang/String;"); // signature output.writeUTF("sub_toBeSerialized4"); // name output.writeInt(1); // modifiers output.writeUTF("Ljava/lang/String;"); // signature output.writeUTF("sub_toBeSerialized5"); // name output.writeInt(1); // modifiers output.writeUTF("Ljava/lang/String;"); // signature // clinit output.writeUTF("<clinit>"); // name output.writeInt(8); // modifiers output.writeUTF("()V"); // signature // constructors output.writeUTF("<init>"); // name output.writeInt(0); // modifiers output.writeUTF("()V"); // signature // methods output.writeUTF("equals"); // name output.writeInt(1); // modifiers output.writeUTF("(Ljava.lang.Object;)Z"); // signature output.flush(); byte[] data = out.toByteArray(); byte[] hash = sha.digest(data); assertTrue("SHA_DATA_2 NOT ok", Arrays.equals(hash, SHA_DATA_2)); } private static final byte[] SHA_DATA_1 = { 90, 36, 111, 106, -32, 38, 4, 126, 21, -51, 107, 45, -64, -68, -109, 112, -31, -46, 34, 115, }; private void testSerializationSHA_DATA_1(MessageDigest sha) throws Exception { sha.reset(); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream output = new DataOutputStream(out); // Made up data output.writeUTF("tests.api.java.security.MessageDigestTest$OptionalDataNotRead"); // name output.writeInt(0); // class modifiers output.writeUTF("java.io.Serializable"); // interfaces // Fields output.writeUTF("class$0"); // name output.writeInt(8); // modifiers output.writeUTF("Ljava/lang/Class;"); // signature output.writeUTF("field1"); // name output.writeInt(2); // modifiers output.writeUTF("I"); // signature output.writeUTF("field2"); // name output.writeInt(2); // modifiers output.writeUTF("I"); // signature // clinit output.writeUTF("<clinit>"); // name 
output.writeInt(8); // modifiers output.writeUTF("()V"); // signature // constructors output.writeUTF("<init>"); // name output.writeInt(1); // modifiers output.writeUTF("()V"); // signature output.flush(); byte[] data = out.toByteArray(); byte[] hash = sha.digest(data); assertTrue("SHA_DATA_1 NOT ok", Arrays.equals(hash, SHA_DATA_1)); } /** * java.security.MessageDigest#digest() */ public void test_digest() throws Exception { MessageDigest sha = MessageDigest.getInstance("SHA"); assertNotNull(sha); sha.update(MESSAGE.getBytes()); byte[] digest = sha.digest(); assertTrue("bug in SHA", MessageDigest.isEqual(digest, MESSAGE_DIGEST)); sha.reset(); for (int i = 0; i < 63; i++) { // just under buffer capacity sha.update((byte) 'a'); } digest = sha.digest(); assertTrue("bug in SHA", MessageDigest.isEqual(digest, MESSAGE_DIGEST_63_As)); sha.reset(); for (int i = 0; i < 64; i++) { // exact SHA buffer capacity sha.update((byte) 'a'); } digest = sha.digest(); assertTrue("bug in SHA", MessageDigest.isEqual(digest, MESSAGE_DIGEST_64_As)); sha.reset(); for (int i = 0; i < 65; i++) { // just above SHA buffer capacity sha.update((byte) 'a'); } digest = sha.digest(); assertTrue("bug in SHA", MessageDigest.isEqual(digest, MESSAGE_DIGEST_65_As)); testSerializationSHA_DATA_1(sha); testSerializationSHA_DATA_2(sha); } /** * java.security.MessageDigest#digest(byte[]) */ public void test_digest$B() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest digest = MessageDigest.getInstance(algorithm, e.getKey().getName()); assertNotNull(digest); digest.digest(AR1); } } } /** * java.security.MessageDigest#digest(byte[], int, int) */ public void test_digest$BII() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest digest = MessageDigest.getInstance(algorithm, e.getKey().getName()); assertNotNull(digest); int len = 
digest.getDigestLength(); byte[] digestBytes = new byte[len]; digest.digest(digestBytes, 0, digestBytes.length); } try { MessageDigest.getInstance("SHA").digest(new byte[] {}, Integer.MAX_VALUE, 755); fail(); } catch (IllegalArgumentException expected) { } } } /** * java.security.MessageDigest#update(byte[], int, int) */ public void test_update$BII() throws Exception { try { MessageDigest.getInstance("SHA").update(new byte[] {}, Integer.MAX_VALUE, Integer.MAX_VALUE); fail(); } catch (IllegalArgumentException expected) { } } /** * java.security.MessageDigest#getAlgorithm() */ public void test_getAlgorithm() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = MessageDigest.getInstance(algorithm, e.getKey().getName()); assertEquals(algorithm, md.getAlgorithm()); } } } /** * java.security.MessageDigest#getDigestLength() */ public void test_getDigestLength() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = MessageDigest.getInstance(algorithm, e.getKey().getName()); assertTrue("length not ok", md.getDigestLength() > 0); } } } /** * java.security.MessageDigest#getInstance(java.lang.String) */ public void test_getInstanceLjava_lang_String() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = MessageDigest.getInstance(algorithm); assertNotNull(md); } } try { MessageDigest.getInstance("UnknownDigest"); fail("expected NoSuchAlgorithmException"); } catch (NoSuchAlgorithmException expected) { } } /** * java.security.MessageDigest#getInstance(java.lang.String, * java.lang.String) */ public void test_getInstanceLjava_lang_StringLjava_lang_String() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = 
MessageDigest.getInstance(algorithm, e.getKey().getName()); assertNotNull(md); } } for (List<String> algorithms : digestAlgs.values()) { for (String algorithm : algorithms) { try { MessageDigest.getInstance(algorithm, "UnknownProvider"); fail("expected NoSuchProviderException"); } catch (NoSuchProviderException expected) { } } } for (Provider provider : digestAlgs.keySet()) { try { MessageDigest.getInstance("UnknownDigest", provider.getName()); fail("expected NoSuchAlgorithmException"); } catch (NoSuchAlgorithmException expected) { } } for (Provider provider : digestAlgs.keySet()) { try { MessageDigest.getInstance(null, provider.getName()); fail("expected NullPointerException"); } catch (NullPointerException expected) { } } try { MessageDigest.getInstance("AnyDigest", (String)null); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } /** * java.security.MessageDigest#getInstance(java.lang.String, * java.security.Provider) */ public void test_getInstanceLjava_lang_StringLjava_security_Provider() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = MessageDigest.getInstance(algorithm, e.getKey().getName()); assertNotNull(md); } } try { MessageDigest.getInstance(null, new TestProvider()); fail("expected NullPointerException"); } catch (NullPointerException expected) { } try { MessageDigest.getInstance("UnknownDigest", new TestProvider()); fail("expected NoSuchAlgorithmException"); } catch (NoSuchAlgorithmException expected) { } try { MessageDigest.getInstance("AnyDigest", (Provider)null); fail("expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } /** * java.security.MessageDigest#getProvider() */ public void test_getProvider() throws Exception { for (Entry<Provider, List<String>> e : digestAlgs.entrySet()) { for (String algorithm : e.getValue()) { MessageDigest md = MessageDigest.getInstance(algorithm, 
e.getKey().getName()); assertNotNull("provider is null", md.getProvider()); } } } /** * java.security.MessageDigest#isEqual(byte[], byte[]) */ public void test_isEqual$B$B() { assertTrue("isEqual is not correct", MessageDigest.isEqual(AR1, AR2)); } /** * java.security.MessageDigest#toString() */ public void test_toString() throws Exception { String str = MessageDigest.getInstance("SHA").toString(); assertNotNull("toString is null", str); } protected void setUp() throws Exception { super.setUp(); Provider[] providers = Security.getProviders("MessageDigest.SHA"); for (Provider provider : providers) { digestAlgs.put(provider, getDigestAlgorithms(provider)); } } /* * Returns the digest algorithms that the given provider supports. */ private List<String> getDigestAlgorithms(Provider provider) { if (provider == null) { fail("No digest algorithms were found"); } List<String> algs = new ArrayList<String>(); for (Object key : provider.keySet()) { String algorithm = (String) key; if (algorithm.startsWith(MESSAGEDIGEST_ID) && !algorithm.contains(" ")) { algs.add(algorithm.substring(MESSAGEDIGEST_ID.length())); } } if (algs.size() == 0) { fail("No digest algorithms were found"); } return algs; } private class MessageDigestStub extends MessageDigest { public MessageDigestStub(String algorithm) { super(algorithm); } public byte[] engineDigest() { return null; } public void engineReset() { } public void engineUpdate(byte input) { } public void engineUpdate(byte[] input, int offset, int len) { } } private static class TestProvider extends Provider { protected TestProvider() { super("TestProvider", 1.0, "INFO"); } } }
package org.myrobotlab.opencv;

import static com.googlecode.javacv.cpp.opencv_core.CV_8UC1;
import static com.googlecode.javacv.cpp.opencv_core.cvMat;
import static com.googlecode.javacv.cpp.opencv_core.cvReleaseImage;
import static com.googlecode.javacv.cpp.opencv_highgui.cvDecodeImage;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import java.util.Map;

import javax.imageio.ImageIO;

import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.Logging;
import org.slf4j.Logger;

import com.googlecode.javacpp.BytePointer;
import com.googlecode.javacv.FrameGrabber;
import com.googlecode.javacv.cpp.opencv_core.CvMat;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

/**
 * A {@link FrameGrabber} that reads MJPEG frames from an IP camera's HTTP
 * stream and decodes each JPEG into an {@link IplImage}.
 *
 * <p>excellent reference - http://www.jpegcameras.com/<br>
 * foscam url http://host/videostream.cgi?user=username&amp;pwd=password<br>
 * http://192.168.0.59:60/videostream.cgi?user=admin&amp;pwd=password<br>
 * android ip cam http://192.168.0.57:8080/videofeed
 */
public class IPCameraFrameGrabber extends FrameGrabber {

	public final static Logger log = LoggerFactory.getLogger(IPCameraFrameGrabber.class);

	private URL url;
	private URLConnection connection;
	private InputStream input;
	private Map<String, List<String>> headerfields;
	// multipart boundary token parsed from the Content-Type header (may stay null)
	private String boundryKey;
	// first decoded frame, kept as a size/depth template for later frames
	private IplImage template = null;
	// NOTE(review): width/height are never assigned or read here - kept for
	// compatibility in case reflective/serialized access exists elsewhere.
	private int width, height;

	// most recently decoded frame; released before each new decode
	IplImage decoded = null;

	/**
	 * @param urlstr the camera's stream URL; a malformed URL is logged and
	 *               leaves this grabber unusable (url stays null).
	 */
	public IPCameraFrameGrabber(String urlstr) {
		try {
			url = new URL(urlstr);
		} catch (MalformedURLException e) {
			Logging.logException(e);
		}
	}

	/** Opens the HTTP connection and extracts the MJPEG multipart boundary. */
	@Override
	public void start() {
		log.info("connecting to " + url);
		try {
			connection = url.openConnection();
			headerfields = connection.getHeaderFields();
			if (headerfields.containsKey("Content-Type")) {
				List<String> ct = headerfields.get("Content-Type");
				for (int i = 0; i < ct.size(); ++i) {
					String key = ct.get(i);
					int j = key.indexOf("boundary=");
					if (j != -1) {
						boundryKey = key.substring(j + 9); // FIXME << fragile
					}
				}
			}
			input = connection.getInputStream();
		} catch (IOException e) {
			Logging.logException(e);
		}
	}

	/** Closes the stream; always clears the connection state, even on failure. */
	@Override
	public void stop() {
		try {
			if (input != null) {
				input.close();
			}
		} catch (IOException e) {
			Logging.logException(e);
		} finally {
			// original nulled these only on success, leaking state after a
			// failed close(); also guard against stop() before start()
			input = null;
			connection = null;
			url = null;
		}
	}

	@Override
	public void trigger() throws Exception {
	}

	/**
	 * Grabs the next frame and decodes it.
	 *
	 * @return the decoded frame, or null when no frame could be read. The
	 *         returned image is owned by this grabber and released on the
	 *         next call.
	 */
	@Override
	public IplImage grab() {
		try {
			if (template == null) {
				// create the template for future frames
				Logging.logTime("creating template");
				BufferedImage first = grabBufferedImage();
				if (first == null) {
					return null;
				}
				template = IplImage.createFrom(first);
				Logging.logTime("created template");
			}
			byte[] b = readImage();
			if (b == null) {
				// sub-header carried no Content-Length - nothing to decode
				return null;
			}
			CvMat mat = cvMat(1, b.length, CV_8UC1, new BytePointer(b));
			if (decoded != null) {
				// release the previous frame before decoding the next one
				cvReleaseImage(decoded);
			}
			decoded = cvDecodeImage(mat);
			Logging.logTime("pre - IplImage.cvDecodeImage");
			return decoded;
		} catch (Exception e) {
			// single catch: the original's second `catch (IOException e)` was
			// unreachable (already covered by Exception) and did not compile
			Logging.logException(e);
		}
		return null;
	}

	/**
	 * Reads one JPEG frame and decodes it with ImageIO.
	 *
	 * @return the decoded image, or null when the stream produced no frame.
	 */
	public BufferedImage grabBufferedImage() throws Exception, IOException {
		byte[] jpeg = readImage();
		if (jpeg == null) {
			// original NPE'd here on a missing Content-Length
			return null;
		}
		return ImageIO.read(new ByteArrayInputStream(jpeg));
	}

	/**
	 * Reads the next multipart sub-header and the JPEG body it announces.
	 *
	 * @return the raw JPEG bytes, or null when the sub-header lacks a
	 *         Content-Length line.
	 */
	byte[] readImage() throws Exception, IOException {
		byte[] buffer = new byte[4096];// MTU or JPG Frame Size?
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		StringBuilder sb = new StringBuilder();
		int c;
		// read http subheader - terminated by a blank line (CRLF CRLF)
		while ((c = input.read()) != -1) {
			if (c > 0) {
				sb.append((char) c);
				if (c == 13) {
					sb.append((char) input.read());// '10'+
					c = input.read();
					sb.append((char) c);
					if (c == 13) {
						sb.append((char) input.read());// '10'
						break; // done with subheader
					}
				}
			}
		}
		// find embedded jpeg in stream
		String subheader = sb.toString();
		log.debug(subheader);
		int c0 = subheader.indexOf("Content-Length: ");
		int c1 = subheader.indexOf('\r', c0);
		if (c0 < 0) {
			log.info("no content length returning null");
			return null;
		}
		log.info("found content length");
		c0 += 16; // skip past "Content-Length: "
		int contentLength = Integer.parseInt(subheader.substring(c0, c1).trim());
		log.debug("Content-Length: " + contentLength);
		// adaptive size - careful - don't want a 2G jpeg
		if (contentLength > buffer.length) {
			buffer = new byte[contentLength];
		}
		int total = 0;
		int n;
		while ((n = input.read(buffer, 0, contentLength - total)) != -1) {
			total += n;
			baos.write(buffer, 0, n);
			if (total == contentLength) {
				break;
			}
		}
		baos.flush();
		// consume the trailing CRLF CRLF after the body
		input.read();// \r
		input.read();// \n
		input.read();// \r
		input.read();// \n
		return baos.toByteArray();
	}

	@Override
	public void release() throws Exception {
	}
}
package kevindeelen.wolfcode.videreremote.activity;

import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;

import java.util.ArrayList;
import java.util.List;

import kevindeelen.wolfcode.videreremote.R;
import kevindeelen.wolfcode.videreremote.adapter.NavigationDrawerAdapter;
import kevindeelen.wolfcode.videreremote.model.NavigationDrawerItem;

/**
 * Navigation-drawer fragment backed by a {@link RecyclerView}. Labels come
 * from the {@code nav_drawer_labels} string array; item taps are forwarded to
 * an optional {@link FragmentDrawerListener}.
 *
 * Created by Ravi on 29/07/15.
 */
public class FragmentDrawer extends Fragment {

    private static String TAG = FragmentDrawer.class.getSimpleName();

    private RecyclerView recyclerView;
    private ActionBarDrawerToggle mDrawerToggle;
    private DrawerLayout mDrawerLayout;
    private NavigationDrawerAdapter adapter;
    private View containerView;
    // loaded from resources in onCreate(); null until then
    private static String[] titles = null;
    private FragmentDrawerListener drawerListener;

    public FragmentDrawer() {
    }

    /** Registers the listener notified when a drawer row is tapped. */
    public void setDrawerListener(FragmentDrawerListener listener) {
        this.drawerListener = listener;
    }

    /**
     * Builds one {@link NavigationDrawerItem} per drawer label.
     *
     * @return the drawer items; empty when called before {@link #onCreate}
     *         has loaded the labels (the original NPE'd in that case).
     */
    public static List<NavigationDrawerItem> getData() {
        List<NavigationDrawerItem> data = new ArrayList<>();
        if (titles == null) {
            return data;
        }
        // preparing navigation drawer items
        for (String title : titles) {
            NavigationDrawerItem navItem = new NavigationDrawerItem();
            navItem.setTitle(title);
            data.add(navItem);
        }
        return data;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // drawer labels
        titles = getActivity().getResources().getStringArray(R.array.nav_drawer_labels);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflating view layout
        View layout = inflater.inflate(R.layout.fragment_navigation_drawer, container, false);
        recyclerView = (RecyclerView) layout.findViewById(R.id.drawerList);

        adapter = new NavigationDrawerAdapter(getActivity(), getData());
        recyclerView.setAdapter(adapter);
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        recyclerView.addOnItemTouchListener(
                new RecyclerTouchListener(getActivity(), recyclerView, new ClickListener() {
                    @Override
                    public void onClick(View view, int position) {
                        // guard: no listener may have been registered yet
                        if (drawerListener != null) {
                            drawerListener.onDrawerItemSelected(view, position);
                        }
                        mDrawerLayout.closeDrawer(containerView);
                    }

                    @Override
                    public void onLongClick(View view, int position) {
                    }
                }));
        return layout;
    }

    /**
     * Wires this fragment into the activity's {@link DrawerLayout} and
     * toolbar, installing an {@link ActionBarDrawerToggle} that fades the
     * toolbar while the drawer slides.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout, final Toolbar toolbar) {
        containerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;
        mDrawerToggle = new ActionBarDrawerToggle(getActivity(), drawerLayout, toolbar,
                R.string.drawer_open, R.string.drawer_close) {
            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                getActivity().invalidateOptionsMenu();
            }

            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                getActivity().invalidateOptionsMenu();
            }

            @Override
            public void onDrawerSlide(View drawerView, float slideOffset) {
                super.onDrawerSlide(drawerView, slideOffset);
                // fade the toolbar out to half opacity as the drawer opens
                toolbar.setAlpha(1 - slideOffset / 2);
            }
        };
        mDrawerLayout.setDrawerListener(mDrawerToggle);
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });
    }

    /** Row click/long-click callbacks used by {@link RecyclerTouchListener}. */
    public static interface ClickListener {
        public void onClick(View view, int position);

        public void onLongClick(View view, int position);
    }

    /**
     * Translates raw RecyclerView touch events into row click/long-click
     * callbacks via a {@link GestureDetector}.
     */
    static class RecyclerTouchListener implements RecyclerView.OnItemTouchListener {

        private GestureDetector gestureDetector;
        private ClickListener clickListener;

        public RecyclerTouchListener(Context context, final RecyclerView recyclerView,
                                     final ClickListener clickListener) {
            this.clickListener = clickListener;
            gestureDetector =
                    new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() {
                        @Override
                        public boolean onSingleTapUp(MotionEvent e) {
                            return true;
                        }

                        @Override
                        public void onLongPress(MotionEvent e) {
                            View child = recyclerView.findChildViewUnder(e.getX(), e.getY());
                            if (child != null && clickListener != null) {
                                clickListener.onLongClick(child,
                                        recyclerView.getChildPosition(child));
                            }
                        }
                    });
        }

        @Override
        public boolean onInterceptTouchEvent(RecyclerView rv, MotionEvent e) {
            View child = rv.findChildViewUnder(e.getX(), e.getY());
            if (child != null && clickListener != null && gestureDetector.onTouchEvent(e)) {
                clickListener.onClick(child, rv.getChildPosition(child));
            }
            // never consume - let the RecyclerView handle scrolling normally
            return false;
        }

        @Override
        public void onTouchEvent(RecyclerView rv, MotionEvent e) {
        }

        @Override
        public void onRequestDisallowInterceptTouchEvent(boolean disallowIntercept) {
        }
    }

    /** Implemented by the host activity to react to drawer selections. */
    public interface FragmentDrawerListener {
        public void onDrawerItemSelected(View view, int position);
    }
}
/*
 * Copyright (C) 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.api.gwt.oauth2.client;

import com.google.api.gwt.oauth2.client.Auth.TokenInfo;
import com.google.gwt.core.client.Callback;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.core.client.testing.StubScheduler;

import junit.framework.TestCase;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Tests for {@link Auth}.
 *
 * @author jasonhall@google.com (Jason Hall)
 */
public class AuthTest extends TestCase {

  private MockAuth auth;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Reset the shared mock clock: testExpiresInfo() advances MockClock.now
    // and never restores it, which made tests order-dependent (e.g.
    // testFinish()'s expected expiration "1.0005E7" assumes now == 5000).
    MockClock.now = 5000;
    auth = new MockAuth();
  }

  /**
   * When the request does not have a token stored, the popup is used to get the
   * token.
   */
  public void testLogin_noToken() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // The popup was used and the iframe wasn't.
    assertTrue(auth.loggedInViaPopup);
    assertEquals("url?client_id=clientId&response_type=token&scope=scope&redirect_uri=popup.html",
        auth.lastUrl);
  }

  /**
   * When the token is found in cookies, but may expire soon, the popup will be
   * used to refresh the token.
   */
  public void testLogin_expiringSoon() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");

    // Storing a token that expires soon (in just under 10 minutes)
    TokenInfo info = new TokenInfo();
    info.accessToken = "expired";
    info.expires = String.valueOf(MockClock.now + 10 * 60 * 1000 - 1);
    auth.setToken(req, info);

    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    assertTrue(auth.expiringSoon(info));

    assertTrue(auth.loggedInViaPopup);
    assertEquals("url?client_id=clientId&response_type=token&scope=scope&redirect_uri=popup.html",
        auth.lastUrl);
  }

  /**
   * When the token is found in cookies and will not expire soon, neither popup
   * nor iframe is used, and the token is immediately passed to the callback.
   */
  public void testLogin_notExpiringSoon() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");

    // Storing a token that does not expire soon (in exactly 10 minutes)
    TokenInfo info = new TokenInfo();
    info.accessToken = "notExpiringSoon";
    info.expires = String.valueOf(MockClock.now + 10 * 60 * 1000);
    auth.setToken(req, info);

    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // A deferred command will have been scheduled. Execute it.
    List<ScheduledCommand> deferred = ((StubScheduler) auth.scheduler).getScheduledCommands();
    assertEquals(1, deferred.size());
    deferred.get(0).execute();

    // The iframe was used and the popup wasn't.
    assertFalse(auth.loggedInViaPopup);

    // onSuccess() was called and onFailure() wasn't.
    assertEquals("notExpiringSoon", callback.token);
    assertNull(callback.failure);
  }

  /**
   * When the token is found in cookies and does not specify an expire time, the
   * iframe will be used to refresh the token without displaying the popup.
   */
  public void testLogin_nullExpires() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");

    // Storing a token with a null expires time
    TokenInfo info = new TokenInfo();
    info.accessToken = "longToken";
    info.expires = null;
    auth.setToken(req, info);

    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // TODO(jasonhall): When Auth supports immediate mode for supporting
    // providers, a null expiration will trigger an iframe immediate-mode
    // refresh. Until then, the popup is always used.
    assertTrue(auth.loggedInViaPopup);
  }

  /**
   * When finish() is called, the callback passed to login() is executed with
   * the correct token, and a cookie is set with relevant information, expiring
   * in the correct amount of time.
   */
  public void testFinish() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // Simulates the auth provider's response
    auth.finish("#access_token=foo&expires_in=10000");

    // onSuccess() was called and onFailure() wasn't
    assertEquals("foo", callback.token);
    assertNull(callback.failure);

    // A token was stored as a result
    InMemoryTokenStore ts = (InMemoryTokenStore) auth.tokenStore;
    assertEquals(1, ts.store.size());

    // That token is clientId+scope -> foo+expires
    TokenInfo info = TokenInfo.fromString(ts.store.get("clientId-----scope"));
    assertEquals("foo", info.accessToken);
    assertEquals("1.0005E7", info.expires);
  }

  /**
   * If finish() is passed a bad hash from the auth provider, a RuntimeException
   * will be passed to the callback.
   */
  public void testFinish_badHash() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // Simulates the auth provider's response
    auth.finish("#foobarbaznonsense");

    // onFailure() was called with a RuntimeException stating the error.
    assertNotNull(callback.failure);
    assertTrue(callback.failure instanceof RuntimeException);
    assertEquals("Could not find access_token in hash #foobarbaznonsense",
        ((RuntimeException) callback.failure).getMessage());

    // onSuccess() was not called.
    assertNull(callback.token);
  }

  /**
   * If finish() is passed an access token but no expires time, a TokenInfo will
   * be stored without an expiration time. The next time auth is requested, the
   * iframe will be used, see {@link #testLogin_nullExpires()}.
   */
  public void testFinish_noExpires() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // Simulates the auth provider's response
    auth.finish("#access_token=foo");

    // onSuccess() was called and onFailure() wasn't
    assertEquals("foo", callback.token);
    assertNull(callback.failure);

    // A token was stored as a result
    InMemoryTokenStore ts = (InMemoryTokenStore) auth.tokenStore;
    assertEquals(1, ts.store.size());

    // That token is clientId+scope -> foo+expires
    TokenInfo info = TokenInfo.fromString(ts.store.get("clientId-----scope"));
    assertEquals("foo", info.accessToken);
    assertNull(info.expires);
  }

  /**
   * If finish() is passed a hash that describes an error condition, a
   * RuntimeException will be passed to onFailure() with the provider's auth
   * string.
   */
  public void testFinish_error() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    MockCallback callback = new MockCallback();
    auth.login(req, callback);

    // Simulates the auth provider's error response, with the error first, last,
    // and in the middle of the hash, and as the only element in the hash. Also
    // finds error descriptions and error URIs.
    assertError(
        callback, "#error=redirect_uri_mismatch", "Error from provider: redirect_uri_mismatch");
    assertError(callback, "#error=redirect_uri_mismatch&foo=bar",
        "Error from provider: redirect_uri_mismatch");
    assertError(callback, "#foo=bar&error=redirect_uri_mismatch",
        "Error from provider: redirect_uri_mismatch");
    assertError(callback, "#foo=bar&error=redirect_uri_mismatch&bar=baz",
        "Error from provider: redirect_uri_mismatch");
    assertError(callback, "#foo=bar&error=redirect_uri_mismatch&error_description=Bad dog!",
        "Error from provider: redirect_uri_mismatch (Bad dog!)");
    assertError(callback, "#foo=bar&error=redirect_uri_mismatch&error_uri=example.com",
        "Error from provider: redirect_uri_mismatch; see: example.com");
    assertError(callback,
        "#foo=bar&error=redirect_uri_mismatch&error_description=Bad dog!&error_uri=example.com",
        "Error from provider: redirect_uri_mismatch (Bad dog!); see: example.com");

    // If the hash contains a key that ends in error, but not error=, the error
    // will be that the hash was malformed
    assertError(callback, "#wxyzerror=redirect_uri_mismatch",
        "Could not find access_token in hash #wxyzerror=redirect_uri_mismatch");
  }

  /** Feeds {@code hash} to finish() and asserts onFailure() got {@code error}. */
  private void assertError(MockCallback callback, String hash, String error) {
    // Simulates the auth provider's error response.
    auth.finish(hash);

    // onFailure() was called with a RuntimeException stating the error.
    assertNotNull(callback.failure);
    assertTrue(callback.failure instanceof RuntimeException);
    assertEquals(error, ((RuntimeException) callback.failure).getMessage());

    // onSuccess() was not called.
    assertNull(callback.token);
  }

  public void testExpiresInfo() {
    AuthRequest req = new AuthRequest("url", "clientId").withScopes("scope");
    auth.login(req, new MockCallback());

    // Simulates the auth provider's response (expires in 10s)
    auth.finish("#access_token=foo&expires_in=10");

    MockClock.now += 1000; // Fast forward 1s
    // use the delta overload - the two-arg form falls back to boxed
    // Object equality, which is fragile for doubles
    assertEquals(9000.0, auth.expiresIn(req), 0.0);

    MockClock.now += 10000; // Fast forward another 10s
    assertEquals(-1000.0, auth.expiresIn(req), 0.0);

    // A request that has no corresponding token expires at negative infinity
    AuthRequest newReq = new AuthRequest("another-url", "another-clientId").withScopes("scope");
    assertEquals(Double.NEGATIVE_INFINITY, auth.expiresIn(newReq), 0.0);
  }

  /** Auth subclass that records the popup URL instead of opening a popup. */
  private static class MockAuth extends Auth {
    private boolean loggedInViaPopup;
    private String lastUrl;

    private static final TokenStore TOKEN_STORE = new InMemoryTokenStore();

    MockAuth() {
      super(TOKEN_STORE, new MockClock(), new MockUrlCodex(), new StubScheduler(), "popup.html");
    }

    @Override
    void doLogin(String authUrl, Callback<String, Throwable> callback) {
      loggedInViaPopup = true;
      lastUrl = authUrl;
    }
  }

  /** Clock with a settable static "now"; reset in setUp() for isolation. */
  static class MockClock implements Auth.Clock {
    static double now = 5000;

    @Override
    public double now() {
      return now;
    }
  }

  /** Identity codex - leaves URLs unencoded so assertions stay readable. */
  static class MockUrlCodex implements Auth.UrlCodex {
    @Override
    public String encode(String url) {
      return url;
    }

    @Override
    public String decode(String url) {
      return url;
    }
  }

  private static class InMemoryTokenStore implements TokenStore {
    Map<String, String> store = new HashMap<String, String>();

    @Override
    public void set(String key, String value) {
      store.put(key, value);
    }

    @Override
    public String get(String key) {
      return store.get(key);
    }

    @Override
    public void clear() {
      store.clear();
    }
  }

  /** Callback that records the token or failure it receives. */
  private static class MockCallback implements Callback<String, Throwable> {
    private String token;
    private Throwable failure;

    @Override
    public void onSuccess(String token) {
      this.token = token;
    }

    @Override
    public void onFailure(Throwable caught) {
      this.failure = caught;
    }
  }
}
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.distributed.build_slave; import com.facebook.buck.core.build.engine.RuleDepsCache; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.util.log.Logger; import com.facebook.buck.distributed.ArtifactCacheByBuildRule; import com.facebook.buck.distributed.ClientStatsTracker; import com.facebook.buck.distributed.build_slave.DistributableBuildGraph.DistributableNode; import com.facebook.buck.util.RichStream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Queues; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.Map; import java.util.Objects; import java.util.Queue; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Helper class for creating a {@link BuildTargetsQueue} optimized by using the remote and local * artifact caches, and to upload critical, missing 
artifacts from the local cache to the remote * cache. */ public class CacheOptimizedBuildTargetsQueueFactory { private static final Logger LOG = Logger.get(CacheOptimizedBuildTargetsQueueFactory.class); private final BuildRuleResolver resolver; private final ArtifactCacheByBuildRule artifactCache; private final RuleDepsCache ruleDepsCache; private final boolean isDeepBuild; private final boolean shouldBuildSelectedTargetsLocally; private class GraphTraversalData { Map<String, Set<String>> allReverseDeps = new HashMap<>(); Map<String, Set<String>> allForwardDeps = new HashMap<>(); Set<BuildRule> visitedRules = new HashSet<>(); Set<BuildRule> prunedRules = new HashSet<>(); Set<String> uncachableTargets = new HashSet<>(); Set<String> buildLocallyTargets = new HashSet<>(); } public CacheOptimizedBuildTargetsQueueFactory( BuildRuleResolver resolver, ArtifactCacheByBuildRule artifactCache, boolean isDeepRemoteBuild, RuleDepsCache ruleDepsCache, boolean shouldBuildSelectedTargetsLocally) { this.resolver = resolver; this.artifactCache = artifactCache; this.isDeepBuild = isDeepRemoteBuild; this.ruleDepsCache = ruleDepsCache; this.shouldBuildSelectedTargetsLocally = shouldBuildSelectedTargetsLocally; if (isDeepBuild) { LOG.info("Deep build requested. Will not prune BuildTargetsQueue using the remote cache."); } } private boolean doesRuleNeedToBeScheduled(BuildRule rule) { if (isDeepBuild || !rule.isCacheable()) { // We need to schedule all uncachables in order to parse the nodes below them. 
return true; } boolean canBeSkipped = artifactCache.localContains(rule) || artifactCache.remoteContains(rule); if (canBeSkipped && hasMissingCachableRuntimeDeps(rule)) { canBeSkipped = false; LOG.verbose( "Target [%s] is present in the cache, but still needs to be scheduled, " + "because some of its cachable runtime dependencies are missing.", ruleToTarget(rule)); // TODO (alisdair, shivanker): Ideally we should not be scheduling `rule`, but only the // missing cachable runtime deps themselves, as direct dependencies of the parent rule. // Because otherwise, we might have a scenario where the some uncachable runtime deps get // built twice on different minions because of scheduling `rule` on its own. } else if (canBeSkipped) { LOG.verbose( "Target [%s] can be skipped because it is already present in the cache.", ruleToTarget(rule)); } return !canBeSkipped; } private Queue<BuildRule> processTopLevelTargets(Iterable<BuildTarget> targetsToBuild) { return RichStream.from(targetsToBuild) .map(resolver::getRule) .collect(Collectors.toCollection(LinkedList::new)); } private void prewarmRemoteCache(Set<BuildRule> rules) { if (!isDeepBuild) { artifactCache.prewarmRemoteContains( rules.stream().filter(BuildRule::isCacheable).collect(ImmutableSet.toImmutableSet())); } } private boolean hasMissingCachableRuntimeDeps(BuildRule rule) { prewarmRemoteCache(ruleDepsCache.getRuntimeDeps(rule)); Stream<BuildRule> missingCachableRuntimeDeps = ruleDepsCache .getRuntimeDeps(rule) .stream() .filter( dependency -> dependency.isCacheable() && !artifactCache.remoteContains(dependency) && !artifactCache.localContains(dependency)); return missingCachableRuntimeDeps.count() > 0; } private void uploadRuleIfRequired(BuildRule rule) { String targetName = ruleToTarget(rule); if (rule.isCacheable() && !artifactCache.remoteContains(rule) && artifactCache.localContains(rule)) { // Let's upload this rule. 
      LOG.debug(
          "Uploading target [%s] because it is present in the local cache, but not in the "
              + "remote cache.",
          targetName);
      try {
        artifactCache.uploadFromLocal(rule);
      } catch (IOException e) {
        LOG.error(e, "Failed to upload target [%s]", targetName);
        throw new RuntimeException(e);
      }
    }
  }

  /** Uploads the rule itself plus each of its runtime deps, where required (see above). */
  private void uploadRuleAndRuntimeDeps(BuildRule rule) {
    uploadRuleIfRequired(rule);
    ruleDepsCache.getRuntimeDeps(rule).forEach(this::uploadRuleIfRequired);
  }

  /**
   * Breadth-first traversal of the action graph starting from the given queue of rules.
   * Records, per visited rule: uncachable targets, 'build locally' targets, and the
   * forward/reverse dependency edges between targets that actually need scheduling.
   * Dependencies that can be served from a cache are uploaded (if a local cache is present)
   * and then pruned from the traversal. NOTE: the queue is consumed (mutated) by this method.
   */
  private GraphTraversalData traverseActionGraph(Queue<BuildRule> buildRulesToProcess) {
    GraphTraversalData results = new GraphTraversalData();

    results.visitedRules.addAll(buildRulesToProcess);

    while (!buildRulesToProcess.isEmpty()) {
      BuildRule rule = buildRulesToProcess.remove();
      String target = ruleToTarget(rule);

      if (!rule.isCacheable()) {
        results.uncachableTargets.add(target);
      }
      if (shouldBuildSelectedTargetsLocally && rule.shouldBuildLocally()) {
        results.buildLocallyTargets.add(target);
      }

      // Forward-deps entry is created even for leaf rules, so later lookups never miss.
      results.allForwardDeps.put(target, new HashSet<>());

      // Get all build dependencies (regular and runtime)
      ImmutableSet<BuildRule> allDeps = ImmutableSet.copyOf(ruleDepsCache.get(rule));

      for (BuildRule dependencyRule : allDeps) {
        // Uploads need to happen regardless of something needs to be scheduled or not.
        // If it is not scheduled, distributed build must be planning to use it.
        // If it is scheduled and we have it locally, distributed build is going to benefit from it.
        // But to avoid the possible additional cost of rule-key calculation, we should check if we
        // even have any local cache or not.
        if (artifactCache.isLocalCachePresent()) {
          uploadRuleAndRuntimeDeps(dependencyRule);
        }

        if (!doesRuleNeedToBeScheduled(dependencyRule)) {
          // TODO(shivanker): Re-distribute new found sub-tree of work if contains returned true,
          // but actual fetch failed. Since multi-contains is only best-effort, it might turn out
          // that when we finally need to fetch this target, it's missing. Then the slave who is
          // supposed to build it will end up building the whole sub-tree locally. Ideally, we
          // should be able to re-distribute this new-found chunk of work.
          continue;
        }

        String dependencyTarget = ruleToTarget(dependencyRule);
        if (!results.allReverseDeps.containsKey(dependencyTarget)) {
          results.allReverseDeps.put(dependencyTarget, new HashSet<>());
        }
        Objects.requireNonNull(results.allReverseDeps.get(dependencyTarget)).add(target);
        Objects.requireNonNull(results.allForwardDeps.get(target)).add(dependencyTarget);

        // Enqueue each dependency at most once.
        if (!results.visitedRules.contains(dependencyRule)) {
          results.visitedRules.add(dependencyRule);
          buildRulesToProcess.add(dependencyRule);
        }
      }
    }

    return results;
  }

  /**
   * Full pipeline: enumerate every rule in the graph, pre-warm the remote cache for all of them
   * (non-deep builds), traverse from the top-level targets pruning cache hits, and record which
   * rules were pruned (present in allRules but never visited).
   */
  private GraphTraversalData traverseGraphFromTopLevelUsingAvailableCaches(
      Iterable<BuildTarget> topLevelTargets) {
    // Start with a set of every node in the graph
    LOG.debug("Recording all rules.");
    Set<BuildRule> allRules = findAllRulesInGraph(topLevelTargets);

    LOG.debug("Processing top-level targets.");
    Queue<BuildRule> buildRulesToProcess = processTopLevelTargets(topLevelTargets);

    if (!buildRulesToProcess.isEmpty() && !isDeepBuild) {
      // Check the cache for everything we are going to need upfront.
      LOG.debug("Pre-warming remote cache contains for all known rules.");
      prewarmRemoteCache(allRules);
    }

    LOG.debug("Traversing %d top-level targets now.", buildRulesToProcess.size());
    GraphTraversalData graphTraversalData = traverseActionGraph(buildRulesToProcess);

    // Now remove the nodes that will be scheduled from set of all nodes, to find the pruned ones.
graphTraversalData.prunedRules.addAll(allRules); graphTraversalData.prunedRules.removeAll(graphTraversalData.visitedRules); return graphTraversalData; } private Set<BuildRule> findAllRulesInGraph(Iterable<BuildTarget> topLevelTargets) { Set<BuildRule> allRules = new HashSet<>(); Queue<BuildRule> rulesToProcess = RichStream.from(topLevelTargets) .map(resolver::getRule) .collect(Collectors.toCollection(LinkedList::new)); while (!rulesToProcess.isEmpty()) { BuildRule buildRule = rulesToProcess.remove(); if (allRules.contains(buildRule)) { continue; } allRules.add(buildRule); rulesToProcess.addAll(ruleDepsCache.get(buildRule)); } return allRules; } private Set<String> findTransitiveBuildLocallyTargets(GraphTraversalData graphData) { Set<String> transitiveBuildLocallyTargets = new HashSet<>(graphData.buildLocallyTargets); Queue<String> targetsToProcess = Queues.newArrayDeque(graphData.buildLocallyTargets); while (!targetsToProcess.isEmpty()) { String target = targetsToProcess.remove(); if (graphData.allReverseDeps.containsKey(target)) { for (String revDep : graphData.allReverseDeps.get(target)) { if (transitiveBuildLocallyTargets.add(revDep)) { targetsToProcess.add(revDep); } } } } return transitiveBuildLocallyTargets; } /** * Upload the smallest set of cachable {@link BuildRule}s from the dir-cache, which can help the * remote servers in finishing the build faster. * * @param targetsToBuild Top-level targets which this build needs to optimize for. * @param clientStatsTracker For tracking some timing/perf metrics for the Stampede client. * @return Future to track the progress of the uploads. 
   */
  public ListenableFuture<?> uploadCriticalNodesFromLocalCache(
      Iterable<BuildTarget> targetsToBuild, ClientStatsTracker clientStatsTracker) {
    clientStatsTracker.startTimer(
        ClientStatsTracker.DistBuildClientStat.LOCAL_UPLOAD_FROM_DIR_CACHE);
    // The traversal itself kicks off the uploads (uploadRuleAndRuntimeDeps is called for
    // cache-served deps during the walk); the futures accumulate on artifactCache.
    traverseGraphFromTopLevelUsingAvailableCaches(targetsToBuild);
    return Futures.transform(
        Futures.allAsList(artifactCache.getAllUploadRuleFutures()),
        results -> {
          clientStatsTracker.stopTimer(
              ClientStatsTracker.DistBuildClientStat.LOCAL_UPLOAD_FROM_DIR_CACHE);
          clientStatsTracker.setMissingRulesUploadedFromDirCacheCount(results.size());
          // transform() needs a value; callers only use this future for completion tracking.
          return null;
        },
        MoreExecutors.directExecutor());
  }

  /**
   * Create {@link BuildTargetsQueue} with the given parameters.
   *
   * @param targetsToBuild top-level targets that need to be built.
   * @return an instance of {@link BuildTargetsQueue} with the top-level targets at the root.
   */
  public ReverseDepBuildTargetsQueue createBuildTargetsQueue(
      Iterable<BuildTarget> targetsToBuild,
      CoordinatorBuildRuleEventsPublisher coordinatorBuildRuleEventsPublisher,
      int mostBuildRulesFinishedPercentageThreshold) {
    LOG.info("Starting to create the %s.", BuildTargetsQueue.class.getName());
    GraphTraversalData results = traverseGraphFromTopLevelUsingAvailableCaches(targetsToBuild);

    // Notify distributed build clients that they should not wait for any of the nodes that were
    // pruned (as they will never be built remotely)
    ImmutableList<String> prunedTargets =
        ImmutableList.copyOf(
            results
                .prunedRules
                .stream()
                .filter(BuildRule::isCacheable) // Client always skips uncacheables
                .map(BuildRule::getFullyQualifiedName)
                .collect(Collectors.toList()));

    // Pruned rules are not in visitedRules, so add them back in for the total count.
    int numTotalCachableRules =
        results.visitedRules.size() - results.uncachableTargets.size() + prunedTargets.size();
    LOG.info(
        String.format(
            "[%d/%d] cacheable build rules were pruned from graph.",
            prunedTargets.size(), numTotalCachableRules));
    coordinatorBuildRuleEventsPublisher.createBuildRuleStartedEvents(prunedTargets);
    // Pruned targets get both started and completion events: from the client's point of view
    // they are finished before the distributed build begins.
    coordinatorBuildRuleEventsPublisher.createBuildRuleCompletionEvents(prunedTargets);

    if (shouldBuildSelectedTargetsLocally) {
      // Consider all (transitively) 'buildLocally' rules as uncachable for DistBuild purposes - we
      // cannot build them remotely and, hence, we cannot put them in cache for local client to
      // consume.
      // NOTE: this needs to be after uncacheability property is used for graph nodes visiting (and,
      // hence, pruning and scheduling) - we want caches to be checked for these rules while doing
      // the visiting (local build could have uploaded artifacts for these rules).
      ImmutableList<String> transitiveBuildLocallyTargets =
          ImmutableList.copyOf(findTransitiveBuildLocallyTargets(results));
      results.uncachableTargets.addAll(transitiveBuildLocallyTargets);

      // Unlock all rules which will not be built remotely so that local client does not get stuck
      // waiting for them (some of them may be cachable from client point of view). DO NOT use
      // completed/finished events as we are building deps of these rules remotely.
      coordinatorBuildRuleEventsPublisher.createBuildRuleUnlockedEvents(
          transitiveBuildLocallyTargets);
    }

    // Do the reference counting and create the EnqueuedTargets.
ImmutableSet.Builder<DistributableNode> zeroDependencyNodes = ImmutableSet.builder(); ImmutableMap.Builder<String, DistributableNode> allNodes = ImmutableMap.builder(); for (BuildRule buildRule : results.visitedRules) { String target = buildRule.getFullyQualifiedName(); Iterable<String> currentRevDeps; if (results.allReverseDeps.containsKey(target)) { currentRevDeps = results.allReverseDeps.get(target); } else { currentRevDeps = new ArrayList<>(); } DistributableNode distributableNode = new DistributableNode( target, ImmutableSet.copyOf(currentRevDeps), ImmutableSet.copyOf(Objects.requireNonNull(results.allForwardDeps.get(target))), results.uncachableTargets.contains(target)); allNodes.put(target, distributableNode); if (distributableNode.areAllDependenciesResolved()) { zeroDependencyNodes.add(distributableNode); } } // Wait for local uploads (in case of local coordinator) to finish. try { LOG.info("Waiting for cache uploads to finish."); Futures.allAsList(artifactCache.getAllUploadRuleFutures()).get(); } catch (InterruptedException | ExecutionException e) { LOG.error(e, "Failed to upload artifacts from the local cache."); } return new ReverseDepBuildTargetsQueue( new DistributableBuildGraph(allNodes.build(), zeroDependencyNodes.build()), mostBuildRulesFinishedPercentageThreshold); } private static String ruleToTarget(BuildRule rule) { return rule.getFullyQualifiedName(); } }
package com.wsfmn.view.model; import android.test.ActivityInstrumentationTestCase2; import com.wsfmn.model.Date; import com.wsfmn.exceptions.DateNotValidException; import com.wsfmn.model.Habit; import com.wsfmn.exceptions.HabitReasonTooLongException; import com.wsfmn.exceptions.HabitTitleTooLongException; import com.wsfmn.model.WeekDays; /** * Created by musaed on 2017-10-21. */ public class HabitTest extends ActivityInstrumentationTestCase2 { public HabitTest(){ super(Habit.class); } public void testGetId(){ Habit habit = null; try{ habit = new Habit("Swimming", "To spend time with friends", new Date()); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ } catch (DateNotValidException e){ //null } assertNotNull("Habit ID was not null", habit.getId()); } public void testSetId(){ Habit habit = null; try{ habit = new Habit("Eating At A Restaurant", "To spend time with family", new Date()); habit.setId("My Unique ID"); assertNotNull("Habit ID was null", habit.getId()); assertEquals("Habit ID was not equal to the one set.", habit.getId(), "My Unique ID"); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ } catch (DateNotValidException e){ //null } } public void testGetTitle() { Habit habit = null; try { habit = new Habit("Visiting the BookStore", new Date()); assertEquals("Visiting the BookStore", habit.getTitle()); } catch (HabitTitleTooLongException e) { //null } catch (DateNotValidException e) { //null } } public void testSetTitle(){ Habit habit = null; try{ habit = new Habit("Visiting the Hospital", new Date()); habit.setTitle("Playing Poker"); assertEquals("Playing Poker", habit.getTitle()); } catch(HabitTitleTooLongException e){ //null } catch(DateNotValidException e){ //null } } public void testGetReason(){ Habit habit = null; try{ habit = new Habit("Visiting the Hospital", "To check on my wife", new Date()); assertEquals(habit.getReason(), "To check on my wife"); } 
catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ //null } catch (DateNotValidException e){ //null } } public void testSetReason(){ Habit habit = null; try{ habit = new Habit("Play Football", "To lose weight", new Date()); habit.setReason("To lose weight and have fun"); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ //null } catch(DateNotValidException e){ //null } assertEquals(habit.getReason(), "To lose weight and have fun"); } public void testGetDate(){ Date date = new Date(); Habit habit = null; try { habit = new Habit("Swimming", date); } catch(HabitTitleTooLongException e){ //null } catch(DateNotValidException e){ //null } assertTrue(habit.getDate().equalDate(date)); } public void testSetDate(){ Date date1 = new Date(); Date date2 = new Date(date1.getYear(), date1.getMonth(), date1.getDay()+1); Habit habit = null; try { habit = new Habit("Playing Poker", date1); habit.setDate(date2); } catch(HabitTitleTooLongException e){ //null } catch(DateNotValidException e){ //null } assertTrue(habit.getDate().equalDate(date2)); } public void testGetWeekDays(){ Habit habit = null; try{ habit = new Habit("Playing Poker", "To spend time with friends", new Date(), new WeekDays()); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ //null } catch(DateNotValidException e){ //null } assertNotNull(habit.getWeekDays()); } public void testSetWeekDays(){ Habit habit = null; try{ habit = new Habit("Running", "To lose weight", new Date(), new WeekDays()); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ //null } catch(DateNotValidException e){ //null } WeekDays weekDays = new WeekDays(); weekDays.setDay(WeekDays.MONDAY); habit.setWeekDays(weekDays); assertNotNull(habit.getWeekDays()); assertEquals(habit.getWeekDays().getDay(WeekDays.MONDAY), true); for(int i = 1; i < 7; i++){ assertEquals(habit.getWeekDays().getDay(i), false); } } public 
void testToString() { Habit habit = null; Date date = new Date(); try{ habit = new Habit("Play Football", "To spend time with son", new Date()); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ } catch (DateNotValidException e){ //null } assertTrue("Habit as a string was incorrect", habit.toString().contains("Play Football"+" "+date)); } public void testSetTitleException(){ Habit habit = null; try{ habit = new Habit("a title that is more than twenty characters", new Date()); } catch(HabitTitleTooLongException e) { assertNull("Title Constraint Enforcement Failed", habit); } catch(DateNotValidException e){ //null } } public void testSetReasonException(){ Habit habit = null; try{ habit = new Habit("title", "a reason that contains more than thirty characters", new Date()); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ assertNull("Reason Constraint Enforcement Failed", habit); } catch(DateNotValidException e){ //null } } public void testSetDateException(){ Habit habit = null; try{ habit = new Habit("title", "reason", new Date(2017, 10, 10)); } catch(HabitTitleTooLongException e){ //null } catch(HabitReasonTooLongException e){ //null } catch(DateNotValidException e){ assertNull("Date Constraint Enforcement Failed", habit); } } public void testCalDay(){ Habit habit = new Habit(new Date(2017, 10, 26), new WeekDays()); int result = 0; habit.getWeekDays().setDay(WeekDays.FRIDAY); habit.getWeekDays().setDay(WeekDays.MONDAY); habit.getWeekDays().setDay(WeekDays.TUESDAY); result = habit.caldays(habit.getDate().getDay(), habit.getDate().getDayOfWeek(), habit.getDate().getDaysInMonth()); assertEquals(result, 3); } public void testTotalOccurrence(){ Date start = new Date(2017, 10, 20); Date end = new Date(2017, 11, 10); WeekDays weekDays = new WeekDays(); weekDays.setDay(WeekDays.SATURDAY); weekDays.setDay(WeekDays.SUNDAY); Habit habit = new Habit(start, weekDays); 
assertEquals(habit.totalOccurrence(start, end), 6); } public void testTotalOccurrenceSameDate(){ Date start = new Date(2017, 11, 11); Date end = new Date(2017, 11, 11); WeekDays weekDays = new WeekDays(); weekDays.setDay(WeekDays.SATURDAY); Habit habit = new Habit(start, weekDays); assertEquals(habit.totalOccurrence(start, end), 1); } }
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.language.v1;

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.protobuf.GeneratedMessageV3;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

// NOTE(review): GAPIC-generated test (see @Generated below). Do not hand-edit the test logic;
// regenerate from the service definition instead. Each RPC has a happy-path test (canned
// response -> call -> verify captured request and routing header) and an error-path test
// (mock throws INVALID_ARGUMENT -> client surfaces InvalidArgumentException).
@javax.annotation.Generated("by GAPIC")
public class LanguageServiceClientTest {
  private static MockLanguageService mockLanguageService;
  private static MockServiceHelper serviceHelper;
  private LanguageServiceClient client;
  private LocalChannelProvider channelProvider;

  @BeforeClass
  public static void startStaticServer() {
    // One in-process gRPC server shared by all tests; per-test state is reset in setUp().
    mockLanguageService = new MockLanguageService();
    serviceHelper =
        new MockServiceHelper("in-process-1", Arrays.<MockGrpcService>asList(mockLanguageService));
    serviceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    serviceHelper.stop();
  }

  @Before
  public void setUp() throws IOException {
    serviceHelper.reset();
    channelProvider = serviceHelper.createChannelProvider();
    LanguageServiceSettings settings =
        LanguageServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = LanguageServiceClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeSentimentTest() {
    String language = "language-1613589672";
    AnalyzeSentimentResponse expectedResponse =
        AnalyzeSentimentResponse.newBuilder().setLanguage(language).build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();

    AnalyzeSentimentResponse actualResponse = client.analyzeSentiment(document);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AnalyzeSentimentRequest actualRequest = (AnalyzeSentimentRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeSentimentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      client.analyzeSentiment(document);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeEntitiesTest() {
    String language = "language-1613589672";
    AnalyzeEntitiesResponse expectedResponse =
        AnalyzeEntitiesResponse.newBuilder().setLanguage(language).build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();
    EncodingType encodingType = EncodingType.NONE;

    AnalyzeEntitiesResponse actualResponse = client.analyzeEntities(document, encodingType);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AnalyzeEntitiesRequest actualRequest = (AnalyzeEntitiesRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(encodingType, actualRequest.getEncodingType());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeEntitiesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      EncodingType encodingType = EncodingType.NONE;
      client.analyzeEntities(document, encodingType);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeEntitySentimentTest() {
    String language = "language-1613589672";
    AnalyzeEntitySentimentResponse expectedResponse =
        AnalyzeEntitySentimentResponse.newBuilder().setLanguage(language).build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();
    EncodingType encodingType = EncodingType.NONE;

    AnalyzeEntitySentimentResponse actualResponse =
        client.analyzeEntitySentiment(document, encodingType);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AnalyzeEntitySentimentRequest actualRequest =
        (AnalyzeEntitySentimentRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(encodingType, actualRequest.getEncodingType());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeEntitySentimentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      EncodingType encodingType = EncodingType.NONE;
      client.analyzeEntitySentiment(document, encodingType);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeSyntaxTest() {
    String language = "language-1613589672";
    AnalyzeSyntaxResponse expectedResponse =
        AnalyzeSyntaxResponse.newBuilder().setLanguage(language).build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();
    EncodingType encodingType = EncodingType.NONE;

    AnalyzeSyntaxResponse actualResponse = client.analyzeSyntax(document, encodingType);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AnalyzeSyntaxRequest actualRequest = (AnalyzeSyntaxRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(encodingType, actualRequest.getEncodingType());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void analyzeSyntaxExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      EncodingType encodingType = EncodingType.NONE;
      client.analyzeSyntax(document, encodingType);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void classifyTextTest() {
    ClassifyTextResponse expectedResponse = ClassifyTextResponse.newBuilder().build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();

    ClassifyTextResponse actualResponse = client.classifyText(document);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ClassifyTextRequest actualRequest = (ClassifyTextRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void classifyTextExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      client.classifyText(document);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void annotateTextTest() {
    String language = "language-1613589672";
    AnnotateTextResponse expectedResponse =
        AnnotateTextResponse.newBuilder().setLanguage(language).build();
    mockLanguageService.addResponse(expectedResponse);

    Document document = Document.newBuilder().build();
    AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build();
    EncodingType encodingType = EncodingType.NONE;

    AnnotateTextResponse actualResponse = client.annotateText(document, features, encodingType);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockLanguageService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AnnotateTextRequest actualRequest = (AnnotateTextRequest) actualRequests.get(0);

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(features, actualRequest.getFeatures());
    Assert.assertEquals(encodingType, actualRequest.getEncodingType());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void annotateTextExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockLanguageService.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build();
      EncodingType encodingType = EncodingType.NONE;
      client.annotateText(document, features, encodingType);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.instructions.spark; import java.util.Iterator; import java.util.LinkedList; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.apache.spark.api.java.function.PairFunction; import scala.Tuple2; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.runtime.DMLRuntimeException; import org.apache.sysml.runtime.controlprogram.context.ExecutionContext; import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext; import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer; import org.apache.sysml.runtime.functionobjects.Multiply; import org.apache.sysml.runtime.functionobjects.Plus; import org.apache.sysml.runtime.instructions.InstructionUtils; import org.apache.sysml.runtime.instructions.cp.CPOperand; import org.apache.sysml.runtime.instructions.spark.utils.RDDAggregateUtils; import org.apache.sysml.runtime.instructions.spark.utils.SparkUtils; import org.apache.sysml.runtime.matrix.MatrixCharacteristics; import org.apache.sysml.runtime.matrix.data.MatrixBlock; import org.apache.sysml.runtime.matrix.data.MatrixIndexes; import 
org.apache.sysml.runtime.matrix.data.TripleIndexes;
import org.apache.sysml.runtime.matrix.operators.AggregateBinaryOperator;
import org.apache.sysml.runtime.matrix.operators.AggregateOperator;
import org.apache.sysml.runtime.matrix.operators.Operator;

/**
 * Spark instruction for replication-based matrix multiplication (RMM): both inputs are
 * replicated and joined on (i,j,k) block triples, multiplied block-wise, and summed per
 * output block (i,j).
 */
public class RmmSPInstruction extends BinarySPInstruction {

	private RmmSPInstruction(Operator op, CPOperand in1, CPOperand in2, CPOperand out, String opcode, String istr) {
		super(op, in1, in2, out, opcode, istr);
		_sptype = SPINSTRUCTION_TYPE.RMM;
	}

	/**
	 * Parses an "rmm" instruction string of the form opcode, in1, in2, out.
	 *
	 * @throws DMLRuntimeException if the opcode is not "rmm"
	 */
	public static RmmSPInstruction parseInstruction( String str )
		throws DMLRuntimeException
	{
		String parts[] = InstructionUtils.getInstructionPartsWithValueType(str);
		String opcode = parts[0];

		if ( "rmm".equals(opcode) ) {
			CPOperand in1 = new CPOperand(parts[1]);
			CPOperand in2 = new CPOperand(parts[2]);
			CPOperand out = new CPOperand(parts[3]);
			return new RmmSPInstruction(null, in1, in2, out, opcode, str);
		}
		else {
			throw new DMLRuntimeException("RmmSPInstruction.parseInstruction():: Unknown opcode " + opcode);
		}
	}

	@Override
	public void processInstruction(ExecutionContext ec)
		throws DMLRuntimeException
	{
		SparkExecutionContext sec = (SparkExecutionContext)ec;

		//get input rdds
		MatrixCharacteristics mc1 = sec.getMatrixCharacteristics( input1.getName() );
		MatrixCharacteristics mc2 = sec.getMatrixCharacteristics( input2.getName() );
		JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable( input1.getName() );
		JavaPairRDD<MatrixIndexes,MatrixBlock> in2 = sec.getBinaryBlockRDDHandleForVariable( input2.getName() );
		MatrixCharacteristics mcOut = updateBinaryMMOutputMatrixCharacteristics(sec, true);

		//execute Spark RMM instruction
		//step 1: prepare join keys (w/ shallow replication), i/j/k
		//LHS blocks are replicated across RHS column blocks, RHS blocks across LHS row blocks
		JavaPairRDD<TripleIndexes,MatrixBlock> tmp1 = in1.flatMapToPair(
			new RmmReplicateFunction(mc2.getCols(), mc2.getColsPerBlock(), true));
		JavaPairRDD<TripleIndexes,MatrixBlock> tmp2 = in2.flatMapToPair(
			new RmmReplicateFunction(mc1.getRows(), mc1.getRowsPerBlock(), false));
//step 2: join prepared datasets, multiply, and aggregate int numPartJoin = Math.max(getNumJoinPartitions(mc1, mc2), SparkExecutionContext.getDefaultParallelism(true)); int numPartOut = SparkUtils.getNumPreferredPartitions(mcOut); JavaPairRDD<MatrixIndexes,MatrixBlock> out = tmp1 .join( tmp2, numPartJoin ) //join by result block .mapToPair( new RmmMultiplyFunction() ); //do matrix multiplication out = RDDAggregateUtils.sumByKeyStable(out, //aggregation per result block numPartOut, false); //put output block into symbol table (no lineage because single block) sec.setRDDHandleForVariable(output.getName(), out); sec.addLineageRDD(output.getName(), input1.getName()); sec.addLineageRDD(output.getName(), input2.getName()); } private static int getNumJoinPartitions(MatrixCharacteristics mc1, MatrixCharacteristics mc2) { if( !mc1.dimsKnown() || !mc2.dimsKnown() ) SparkExecutionContext.getDefaultParallelism(true); //compute data size of replicated inputs double hdfsBlockSize = InfrastructureAnalyzer.getHDFSBlockSize(); double matrix1PSize = OptimizerUtils.estimatePartitionedSizeExactSparsity(mc1) * ((long) Math.ceil((double)mc2.getCols()/mc2.getColsPerBlock())); double matrix2PSize = OptimizerUtils.estimatePartitionedSizeExactSparsity(mc2) * ((long) Math.ceil((double)mc1.getRows()/mc1.getRowsPerBlock())); return (int) Math.max(Math.ceil((matrix1PSize+matrix2PSize)/hdfsBlockSize), 1); } private static class RmmReplicateFunction implements PairFlatMapFunction<Tuple2<MatrixIndexes, MatrixBlock>, TripleIndexes, MatrixBlock> { private static final long serialVersionUID = 3577072668341033932L; private long _len = -1; private long _blen = -1; private boolean _left = false; public RmmReplicateFunction(long len, long blen, boolean left) { _len = len; _blen = blen; _left = left; } @Override public Iterator<Tuple2<TripleIndexes, MatrixBlock>> call( Tuple2<MatrixIndexes, MatrixBlock> arg0 ) throws Exception { LinkedList<Tuple2<TripleIndexes, MatrixBlock>> ret = new LinkedList<>(); 
			MatrixIndexes ixIn = arg0._1();
			MatrixBlock blkIn = arg0._2();

			long numBlocks = (long) Math.ceil((double)_len/_blen);

			if( _left ) //LHS MATRIX
			{
				//replicate wrt # column blocks in RHS
				long i = ixIn.getRowIndex();
				long k = ixIn.getColumnIndex();
				for( long j=1; j<=numBlocks; j++ ) {
					TripleIndexes tmptix = new TripleIndexes(i, j, k);
					ret.add( new Tuple2<>(tmptix, blkIn) );
				}
			}
			else // RHS MATRIX
			{
				//replicate wrt # row blocks in LHS
				long k = ixIn.getRowIndex();
				long j = ixIn.getColumnIndex();
				for( long i=1; i<=numBlocks; i++ ) {
					TripleIndexes tmptix = new TripleIndexes(i, j, k);
					ret.add( new Tuple2<>(tmptix, blkIn) );
				}
			}

			//output list of new tuples
			return ret.iterator();
		}
	}

	/**
	 * Multiplies the joined block pair for key (i,j,k) into a partial result block keyed by
	 * the output index (i,j); partial products are summed afterwards by sumByKeyStable.
	 */
	private static class RmmMultiplyFunction implements PairFunction<Tuple2<TripleIndexes, Tuple2<MatrixBlock,MatrixBlock>>, MatrixIndexes, MatrixBlock>
	{
		private static final long serialVersionUID = -5772410117511730911L;

		private AggregateBinaryOperator _op = null;

		public RmmMultiplyFunction() {
			//multiply-and-sum operator for block matrix multiplication
			AggregateOperator agg = new AggregateOperator(0, Plus.getPlusFnObject());
			_op = new AggregateBinaryOperator(Multiply.getMultiplyFnObject(), agg);
		}

		@Override
		public Tuple2<MatrixIndexes, MatrixBlock> call( Tuple2<TripleIndexes, Tuple2<MatrixBlock,MatrixBlock>> arg0 )
			throws Exception
		{
			//get input blocks per result block
			TripleIndexes ixIn = arg0._1(); //i,j,k
			MatrixIndexes ixOut = new MatrixIndexes(ixIn.getFirstIndex(), ixIn.getSecondIndex()); //i,j
			MatrixBlock blkIn1 = arg0._2()._1();
			MatrixBlock blkIn2 = arg0._2()._2();
			MatrixBlock blkOut = new MatrixBlock();

			//core block matrix multiplication
			blkIn1.aggregateBinaryOperations(blkIn1, blkIn2, blkOut, _op);

			//output new tuple
			return new Tuple2<>(ixOut, blkOut);
		}
	}
}
// ============================================================================ // // Copyright (C) 2006-2018 Talend Inc. - www.talend.com // // This source code is available under agreement available at // https://github.com/Talend/data-prep/blob/master/LICENSE // // You should have received a copy of the agreement // along with this program; if not, write to Talend SA // 9 rue Pages 92150 Suresnes, France // // ============================================================================ package org.talend.dataprep.qa.config; import static junit.framework.TestCase.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.springframework.http.HttpStatus.OK; import static org.talend.dataprep.qa.config.FeatureContext.suffixName; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import javax.annotation.PostConstruct; import org.apache.commons.collections.CollectionUtils; import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpStatus; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.support.AnnotationConfigContextLoader; import org.talend.dataprep.helper.OSDataPrepAPIHelper; import org.talend.dataprep.helper.VerboseMode; import org.talend.dataprep.qa.SpringContextConfiguration; import org.talend.dataprep.qa.dto.ContentMetadataColumn; import org.talend.dataprep.qa.dto.DatasetContent; import 
org.talend.dataprep.qa.dto.Folder;
import org.talend.dataprep.qa.dto.PreparationContent;
import org.talend.dataprep.qa.dto.PreparationDetails;
import org.talend.dataprep.qa.dto.Statistics;
import org.talend.dataprep.qa.util.OSIntegrationTestUtil;
import org.talend.dataprep.qa.util.folder.FolderUtil;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.restassured.response.Response;

import cucumber.api.DataTable;

/**
 * Base class for all DataPrep step classes.
 */
@ContextConfiguration(classes = SpringContextConfiguration.class, loader = AnnotationConfigContextLoader.class)
public abstract class DataPrepStep {

    /**
     * {@link cucumber.api.DataTable} key for origin folder.
     */
    protected static final String ORIGIN = "origin";

    /**
     * {@link cucumber.api.DataTable} key for preparationName value.
     */
    protected static final String PREPARATION_NAME = "preparationName";

    /** {@link cucumber.api.DataTable} key for a dataset name. */
    protected static final String DATASET_NAME_KEY = "name";

    /** {@link cucumber.api.DataTable} key for a dataset id. */
    protected static final String DATASET_ID_KEY = "dataSetId";

    /** Identifier of the HEAD step of a preparation. */
    protected static final String HEAD_ID = "HEAD";

    /** Version label used to request the head content of a preparation. */
    protected static final String VERSION_HEAD = "head";

    /**
     * This class' logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(DataPrepStep.class);

    /** Shared JSON mapper for (de)serializing API payloads. */
    protected final ObjectMapper objectMapper = new ObjectMapper();

    @Autowired
    public FeatureContext context;

    @Autowired
    protected OSDataPrepAPIHelper api;

    @Autowired
    protected OSIntegrationTestUtil util;

    @Autowired
    protected FolderUtil folderUtil;

    @Value("${restassured.debug:NONE}")
    private VerboseMode restAssuredDebug;

    /** Propagates the configured REST-Assured verbosity to the API helper. */
    @PostConstruct
    public void init() {
        api.setRestAssuredDebug(restAssuredDebug);
    }

    /**
     * Retrieve the details of a preparation from its id.
     *
     * @param preparationId the preparation id.
     * @return the preparation details.
     */
    protected PreparationDetails getPreparationDetails(String preparationId) {
        Response response = api.getPreparationDetails(preparationId);
        response.then().statusCode(HttpStatus.OK.value());
        return response.as(PreparationDetails.class);
    }

    /**
     * Predicate evaluating to {@code true} when deleting the given preparation
     * failed (non-OK status or an exception during the call).
     */
    protected Predicate<String> preparationDeletionIsNotOK() {
        return preparationId -> {
            try {
                return api.deletePreparation(preparationId).getStatusCode() != OK.value();
            } catch (Exception ex) {
                LOGGER.debug("Error on preparation's suppression {}.", preparationId);
                return true;
            }
        };
    }

    /**
     * Predicate evaluating to {@code true} when deleting the given dataset
     * failed (non-OK status or an exception during the call).
     */
    protected Predicate<String> datasetDeletionIsNotOK() {
        return datasetId -> {
            try {
                // Even if the dataset doesn't exist, the status is 200
                return api.deleteDataset(datasetId).getStatusCode() != OK.value();
            } catch (Exception ex) {
                LOGGER.debug("Error on Dataset's suppression {}.", datasetId);
                return true;
            }
        };
    }

    /**
     * Predicate evaluating to {@code true} when deleting the given folder
     * failed (non-2xx status or an exception during the call).
     */
    protected Predicate<Folder> folderDeletionIsNotOK() {
        return folder -> {
            try {
                return !HttpStatus.valueOf(folderUtil.deleteFolder(folder).getStatusCode()).is2xxSuccessful();
            } catch (Exception ex) {
                LOGGER.debug("Error on folder's suppression {}.", folder.getPath());
                return true;
            }
        };
    }

    /** Signals a failure during post-scenario cleanup. */
    protected class CleanAfterException extends RuntimeException {

        CleanAfterException(String s) {
            super(s);
        }
    }

    /**
     * Asserts that {@code actual} contains exactly the expected column names
     * (same size and full containment).
     *
     * @param datasetOrPreparationName name used in assertion messages
     * @param expectedColumnNames the expected column names
     * @param actual the column names returned by the API
     */
    protected void checkColumnNames(String datasetOrPreparationName, List<String> expectedColumnNames,
            List<String> actual) {
        assertNotNull("The returned columns' list on \"" + datasetOrPreparationName + "\" is null.", actual);
        assertFalse("No columns in \"" + datasetOrPreparationName + "\".", actual.isEmpty());
        assertEquals("Not the expected number of columns in \"" + datasetOrPreparationName + "\".",
                expectedColumnNames.size(), actual.size());
        assertTrue(
                "\"" + datasetOrPreparationName + "\" doesn't contain all expected columns : \""
                        + CollectionUtils.disjunction(expectedColumnNames, actual).toString() + "\".",
                actual.containsAll(expectedColumnNames));
    }

    /**
     * Returns the dataset content, once all DQ analysis are done and so all fields are up-to-date.
* * @param datasetId the id of the dataset * @param tql the TQL filter to apply to the dataset * @return the up-to-date dataset content */ protected DatasetContent getDatasetContent(String datasetId, String tql) throws Exception { AtomicReference<DatasetContent> datasetContentReference = new AtomicReference<>(); // TODO I guess this wait is useless since we use {DataPrepStep#checkDatasetMetadataStatus} before api.waitResponse("Waiting frequency table from dataset metadata of " + datasetId).until(() -> { Response response = api.getDataset(datasetId, tql); response.then().statusCode(200); DatasetContent datasetContent = response.as(DatasetContent.class); datasetContentReference.set(datasetContent); return datasetContent.metadata.columns // .stream() // .findFirst() // .orElse(new ContentMetadataColumn()).statistics.frequencyTable; }, is(not(empty()))); return datasetContentReference.get(); } // FixMe : same thing as the other one because DatasetContent seems to be the same thing as PreparationContent protected PreparationContent getPreparationContent(String preparationName, String tql) throws IOException { String preparationId = context.getPreparationId(suffixName(preparationName)); Response response = api.getPreparationContent(preparationId, VERSION_HEAD, HEAD_ID, tql); response.then().statusCode(200); return response.as(PreparationContent.class); } protected void checkSampleRecordsCount(String actualRecordsCount, String expectedRecordsCount) { if (expectedRecordsCount == null) { return; } Assert.assertEquals("The count records " + expectedRecordsCount + "is wrong: " + actualRecordsCount, expectedRecordsCount, actualRecordsCount); } protected void checkRecords(List<Object> actualRecords, String expectedRecordsFilename) throws Exception { if (expectedRecordsFilename == null) { return; } InputStream expectedRecordsFileStream = DataPrepStep.class.getResourceAsStream(expectedRecordsFilename); List<Object> expectedRecords = 
objectMapper.readValue(expectedRecordsFileStream, DatasetContent.class).records; Assert.assertEquals(expectedRecords.size(), actualRecords.size()); Assert.assertTrue( "Difference between expected records and actual records:" // + CollectionUtils.disjunction(expectedRecords, actualRecords).toString(), actualRecords.containsAll(expectedRecords)); } protected void checkQualityPerColumn(List<ContentMetadataColumn> columns, String expectedQualityFilename) throws Exception { if (expectedQualityFilename == null) { return; } InputStream expectedQualityFileStream = DataPrepStep.class.getResourceAsStream(expectedQualityFilename); List<ContentMetadataColumn> expectedQualityPerColumn = objectMapper.readValue(expectedQualityFileStream, DatasetContent.class).metadata.columns; Assert.assertEquals(expectedQualityPerColumn.size(), columns.size()); Collections.sort(columns); Collections.sort(expectedQualityPerColumn); for (int i = 0; i < expectedQualityPerColumn.size(); i++) { ContentMetadataColumn expectedColumn = expectedQualityPerColumn.get(i); ContentMetadataColumn column = columns.get(i); Assert.assertEquals(expectedColumn.id, column.id); Assert.assertEquals(expectedColumn.name, column.name); Assert.assertEquals(expectedColumn.type, column.type); Assert.assertEquals(expectedColumn.domain, column.domain); Map<String, Integer> expectedQuality = expectedColumn.quality; Statistics expectedStatistics = expectedColumn.statistics; Map<String, Integer> quality = column.quality; Assert.assertEquals( "The valid records count " + expectedQuality.get("valid") + "is wrong: " + quality.get("valid"), expectedQuality.get("valid"), quality.get("valid")); Assert.assertEquals( "The valid records count " + expectedQuality.get("empty") + "is wrong: " + quality.get("empty"), expectedQuality.get("empty"), quality.get("empty")); Assert.assertEquals( "The valid records count " + expectedQuality.get("invalid") + "is wrong: " + quality.get("invalid"), expectedQuality.get("invalid"), 
quality.get("invalid")); Statistics statistics = column.statistics; if (expectedStatistics != null && statistics != null) { Assert.assertTrue( "Difference between expected records and actual records:" + // CollectionUtils .disjunction(expectedStatistics.patternFrequencyTable, statistics.patternFrequencyTable) .toString(), expectedStatistics.patternFrequencyTable.containsAll(statistics.patternFrequencyTable)); Assert.assertTrue(expectedStatistics.frequencyTable.containsAll(statistics.frequencyTable)); } } } public void checkContent(PreparationContent preparation, DataTable dataTable) throws Exception { Map<String, String> expected = dataTable.asMap(String.class, String.class); checkRecords(preparation.records, expected.get("records")); checkQualityPerColumn(preparation.metadata.columns, expected.get("quality")); checkSampleRecordsCount(preparation.metadata.records, expected.get("sample_records_count")); } }
/* * This file is part of the DITA Open Toolkit project. * * Copyright 2004, 2005 IBM Corporation * * See the accompanying LICENSE file for applicable license. */ package org.dita.dost.module; import com.google.common.collect.MultimapBuilder.SetMultimapBuilder; import com.google.common.collect.SetMultimap; import org.dita.dost.exception.DITAOTException; import org.dita.dost.exception.DITAOTXMLErrorHandler; import org.dita.dost.log.MessageUtils; import org.dita.dost.module.reader.TempFileNameScheme; import org.dita.dost.pipeline.AbstractPipelineInput; import org.dita.dost.pipeline.AbstractPipelineOutput; import org.dita.dost.reader.DitaValReader; import org.dita.dost.reader.GenListModuleReader; import org.dita.dost.reader.KeydefFilter; import org.dita.dost.reader.SubjectSchemeReader; import org.dita.dost.util.*; import org.dita.dost.writer.DebugFilter; import org.dita.dost.writer.ExportAnchorsFilter; import org.dita.dost.writer.ProfilingFilter; import org.xml.sax.ContentHandler; import org.xml.sax.SAXParseException; import org.xml.sax.XMLFilter; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; import java.io.*; import java.net.URI; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.dita.dost.reader.GenListModuleReader.*; import static org.dita.dost.util.Configuration.Mode; import static org.dita.dost.util.Configuration.printTranstype; import static org.dita.dost.util.Constants.*; import static org.dita.dost.util.Job.FileInfo; import static org.dita.dost.util.Job.USER_INPUT_FILE_LIST_FILE; import static org.dita.dost.util.URLUtils.*; /** * This class extends AbstractPipelineModule, used to generate map and topic * list by parsing all the refered dita files. 
 *
 * @version 1.0 2004-11-25
 *
 * @author Wu, Zhi Qiang
 */
public final class GenMapAndTopicListModule extends SourceReaderModule {

    public static final String ELEMENT_STUB = "stub";

    /** Generate {@code xtrf} and {@code xtrc} attributes */
    private boolean genDebugInfo;

    /** Error recovery policy: STRICT throws on parse errors, otherwise they are logged (see processFile). */
    private Mode processingMode;

    /** FileInfos keyed by src. */
    private final Map<URI, FileInfo> fileinfos = new HashMap<>();

    /** Set of all topic files */
    private final Set<URI> fullTopicSet;

    /** Set of all map files */
    private final Set<URI> fullMapSet;

    /** Set of topic files containing href */
    private final Set<URI> hrefTopicSet;

    /** Set of dita files containing conref */
    private final Set<URI> conrefSet;

    /** Set of topic files containing coderef */
    private final Set<URI> coderefSet;

    /** Set of all images */
    private final Set<Reference> formatSet;

    /** Set of all images used for flagging */
    private final Set<URI> flagImageSet;

    /** Set of all HTML and other non-DITA or non-image files */
    private final SetMultimap<String, URI> htmlSet;

    /** Set of all the href targets */
    private final Set<URI> hrefTargetSet;

    /** Set of all the conref targets */
    private Set<URI> conrefTargetSet;

    /** Set of all the non-conref targets */
    private final Set<URI> nonConrefCopytoTargetSet;

    /** Set of subsidiary files */
    private final Set<URI> coderefTargetSet;

    /** Set of absolute flag image files */
    private final Set<URI> relFlagImagesSet;

    /** List of files waiting for parsing. Values are absolute URI references.
 */
    private final NavigableMap<URI, Reference> waitList;

    /** List of parsed files */
    private final Set<URI> doneList;

    /** Files that failed to parse (see processFile). */
    private final Set<URI> failureList;

    /** Set of outer dita files */
    private final Set<URI> outDitaFilesSet;

    /** Set of sources of conacion */
    private final Set<URI> conrefpushSet;

    /** Set of files containing keyref */
    private final Set<URI> keyrefSet;

    /** Set of files with "@processing-role=resource-only" */
    private final Set<URI> resourceOnlySet;

    /** Absolute basedir for processing */
    private URI baseInputDir;

    /** Profiling is enabled. */
    private boolean profilingEnabled;

    /** Absolute path for filter file. */
    private File ditavalFile;

    /** Number of directory levels base directory is adjusted. */
    private int uplevels = 0;

    /** Reusable reader collecting reference lists per parsed file. */
    private GenListModuleReader listFilter;

    /** Reusable filter collecting key definitions. */
    private KeydefFilter keydefFilter;

    /** Filter collecting export anchors; only created for the Eclipse Help transtype (see initFilters). */
    private ExportAnchorsFilter exportAnchorsFilter;

    /** Terminal content handler that discards SAX events; parsing is done for the filters' side effects. */
    private ContentHandler nullHandler;

    /** Ditaval-based filter utility; null when profiling is disabled. */
    private FilterUtils filterUtils;

    private TempFileNameScheme tempFileNameScheme;

    /** Absolute path to input file. */
    private URI rootFile;

    /** Additional resource files to process before the start file (see parseInputParameters). */
    private List<URI> resources;

    /** File currently being processed */
    private URI currentFile;

    /** Subject scheme key map. Key is key value, value is key definition. */
    private Map<String, KeyDef> schemekeydefMap;

    /** Subject scheme absolute file paths. */
    private final Set<URI> schemeSet;

    /** Subject scheme usage. Key is absolute file path, value is set of applicable subject schemes. */
    private final Map<URI, Set<URI>> schemeDictionary;

    /** Copy-to target URI to source URI (see processParseResult). */
    private final Map<URI, URI> copyTo = new HashMap<>();

    /** Transformation type of the current build. */
    private String transtype;

    private boolean setSystemid = true;

    /** Formats for source topics */
    // XXX This is a hack to retain format. A better solution would be to keep the format with the source URI
    private final Map<URI, String> sourceFormat = new HashMap<>();

    /**
     * Create a new instance and do the initialization.
*/ public GenMapAndTopicListModule() { super(); fullTopicSet = new HashSet<>(128); fullMapSet = new HashSet<>(128); hrefTopicSet = new HashSet<>(128); schemeSet = new HashSet<>(128); conrefSet = new HashSet<>(128); formatSet = new HashSet<>(); flagImageSet = new LinkedHashSet<>(128); htmlSet = SetMultimapBuilder.hashKeys().hashSetValues().build(); hrefTargetSet = new HashSet<>(128); coderefTargetSet = new HashSet<>(16); waitList = new ConcurrentSkipListMap<>(); doneList = ConcurrentHashMap.newKeySet(); failureList = ConcurrentHashMap.newKeySet(); conrefTargetSet = new HashSet<>(128); nonConrefCopytoTargetSet = new HashSet<>(128); outDitaFilesSet = new HashSet<>(128); relFlagImagesSet = new LinkedHashSet<>(128); conrefpushSet = new HashSet<>(128); keyrefSet = new HashSet<>(128); coderefSet = new HashSet<>(128); schemeDictionary = new HashMap<>(); // @processing-role resourceOnlySet = new HashSet<>(128); } @Override public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException { if (logger == null) { throw new IllegalStateException("Logger not set"); } try { parseInputParameters(input); initFilters(); initXmlReader(); readResourceFiles(); readStartFile(); processWaitList(); updateBaseDirectory(); handleConref(); outputResult(); } catch (final DITAOTException e) { throw e; } catch (final Exception e) { throw new DITAOTException(e.getMessage(), e); } return null; } private void readResourceFiles() throws DITAOTException { if (!resources.isEmpty()) { for (URI resource : resources) { addToWaitList(new Reference(resource)); } processWaitList(); resourceOnlySet.addAll(hrefTargetSet); resourceOnlySet.addAll(conrefTargetSet); resourceOnlySet.addAll(nonConrefCopytoTargetSet); resourceOnlySet.addAll(outDitaFilesSet); resourceOnlySet.addAll(conrefpushSet); resourceOnlySet.addAll(keyrefSet); resourceOnlySet.addAll(resourceOnlySet); resourceOnlySet.addAll(fullTopicSet); resourceOnlySet.addAll(fullMapSet); resourceOnlySet.addAll(conrefSet); } } 
private void readStartFile() throws DITAOTException {
        addToWaitList(new Reference(rootFile));
    }

    /**
     * Initialize reusable filters.
     */
    private void initFilters() {
        listFilter = new GenListModuleReader();
        listFilter.setLogger(logger);
        listFilter.setPrimaryDitamap(rootFile);
        listFilter.setJob(job);

        if (profilingEnabled) {
            filterUtils = parseFilterFile();
        }

        // Export anchors are only collected for the Eclipse Help transtype
        if (INDEX_TYPE_ECLIPSEHELP.equals(transtype)) {
            exportAnchorsFilter = new ExportAnchorsFilter();
            exportAnchorsFilter.setInputFile(rootFile);
        }

        keydefFilter = new KeydefFilter();
        keydefFilter.setLogger(logger);
        keydefFilter.setCurrentFile(rootFile);
        keydefFilter.setJob(job);

        nullHandler = new DefaultHandler();
    }

    /**
     * Read module configuration from the pipeline input attributes: validation,
     * transtype, output control flags, base/input directories, resource list,
     * and the absolute root input file.
     *
     * @param input pipeline input parameters
     */
    private void parseInputParameters(final AbstractPipelineInput input) {
        ditavalFile = new File(job.tempDir, FILE_NAME_MERGED_DITAVAL);

        validate = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_VALIDATE));
        if (!validate) {
            final String msg = MessageUtils.getMessage("DOTJ037W").toString();
            logger.warn(msg);
        }
        transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE);
        gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE));
        setSystemid = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAN_SETSYSTEMID));

        // Defaults to LAX when no processing mode is given
        final String mode = input.getAttribute(ANT_INVOKER_EXT_PARAM_PROCESSING_MODE);
        processingMode = mode != null ?
 Mode.valueOf(mode.toUpperCase()) : Mode.LAX;
        genDebugInfo = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATE_DEBUG_ATTR));

        // For the output control
        job.setGeneratecopyouter(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER));
        job.setOutterControl(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTTERCONTROL));
        job.setOnlyTopicInMap(Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP)));
        job.setCrawl(input.getAttribute(ANT_INVOKER_EXT_PARAM_CRAWL));

        // Set the OutputDir
        final File path = toFile(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTPUTDIR));
        if (path.isAbsolute()) {
            job.setOutputDir(path);
        } else {
            throw new IllegalArgumentException("Output directory " + path + " must be absolute");
        }

        final File basedir = toFile(input.getAttribute(ANT_INVOKER_PARAM_BASEDIR));

        final URI ditaInputDir = toURI(input.getAttribute(ANT_INVOKER_EXT_PARAM_INPUTDIR));
        if (ditaInputDir != null) {
            if (ditaInputDir.isAbsolute()) {
                baseInputDir = ditaInputDir;
            } else if (ditaInputDir.getPath() != null && ditaInputDir.getPath().startsWith(URI_SEPARATOR)) {
                // Root-relative path: assume a file URI without scheme
                baseInputDir = setScheme(ditaInputDir, "file");
            } else {
                // XXX Shouldn't this be resolved to current directory, not Ant script base directory?
                baseInputDir = basedir.toURI().resolve(ditaInputDir);
            }
            assert baseInputDir.isAbsolute();
        }

        if (input.getAttribute(ANT_INVOKER_PARAM_RESOURCES) != null) {
            resources = Stream.of(input.getAttribute(ANT_INVOKER_PARAM_RESOURCES).split(File.pathSeparator))
                    .map(resource -> new File(resource).toURI())
                    .collect(Collectors.toList());
        } else {
            resources = Collections.emptyList();
        }

        final URI ditaInput = toURI(input.getAttribute(ANT_INVOKER_PARAM_INPUTMAP));
        if (ditaInput.isAbsolute()) {
            rootFile = ditaInput;
        } else if (ditaInput.getPath() != null && ditaInput.getPath().startsWith(URI_SEPARATOR)) {
            rootFile = setScheme(ditaInput, "file");
        } else if (baseInputDir != null) {
            rootFile = baseInputDir.resolve(ditaInput);
        } else {
            rootFile = basedir.toURI().resolve(ditaInput);
        }
        assert rootFile.isAbsolute();

        if (baseInputDir == null) {
            baseInputDir = rootFile.resolve(".");
        }
        assert baseInputDir.isAbsolute();

        profilingEnabled = true;
        if (input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED) != null) {
            profilingEnabled = Boolean.parseBoolean(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED));
        }

        // create the keydef file for scheme files
        schemekeydefMap = new HashMap<>();

        // Set the mapDir
        job.setInputFile(rootFile);
    }

    /**
     * Drains the wait list; processing a file may enqueue further references
     * (see categorizeReferenceFile), so entries are polled until exhausted.
     */
    private void processWaitList() throws DITAOTException {
        for (Map.Entry<URI, Reference> entry = waitList.pollFirstEntry(); entry != null; entry = waitList.pollFirstEntry()) {
            processFile(entry.getValue());
        }
    }

    /**
     * Get pipe line filters
     *
     * @param fileToParse absolute path to current file being processed
     */
    @Override
    List<XMLFilter> getProcessingPipe(final URI fileToParse) {
        assert fileToParse.isAbsolute();
        final List<XMLFilter> pipe = new ArrayList<>();

        if (genDebugInfo) {
            final DebugFilter debugFilter = new DebugFilter();
            debugFilter.setLogger(logger);
            debugFilter.setCurrentFile(currentFile);
            pipe.add(debugFilter);
        }

        if (filterUtils != null) {
            final ProfilingFilter profilingFilter = new ProfilingFilter(false);
            profilingFilter.setLogger(logger);
profilingFilter.setJob(job);
            profilingFilter.setFilterUtils(filterUtils);
            profilingFilter.setCurrentFile(fileToParse);
            pipe.add(profilingFilter);
        }

        if (INDEX_TYPE_ECLIPSEHELP.equals(transtype)) {
            exportAnchorsFilter.setCurrentFile(fileToParse);
            exportAnchorsFilter.setErrorHandler(new DITAOTXMLErrorHandler(fileToParse.toString(), logger));
            pipe.add(exportAnchorsFilter);
        }

        keydefFilter.setCurrentDir(fileToParse.resolve("."));
        keydefFilter.setErrorHandler(new DITAOTXMLErrorHandler(fileToParse.toString(), logger));
        pipe.add(keydefFilter);

        listFilter.setCurrentFile(fileToParse);
        listFilter.setErrorHandler(new DITAOTXMLErrorHandler(fileToParse.toString(), logger));
        pipe.add(listFilter);

        return pipe;
    }

    /**
     * Read a file and process it for list information.
     *
     * @param ref system path of the file to process
     * @throws DITAOTException if processing failed
     */
    private void processFile(final Reference ref) throws DITAOTException {
        currentFile = ref.filename;
        assert currentFile.isAbsolute();
        logger.info("Processing " + currentFile);
        final String[] params = { currentFile.toString() };

        try {
            // Chain the filters in front of the reader; the terminal handler discards events,
            // parsing is done for the filters' side effects only.
            XMLReader xmlSource = getXmlReader(ref.format);
            for (final XMLFilter f: getProcessingPipe(currentFile)) {
                f.setParent(xmlSource);
                f.setEntityResolver(CatalogUtils.getCatalogResolver());
                xmlSource = f;
            }
            xmlSource.setContentHandler(nullHandler);

            xmlSource.parse(currentFile.toString());

            if (listFilter.isValidInput()) {
                processParseResult(currentFile);
                categorizeCurrentFile(ref);
            } else if (!currentFile.equals(rootFile)) {
                logger.error(MessageUtils.getMessage("DOTJ021E", params).toString());
                failureList.add(currentFile);
            }
        } catch (final RuntimeException e) {
            throw e;
        } catch (final SAXParseException sax) {
            final Exception inner = sax.getException();
            if (inner != null && inner instanceof DITAOTException) {
                throw (DITAOTException) inner;
            }
            // Errors in the root file are always fatal; otherwise fatal only in STRICT mode
            if (currentFile.equals(rootFile)) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ012F", params).toString() + ": " + sax.getMessage(), sax);
            } else if
 (processingMode == Mode.STRICT) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ013E", params).toString() + ": " + sax.getMessage(), sax);
            } else {
                logger.error(MessageUtils.getMessage("DOTJ013E", params).toString() + ": " + sax.getMessage(), sax);
            }
            failureList.add(currentFile);
        } catch (final FileNotFoundException e) {
            // Distinguish a genuinely missing file from a file that exists but failed to load
            if (!exists(currentFile)) {
                if (currentFile.equals(rootFile)) {
                    throw new DITAOTException(MessageUtils.getMessage("DOTA069F", params).toString(), e);
                } else if (processingMode == Mode.STRICT) {
                    throw new DITAOTException(MessageUtils.getMessage("DOTX008E", params).toString(), e);
                } else {
                    logger.error(MessageUtils.getMessage("DOTX008E", params).toString());
                }
            } else if (currentFile.equals(rootFile)) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ078F", params).toString() + " Cannot load file: " + e.getMessage(), e);
            } else if (processingMode == Mode.STRICT) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ079E", params).toString() + " Cannot load file: " + e.getMessage(), e);
            } else {
                logger.error(MessageUtils.getMessage("DOTJ079E", params).toString() + " Cannot load file: " + e.getMessage());
            }
            failureList.add(currentFile);
        } catch (final Exception e) {
            if (currentFile.equals(rootFile)) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ012F", params).toString() + ": " + e.getMessage(), e);
            } else if (processingMode == Mode.STRICT) {
                throw new DITAOTException(MessageUtils.getMessage("DOTJ013E", params).toString() + ": " + e.getMessage(), e);
            } else {
                logger.error(MessageUtils.getMessage("DOTJ013E", params).toString() + ": " + e.getMessage(), e);
            }
            failureList.add(currentFile);
        }

        if (!listFilter.isValidInput() && currentFile.equals(rootFile)) {
            if (validate) {
                // stop the build if all content in the input file was filtered out.
                throw new DITAOTException(MessageUtils.getMessage("DOTJ022F", params).toString());
            } else {
                // stop the build if the content of the file is not valid.
                throw new DITAOTException(MessageUtils.getMessage("DOTJ034F", params).toString());
            }
        }

        doneList.add(currentFile);
        listFilter.reset();
        keydefFilter.reset();
    }

    /**
     * Process results from parsing a single topic or map
     *
     * @param currentFile absolute URI processes files
     */
    private void processParseResult(final URI currentFile) {
        // Category non-copyto result and update uplevels accordingly
        final Set<Reference> nonCopytoResult = new LinkedHashSet<>(128);
        nonCopytoResult.addAll(listFilter.getNonConrefCopytoTargets());
        for (final URI f : listFilter.getConrefTargets()) {
            nonCopytoResult.add(new Reference(stripFragment(f), listFilter.currentFileFormat()));
        }
        for (final URI f : listFilter.getCopytoMap().values()) {
            nonCopytoResult.add(new Reference(stripFragment(f)));
        }
        for (final URI f : listFilter.getIgnoredCopytoSourceSet()) {
            nonCopytoResult.add(new Reference(stripFragment(f)));
        }
        for (final URI filename1 : listFilter.getCoderefTargetSet()) {
            nonCopytoResult.add(new Reference(stripFragment(filename1)));
        }
        for (final Reference file: nonCopytoResult) {
            categorizeReferenceFile(file);
            updateUplevels(file.filename);
        }
        for (final Map.Entry<URI, URI> e : listFilter.getCopytoMap().entrySet()) {
            final URI source = e.getValue();
            final URI target = e.getKey();
            copyTo.put(target, source);
            updateUplevels(target);
        }
        final Set<URI> nonTopicrefReferenceSet = new HashSet<>();
        nonTopicrefReferenceSet.addAll(listFilter.getNonTopicrefReferenceSet());
        nonTopicrefReferenceSet.removeAll(listFilter.getNormalProcessingRoleSet());
        nonTopicrefReferenceSet.removeAll(listFilter.getResourceOnlySet());
        for (final URI file: nonTopicrefReferenceSet) {
            updateUplevels(file);
        }
        schemeSet.addAll(listFilter.getSchemeRefSet());

        // collect key definitions
        for (final Map.Entry<String, KeyDef> e: keydefFilter.getKeysDMap().entrySet()) {
            // key and value.keys will differ when keydef is a redirect to another keydef
            final String key = e.getKey();
            final KeyDef value = e.getValue();
            if
 (schemeSet.contains(currentFile)) {
                schemekeydefMap.put(key, new KeyDef(key, value.href, value.scope, value.format, currentFile, null));
            }
        }

        hrefTargetSet.addAll(listFilter.getHrefTargets());
        conrefTargetSet.addAll(listFilter.getConrefTargets());
        final Set<URI> nonConrefCopytoTargets = listFilter.getNonConrefCopytoTargets().stream()
                .map(r -> r.filename)
                .collect(Collectors.toSet());
        nonConrefCopytoTargetSet.addAll(nonConrefCopytoTargets);
        coderefTargetSet.addAll(listFilter.getCoderefTargets());
        outDitaFilesSet.addAll(listFilter.getOutDitaFilesSet());

        // Generate topic-scheme dictionary
        final Set<URI> schemeSet = listFilter.getSchemeSet();
        if (schemeSet != null && !schemeSet.isEmpty()) {
            // Record the schemes for the current file and for every href target it references
            Set<URI> children = schemeDictionary.get(currentFile);
            if (children == null) {
                children = new HashSet<>();
            }
            children.addAll(schemeSet);
            schemeDictionary.put(currentFile, children);

            final Set<URI> hrfSet = listFilter.getHrefTargets();
            for (final URI filename: hrfSet) {
                children = schemeDictionary.get(filename);
                if (children == null) {
                    children = new HashSet<>();
                }
                children.addAll(schemeSet);
                schemeDictionary.put(filename, children);
            }
        }
    }

    /**
     * Categorize current file type
     *
     * @param ref file path
     */
    private void categorizeCurrentFile(final Reference ref) {
        final URI currentFile = ref.filename;
        if (listFilter.hasConaction()) {
            conrefpushSet.add(currentFile);
        }
        if (listFilter.hasConRef()) {
            conrefSet.add(currentFile);
        }
        if (listFilter.hasKeyRef()) {
            keyrefSet.add(currentFile);
        }
        if (listFilter.hasCodeRef()) {
            coderefSet.add(currentFile);
        }

        if (listFilter.isDitaTopic()) {
            // Remember non-DITA source formats so output file info can retain them
            if (ref.format != null && !ref.format.equals(ATTR_FORMAT_VALUE_DITA)) {
                assert currentFile.getFragment() == null;
                if (!sourceFormat.containsKey(currentFile)) {
                    sourceFormat.put(currentFile, ref.format);
                }
            }
            fullTopicSet.add(currentFile);
            hrefTargetSet.add(currentFile);
            if (listFilter.hasHref()) {
                hrefTopicSet.add(currentFile);
            }
        } else if (listFilter.isDitaMap()) {
            fullMapSet.add(currentFile);
        }
    }

    /**
     * Categorize
 file.
     *
     * @param file file system path with optional format
     */
    private void categorizeReferenceFile(final Reference file) {
        // avoid files referred by coderef being added into wait list
        if (listFilter.getCoderefTargets().contains(file.filename)) {
            return;
        }
        if (isFormatDita(file.format) && listFilter.isDitaTopic() && !job.crawlTopics() && !listFilter.getConrefTargets().contains(file.filename)) {
            return; // Do not process topics linked from within topics
        } else if ((isFormatDita(file.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(file.format))) {
            addToWaitList(file);
        } else if (ATTR_FORMAT_VALUE_IMAGE.equals(file.format)) {
            formatSet.add(file);
            if (!exists(file.filename)) {
                logger.warn(MessageUtils.getMessage("DOTX008E", file.filename.toString()).toString());
            }
        } else if (ATTR_FORMAT_VALUE_DITAVAL.equals(file.format)) {
            formatSet.add(file);
        } else {
            // HTML and other non-DITA, non-image references, grouped by format
            htmlSet.put(file.format, file.filename);
        }
    }

    /**
     * Update uplevels if needed. If the parameter contains a {@link org.dita.dost.util.Constants#STICK STICK}, it and
     * anything following it is removed.
     *
     * @param file file path
     */
    private void updateUplevels(final URI file) {
        assert file.isAbsolute();
        if (file.getPath() != null) {
            final URI f = file.toString().contains(STICK)
                    ? toURI(file.toString().substring(0, file.toString().indexOf(STICK)))
                    : file;
            final URI relative = getRelativePath(rootFile, f).normalize();
            // Each "../" prefix is 3 characters, hence lastIndex / 3 + 1 uplevels
            final int lastIndex = relative.getPath().lastIndexOf(".." + URI_SEPARATOR);
            if (lastIndex != -1) {
                final int newUplevels = lastIndex / 3 + 1;
                uplevels = Math.max(newUplevels, uplevels);
            }
        }
    }

    /**
     * Add the given file the wait list if it has not been parsed.
     *
     * @param ref reference to absolute system path
     */
    private void addToWaitList(final Reference ref) {
        final URI file = ref.filename;
        assert file.isAbsolute() && file.getFragment() == null;
        // Skip files already processed, already queued, or currently being parsed
        if (doneList.contains(file) || waitList.containsKey(ref.filename) || file.equals(currentFile)) {
            return;
        }

        waitList.put(ref.filename, ref);
    }

    /**
     * Update base directory and prefix based on uplevels.
     */
    private void updateBaseDirectory() {
        for (int i = uplevels; i > 0; i--) {
            baseInputDir = baseInputDir.resolve("..");
        }
    }

    /**
     * Get up-levels absolute path.
     *
     * @param rootTemp relative URI for temporary root file
     * @return path to up-level, e.g. {@code ../../}, may be empty string
     */
    private String getLevelsPath(final URI rootTemp) {
        final int u = rootTemp.toString().split(URI_SEPARATOR).length - 1;
        if (u == 0) {
            return "";
        }
        final StringBuilder buff = new StringBuilder();
        for (int current = u; current > 0; current--) {
            buff.append("..").append(File.separator);
        }
        return buff.toString();
    }

    /**
     * Parse filter file
     *
     * @return configured filter utility
     */
    private FilterUtils parseFilterFile() {
        final FilterUtils filterUtils;
        if (ditavalFile.exists()) {
            final DitaValReader ditaValReader = new DitaValReader();
            ditaValReader.setLogger(logger);
            ditaValReader.setJob(job);
            ditaValReader.read(ditavalFile.toURI());
            flagImageSet.addAll(ditaValReader.getImageList());
            relFlagImagesSet.addAll(ditaValReader.getRelFlagImageList());
            filterUtils = new FilterUtils(printTranstype.contains(transtype), ditaValReader.getFilterMap(),
                    ditaValReader.getForegroundConflictColor(), ditaValReader.getBackgroundConflictColor());
        } else {
            filterUtils = new FilterUtils(printTranstype.contains(transtype));
        }
        filterUtils.setLogger(logger);
        return filterUtils;
    }

    /**
     * Handle topic which are only conref sources from normal processing.
     * Pure conref targets are pulled out of normal topic processing and
     * treated as resource-only.
     */
    private void handleConref() {
        // Get pure conref targets
        final Set<URI> pureConrefTargets = new HashSet<>();
        for (final URI target: conrefTargetSet) {
            if (!nonConrefCopytoTargetSet.contains(target)) {
                pureConrefTargets.add(target);
            }
        }
        conrefTargetSet = pureConrefTargets;
        // Remove pure conref targets from fullTopicSet
        fullTopicSet.removeAll(pureConrefTargets);
        // Treat pure conref targets same as resource-only
        resourceOnlySet.addAll(pureConrefTargets);
    }

    /**
     * Write result files.
     *
     * @throws DITAOTException if writing result files failed
     */
    private void outputResult() throws DITAOTException {
        // Instantiate the temp-file naming scheme configured on the job;
        // reflection failures are configuration errors, hence unchecked.
        try {
            tempFileNameScheme = (TempFileNameScheme) Class.forName(job.getProperty("temp-file-name-scheme")).newInstance();
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        tempFileNameScheme.setBaseDir(baseInputDir);

        // assume empty Job
        final URI rootTemp = tempFileNameScheme.generateTempFileName(rootFile);
        final File relativeRootFile = toFile(rootTemp);

        job.setInputDir(baseInputDir);
        job.setInputMap(rootTemp);

        //If root input file is marked resource only due to conref or other feature, remove that designation
        if (resourceOnlySet.contains(rootFile)) {
            resourceOnlySet.remove(rootFile);
        }

        job.setProperty(INPUT_DITAMAP_LIST_FILE_LIST, USER_INPUT_FILE_LIST_FILE);
        final File inputfile = new File(job.tempDir, USER_INPUT_FILE_LIST_FILE);
        writeListFile(inputfile, relativeRootFile.toString());

        job.setProperty("tempdirToinputmapdir.relative.value", StringUtils.escapeRegExp(getPrefix(relativeRootFile)));
        job.setProperty("uplevels", getLevelsPath(rootTemp));

        resourceOnlySet.addAll(resources);
        // Files the filter saw only in resource-only role (never in normal role).
        final Set<URI> res = new HashSet<>();
        res.addAll(listFilter.getResourceOnlySet());
        res.removeAll(listFilter.getNormalProcessingRoleSet());
        resourceOnlySet.addAll(res);

        if (job.getOnlyTopicInMap() || !job.crawlTopics()) {
            // Non-topicref references become resource-only when topics are not crawled.
            final Set<URI> res1 = new HashSet<>();
            res1.addAll(listFilter.getNonTopicrefReferenceSet());
            res1.removeAll(listFilter.getNormalProcessingRoleSet());
            res1.removeAll(listFilter.getResourceOnlySet());
            resourceOnlySet.addAll(res1);
        }

        // Fold each collected set into per-file FileInfo flags.
        for (final URI file: outDitaFilesSet) {
            getOrCreateFileInfo(fileinfos, file).isOutDita = true;
        }
        for (final URI file: fullTopicSet) {
            final FileInfo ff = getOrCreateFileInfo(fileinfos, file);
            if (ff.format == null) {
                ff.format = sourceFormat.getOrDefault(ff.src, ATTR_FORMAT_VALUE_DITA);
            }
        }
        for (final URI file: fullMapSet) {
            final FileInfo ff = getOrCreateFileInfo(fileinfos, file);
            if (ff.format == null) {
                ff.format = ATTR_FORMAT_VALUE_DITAMAP;
            }
        }
        for (final URI file: hrefTopicSet) {
            final FileInfo f = getOrCreateFileInfo(fileinfos, file);
            f.hasLink = true;
            if (f.format == null && sourceFormat.containsKey(f.src)) {
                f.format = sourceFormat.get(f.src);
            }
        }
        for (final URI file: conrefSet) {
            getOrCreateFileInfo(fileinfos, file).hasConref = true;
        }
        for (final Reference file: formatSet) {
            getOrCreateFileInfo(fileinfos, file.filename).format = file.format;
        }
        for (final URI file: flagImageSet) {
            final FileInfo f = getOrCreateFileInfo(fileinfos, file);
            f.isFlagImage = true;
            f.format = ATTR_FORMAT_VALUE_IMAGE;
        }
        for (final String format: htmlSet.keySet()) {
            for (final URI file : htmlSet.get(format)) {
                getOrCreateFileInfo(fileinfos, file).format = format;
            }
        }
        for (final URI file: hrefTargetSet) {
            final FileInfo f = getOrCreateFileInfo(fileinfos, file);
            f.isTarget = true;
            if (f.format == null && sourceFormat.containsKey(f.src)) {
                f.format = sourceFormat.get(f.src);
            }
        }
        for (final URI file: schemeSet) {
            getOrCreateFileInfo(fileinfos, file).isSubjectScheme = true;
        }
        for (final URI file: coderefTargetSet) {
            final FileInfo f = getOrCreateFileInfo(fileinfos, file);
            f.isSubtarget = true;
            if (f.format == null) {
                f.format = PR_D_CODEREF.localName;
            }
        }
        for (final URI file: conrefpushSet) {
            getOrCreateFileInfo(fileinfos, file).isConrefPush = true;
        }
        for (final URI file: keyrefSet) {
            getOrCreateFileInfo(fileinfos, file).hasKeyref = true;
        }
        for (final URI file: coderefSet) {
            getOrCreateFileInfo(fileinfos, file).hasCoderef = true;
        }
        for (final URI file: resourceOnlySet) {
            getOrCreateFileInfo(fileinfos, file).isResourceOnly = true;
        }
        for (final URI resource : resources) {
            getOrCreateFileInfo(fileinfos, resource).isInputResource = true;
        }

        addFlagImagesSetToProperties(job, relFlagImagesSet);

        final Map<URI, URI> filteredCopyTo = filterConflictingCopyTo(copyTo, fileinfos.values());

        for (final FileInfo fs: fileinfos.values()) {
            if (!failureList.contains(fs.src)) {
                final URI src = filteredCopyTo.get(fs.src);
                // correct copy-to
                if (src != null) {
                    final FileInfo corr = new FileInfo.Builder(fs).src(src).build();
                    job.add(corr);
                } else {
                    job.add(fs);
                }
            }
        }
        // Register each copy-to target with its temporary file name.
        for (final URI target : filteredCopyTo.keySet()) {
            final URI tmp = tempFileNameScheme.generateTempFileName(target);
            final FileInfo fi = new FileInfo.Builder().result(target).uri(tmp).build();
            job.add(fi);
        }

        final FileInfo root = job.getFileInfo(rootFile);
        if (root == null) {
            throw new RuntimeException("Unable to set input file to job configuration");
        }
        job.add(new FileInfo.Builder(root)
                .isInput(true)
                .build());

        try {
            logger.info("Serializing job specification");
            job.write();
        } catch (final IOException e) {
            throw new DITAOTException("Failed to serialize job configuration files: " + e.getMessage(), e);
        }

        try {
            final SubjectSchemeReader subjectSchemeReader = new SubjectSchemeReader();
            subjectSchemeReader.setLogger(logger);
            subjectSchemeReader.setJob(job);
            subjectSchemeReader.writeMapToXML(addMapFilePrefix(listFilter.getRelationshipGrap()), new File(job.tempDir, FILE_NAME_SUBJECT_RELATION));
            subjectSchemeReader.writeMapToXML(addMapFilePrefix(schemeDictionary), new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY));
        } catch (final IOException e) {
            throw new DITAOTException("Failed to serialize subject scheme files: " + e.getMessage(), e);
        }

        // Eclipse Help additionally needs delayed-conref export anchors.
        if (INDEX_TYPE_ECLIPSEHELP.equals(transtype)) {
            final DelayConrefUtils delayConrefUtils = new DelayConrefUtils();
            delayConrefUtils.setLogger(logger);
            delayConrefUtils.setJob(job);
            delayConrefUtils.writeMapToXML(exportAnchorsFilter.getPluginMap());
            delayConrefUtils.writeExportAnchors(exportAnchorsFilter, tempFileNameScheme);
        }
    }

    /** Filter copy-to where target is used directly. */
    private Map<URI, URI> filterConflictingCopyTo(
            final Map<URI, URI> copyTo,
            final Collection<FileInfo> fileInfos) {
        // Targets that are also real source files win over copy-to entries.
        final Set<URI> fileinfoTargets = fileInfos.stream()
                .filter(fi -> fi.src.equals(fi.result))
                .map(fi -> fi.result)
                .collect(Collectors.toSet());
        return copyTo.entrySet().stream()
                .filter(e -> !fileinfoTargets.contains(e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    /**
     * Write list file.
     * @param inputfile output list file
     * @param relativeRootFile list value
     */
    private void writeListFile(final File inputfile, final String relativeRootFile) {
        try (Writer bufferedWriter = new BufferedWriter(new OutputStreamWriter(job.getStore().getOutputStream(inputfile.toURI())))) {
            bufferedWriter.write(relativeRootFile);
            bufferedWriter.flush();
        } catch (final IOException e) {
            // best effort: a missing list file is logged, not fatal
            logger.error(e.getMessage(), e) ;
        }
    }

    /**
     * Prefix path.
* * @param relativeRootFile relative path for root temporary file * @return either an empty string or a path which ends in {@link java.io.File#separator File.separator} * */ private String getPrefix(final File relativeRootFile) { String res; final File p = relativeRootFile.getParentFile(); if (p != null) { res = p.toString() + File.separator; } else { res = ""; } return res; } private FileInfo getOrCreateFileInfo(final Map<URI, FileInfo> fileInfos, final URI file) { assert file.getFragment() == null; final URI f = file.normalize(); FileInfo.Builder b; if (fileInfos.containsKey(f)) { b = new FileInfo.Builder(fileInfos.get(f)); } else { b = new FileInfo.Builder().src(file); } b = b.uri(tempFileNameScheme.generateTempFileName(file)); final FileInfo i = b.build(); fileInfos.put(i.src, i); return i; } /** * Convert absolute paths to relative temporary directory paths * @return map with relative keys and values */ private Map<URI, Set<URI>> addMapFilePrefix(final Map<URI, Set<URI>> map) { final Map<URI, Set<URI>> res = new HashMap<>(); for (final Map.Entry<URI, Set<URI>> e: map.entrySet()) { final URI key = e.getKey(); final Set<URI> newSet = new HashSet<>(); for (final URI file: e.getValue()) { newSet.add(tempFileNameScheme.generateTempFileName(file)); } res.put(key.equals(ROOT_URI) ? key : tempFileNameScheme.generateTempFileName(key), newSet); } return res; } /** * Add file prefix. For absolute paths the prefix is not added. 
* * @param set file paths * @return file paths with prefix */ private Map<URI, URI> addFilePrefix(final Map<URI, URI> set) { final Map<URI, URI> newSet = new HashMap<>(); for (final Map.Entry<URI, URI> file: set.entrySet()) { final URI key = tempFileNameScheme.generateTempFileName(file.getKey()); final URI value = tempFileNameScheme.generateTempFileName(file.getValue()); newSet.put(key, value); } return newSet; } private Collection<KeyDef> addFilePrefix(final Collection<KeyDef> keydefs) { final Collection<KeyDef> res = new ArrayList<>(keydefs.size()); for (final KeyDef k: keydefs) { final URI source = tempFileNameScheme.generateTempFileName(k.source); res.add(new KeyDef(k.keys, k.href, k.scope, k.format, source, null)); } return res; } /** * add FlagImangesSet to Properties, which needn't to change the dir level, * just ouput to the ouput dir. * * @param prop job configuration * @param set absolute flag image files */ private void addFlagImagesSetToProperties(final Job prop, final Set<URI> set) { final Set<URI> newSet = new LinkedHashSet<>(128); for (final URI file: set) { // assert file.isAbsolute(); if (file.isAbsolute()) { // no need to append relative path before absolute paths newSet.add(file.normalize()); } else { // In ant, all the file separator should be slash, so we need to // replace all the back slash with slash. 
newSet.add(file.normalize()); } } // write list attribute to file final String fileKey = REL_FLAGIMAGE_LIST.substring(0, REL_FLAGIMAGE_LIST.lastIndexOf("list")) + "file"; prop.setProperty(fileKey, REL_FLAGIMAGE_LIST.substring(0, REL_FLAGIMAGE_LIST.lastIndexOf("list")) + ".list"); final File list = new File(job.tempDir, prop.getProperty(fileKey)); try (Writer bufferedWriter = new BufferedWriter(new OutputStreamWriter(job.getStore().getOutputStream(list.toURI())))) { final Iterator<URI> it = newSet.iterator(); while (it.hasNext()) { bufferedWriter.write(it.next().getPath()); if (it.hasNext()) { bufferedWriter.write("\n"); } } bufferedWriter.flush(); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } prop.setProperty(REL_FLAGIMAGE_LIST, StringUtils.join(newSet, COMMA)); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.net; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.util.ReflectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; /** The class represents a cluster of computer with a tree hierarchical * network topology. * For example, a cluster may be consists of many data centers filled * with racks of computers. * In a network topology, leaves represent data nodes (computers) and inner * nodes represent switches/routers that manage traffic in/out of data centers * or racks. 
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Unstable
public class NetworkTopology {
  public final static String DEFAULT_RACK = "/default-rack";
  public static final Logger LOG =
      LoggerFactory.getLogger(NetworkTopology.class);

  private static final char PATH_SEPARATOR = '/';
  private static final String PATH_SEPARATOR_STR = "/";
  private static final String ROOT = "/";

  /** Thrown when an added node would make the topology tree inconsistent. */
  public static class InvalidTopologyException extends RuntimeException {
    private static final long serialVersionUID = 1L;
    public InvalidTopologyException(String msg) {
      super(msg);
    }
  }

  /**
   * Get an instance of NetworkTopology based on the value of the configuration
   * parameter net.topology.impl.
   *
   * @param conf the configuration to be used
   * @return an instance of NetworkTopology
   */
  public static NetworkTopology getInstance(Configuration conf){
    return getInstance(conf, InnerNodeImpl.FACTORY);
  }

  /**
   * Same as {@link #getInstance(Configuration)} but with an explicit
   * inner-node factory.
   *
   * @param conf the configuration to be used
   * @param factory factory used to create the inner (non-leaf) nodes
   * @return an instance of NetworkTopology
   */
  public static NetworkTopology getInstance(Configuration conf,
      InnerNode.Factory factory) {
    NetworkTopology nt = ReflectionUtils.newInstance(
        conf.getClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
            NetworkTopology.class, NetworkTopology.class), conf);
    return nt.init(factory);
  }

  /** Re-initialize the root map when a non-default factory is supplied. */
  protected NetworkTopology init(InnerNode.Factory factory) {
    if (!factory.equals(this.factory)) {
      // the constructor has initialized the factory to default. So only init
      // again if another factory is specified.
      this.factory = factory;
      this.clusterMap = factory.newInnerNode(NodeBase.ROOT);
    }
    return this;
  }

  InnerNode.Factory factory;

  /**
   * the root cluster map
   */
  InnerNode clusterMap;
  /** Depth of all leaf nodes */
  private int depthOfAllLeaves = -1;
  /** rack counter */
  protected int numOfRacks = 0;

  /**
   * Whether or not this cluster has ever consisted of more than 1 rack,
   * according to the NetworkTopology.
   */
  private boolean clusterEverBeenMultiRack = false;

  /** the lock used to manage access */
  protected ReadWriteLock netlock = new ReentrantReadWriteLock();

  // keeping the constructor because other components like MR still uses this.
  public NetworkTopology() {
    this.factory = InnerNodeImpl.FACTORY;
    this.clusterMap = factory.newInnerNode(NodeBase.ROOT);
  }

  /** Add a leaf node
   * Update node counter &amp; rack counter if necessary
   * @param node node to be added; can be null
   * @exception IllegalArgumentException if the node to be added is an inner
   * node (i.e. not a leaf), or its rack location resolves to a leaf
   */
  public void add(Node node) {
    if (node==null) return;
    int newDepth = NodeBase.locationToDepth(node.getNetworkLocation()) + 1;
    netlock.writeLock().lock();
    try {
      if( node instanceof InnerNode ) {
        throw new IllegalArgumentException(
          "Not allow to add an inner node: "+NodeBase.getPath(node));
      }
      // All leaves must live at the same depth; reject mixed-depth topologies.
      if ((depthOfAllLeaves != -1) && (depthOfAllLeaves != newDepth)) {
        LOG.error("Error: can't add leaf node {} at depth {} to topology:{}\n",
            NodeBase.getPath(node), newDepth, this);
        throw new InvalidTopologyException("Failed to add " + NodeBase.getPath(node)
            + ": You cannot have a rack and a non-rack node at the same " +
            "level of the network topology.");
      }
      Node rack = getNodeForNetworkLocation(node);
      if (rack != null && !(rack instanceof InnerNode)) {
        throw new IllegalArgumentException("Unexpected data node "
            + node.toString()
            + " at an illegal network location");
      }
      if (clusterMap.add(node)) {
        LOG.info("Adding a new node: "+NodeBase.getPath(node));
        // rack == null means this node introduced a brand-new rack
        if (rack == null) {
          incrementRacks();
        }
        if (!(node instanceof InnerNode)) {
          if (depthOfAllLeaves == -1) {
            depthOfAllLeaves = node.getLevel();
          }
        }
      }
      LOG.debug("NetworkTopology became:\n{}", this);
    } finally {
      netlock.writeLock().unlock();
    }
  }

  /** Bump the rack count and latch the "has ever been multi-rack" flag. */
  protected void incrementRacks() {
    numOfRacks++;
    if (!clusterEverBeenMultiRack && numOfRacks > 1) {
      clusterEverBeenMultiRack = true;
    }
  }

  /**
   * Return a reference to the node given its string representation.
   * Default implementation delegates to {@link #getNode(String)}.
   *
   * <p>To be overridden in subclasses for specific NetworkTopology
   * implementations, as alternative to overriding the full {@link #add(Node)}
   * method.
   *
   * @param node The string representation of this node's network location is
   * used to retrieve a Node object.
   * @return a reference to the node; null if the node is not in the tree
   *
   * @see #add(Node)
   * @see #getNode(String)
   */
  protected Node getNodeForNetworkLocation(Node node) {
    return getNode(node.getNetworkLocation());
  }

  /**
   * Given a string representation of a rack, return its children
   * @param loc a path-like string representation of a rack
   * @return a newly allocated list with all the node's children
   */
  public List<Node> getDatanodesInRack(String loc) {
    netlock.readLock().lock();
    try {
      loc = NodeBase.normalize(loc);
      if (!NodeBase.ROOT.equals(loc)) {
        // getLoc expects a path relative to the root, so drop the leading '/'
        loc = loc.substring(1);
      }
      InnerNode rack = (InnerNode) clusterMap.getLoc(loc);
      if (rack == null) {
        return null;
      }
      return new ArrayList<Node>(rack.getChildren());
    } finally {
      netlock.readLock().unlock();
    }
  }

  /** Remove a node
   * Update node counter and rack counter if necessary
   * @param node node to be removed; can be null
   */
  public void remove(Node node) {
    if (node==null) return;
    if( node instanceof InnerNode ) {
      throw new IllegalArgumentException(
        "Not allow to remove an inner node: "+NodeBase.getPath(node));
    }
    LOG.info("Removing a node: "+NodeBase.getPath(node));
    netlock.writeLock().lock();
    try {
      if (clusterMap.remove(node)) {
        // if the node's rack no longer resolves after removal, it was the
        // rack's last member: drop the rack count
        InnerNode rack = (InnerNode)getNode(node.getNetworkLocation());
        if (rack == null) {
          numOfRacks--;
        }
      }
      LOG.debug("NetworkTopology became:\n{}", this);
    } finally {
      netlock.writeLock().unlock();
    }
  }

  /** Check if the tree contains node <i>node</i>
   *
   * @param node a node
   * @return true if <i>node</i> is already in the tree; false otherwise
   */
  public boolean contains(Node node) {
    if (node == null) return false;
    netlock.readLock().lock();
    try {
      // Walk up the ancestor chain; the node is in the tree iff the walk
      // reaches this cluster's root map by reference.
      Node parent = node.getParent();
      for (int level = node.getLevel(); parent != null && level > 0;
          parent = parent.getParent(), level--) {
        if (parent == clusterMap) {
          return true;
        }
      }
    } finally {
      netlock.readLock().unlock();
    }
    return false;
  }

  /** Given a string representation of a node, return its reference
   *
   * @param loc
   *          a path-like string representation of a node
   * @return a reference to the node; null if the node is not in the tree
   */
  public Node getNode(String loc) {
    netlock.readLock().lock();
    try {
      loc = NodeBase.normalize(loc);
      if (!NodeBase.ROOT.equals(loc))
        loc = loc.substring(1);
      return clusterMap.getLoc(loc);
    } finally {
      netlock.readLock().unlock();
    }
  }

  /**
   * @return true if this cluster has ever consisted of multiple racks, even if
   *         it is not now a multi-rack cluster.
   */
  public boolean hasClusterEverBeenMultiRack() {
    return clusterEverBeenMultiRack;
  }

  /** Given a string representation of a rack for a specific network
   *  location
   *
   * To be overridden in subclasses for specific NetworkTopology
   * implementations, as alternative to overriding the full
   * {@link #getRack(String)} method.
   * @param loc
   *          a path-like string representation of a network location
   * @return a rack string
   */
  public String getRack(String loc) {
    return loc;
  }

  /** @return the total number of racks */
  public int getNumOfRacks() {
    netlock.readLock().lock();
    try {
      return numOfRacks;
    } finally {
      netlock.readLock().unlock();
    }
  }

  /** @return the total number of leaf nodes */
  public int getNumOfLeaves() {
    netlock.readLock().lock();
    try {
      return clusterMap.getNumOfLeaves();
    } finally {
      netlock.readLock().unlock();
    }
  }

  /** Return the distance between two nodes
   * It is assumed that the distance from one node to its parent is 1
   * The distance between two nodes is calculated by summing up their distances
   * to their closest common ancestor.
   * @param node1 one node
   * @param node2 another node
   * @return the distance between node1 and node2 which is zero if they are the same
   *  or {@link Integer#MAX_VALUE} if node1 or node2 do not belong to the cluster
   */
  public int getDistance(Node node1, Node node2) {
    if ((node1 != null && node1.equals(node2)) ||
        (node1 == null && node2 == null)) {
      return 0;
    }
    if (node1 == null || node2 == null) {
      LOG.warn("One of the nodes is a null pointer");
      return Integer.MAX_VALUE;
    }
    Node n1=node1, n2=node2;
    int dis = 0;
    netlock.readLock().lock();
    try {
      int level1=node1.getLevel(), level2=node2.getLevel();
      // First climb the deeper node until both sides are at the same level...
      while(n1!=null && level1>level2) {
        n1 = n1.getParent(); level1--; dis++;
      }
      while(n2!=null && level2>level1) {
        n2 = n2.getParent(); level2--; dis++;
      }
      // ...then climb both in lockstep until their parents coincide.
      while(n1!=null && n2!=null && n1.getParent()!=n2.getParent()) {
        n1=n1.getParent(); n2=n2.getParent(); dis+=2;
      }
    } finally {
      netlock.readLock().unlock();
    }
    if (n1==null) {
      LOG.warn("The cluster does not contain node: "+NodeBase.getPath(node1));
      return Integer.MAX_VALUE;
    }
    if (n2==null) {
      LOG.warn("The cluster does not contain node: "+NodeBase.getPath(node2));
      return Integer.MAX_VALUE;
    }
    // +2 accounts for the final hop from each side up to the common parent.
    return dis+2;
  }

  /** Return the distance between two nodes by comparing their network paths
   * without checking if they belong to the same ancestor node by reference.
   * It is assumed that the distance from one node to its parent is 1
   * The distance between two nodes is calculated by summing up their distances
   * to their closest common ancestor.
   * @param node1 one node
   * @param node2 another node
   * @return the distance between node1 and node2
   */
  static public int getDistanceByPath(Node node1, Node node2) {
    if (node1 == null && node2 == null) {
      return 0;
    }
    if (node1 == null || node2 == null) {
      LOG.warn("One of the nodes is a null pointer");
      return Integer.MAX_VALUE;
    }
    String[] paths1 = NodeBase.getPathComponents(node1);
    String[] paths2 = NodeBase.getPathComponents(node2);
    int dis = 0;
    int index = 0;
    int minLevel = Math.min(paths1.length, paths2.length);
    while (index < minLevel) {
      if (!paths1[index].equals(paths2[index])) {
        // Once the path starts to diverge, compute the distance that include
        // the rest of paths.
        dis += 2 * (minLevel - index);
        break;
      }
      index++;
    }
    // The deeper path still has to climb the extra levels on its own.
    dis += Math.abs(paths1.length - paths2.length);
    return dis;
  }

  /** Check if two nodes are on the same rack
   * @param node1 one node (can be null)
   * @param node2 another node (can be null)
   * @return true if node1 and node2 are on the same rack; false otherwise
   * @exception IllegalArgumentException when either node1 or node2 is null, or
   * node1 or node2 do not belong to the cluster
   */
  public boolean isOnSameRack( Node node1,  Node node2) {
    if (node1 == null || node2 == null) {
      return false;
    }
    netlock.readLock().lock();
    try {
      return isSameParents(node1, node2);
    } finally {
      netlock.readLock().unlock();
    }
  }

  /**
   * Check if network topology is aware of NodeGroup
   */
  public boolean isNodeGroupAware() {
    return false;
  }

  /**
   * Return false directly as not aware of NodeGroup, to be override in sub-class
   */
  public boolean isOnSameNodeGroup(Node node1, Node node2) {
    return false;
  }

  /**
   * Compare the parents of each node for equality
   *
   * <p>To be overridden in subclasses for specific NetworkTopology
   * implementations, as alternative to overriding the full
   * {@link #isOnSameRack(Node, Node)} method.
   *
   * @param node1 the first node to compare
   * @param node2 the second node to compare
   * @return true if their parents are equal, false otherwise
   *
   * @see #isOnSameRack(Node, Node)
   */
  protected boolean isSameParents(Node node1, Node node2) {
    return node1.getParent()==node2.getParent();
  }

  /** Shared PRNG for random node selection; seedable for deterministic tests. */
  private static final Random r = new Random();

  @VisibleForTesting
  void setRandomSeed(long seed) {
    r.setSeed(seed);
  }

  /**
   * Randomly choose a node.
   *
   * @param scope range of nodes from which a node will be chosen
   * @return the chosen node
   *
   * @see #chooseRandom(String, Collection)
   */
  public Node chooseRandom(final String scope) {
    return chooseRandom(scope, null);
  }

  /**
   * Randomly choose one node from <i>scope</i>.
   *
   * If scope starts with ~, choose one from the all nodes except for the
   * ones in <i>scope</i>; otherwise, choose one from <i>scope</i>.
   * If excludedNodes is given, choose a node that's not in excludedNodes.
   *
   * @param scope range of nodes from which a node will be chosen
   * @param excludedNodes nodes to be excluded from
   * @return the chosen node
   */
  public Node chooseRandom(final String scope,
      final Collection<Node> excludedNodes) {
    netlock.readLock().lock();
    try {
      if (scope.startsWith("~")) {
        // "~scope" means: choose from the whole tree minus the scope subtree
        return chooseRandom(NodeBase.ROOT, scope.substring(1), excludedNodes);
      } else {
        return chooseRandom(scope, null, excludedNodes);
      }
    } finally {
      netlock.readLock().unlock();
    }
  }

  /**
   * Choose a random node from {@code scope}, skipping {@code excludedScope}
   * (a subtree) and {@code excludedNodes} (individual nodes).
   */
  protected Node chooseRandom(final String scope, String excludedScope,
      final Collection<Node> excludedNodes) {
    if (excludedScope != null) {
      if (scope.startsWith(excludedScope)) {
        // the whole scope is excluded; nothing can be chosen
        return null;
      }
      if (!excludedScope.startsWith(scope)) {
        // the excluded subtree lies outside scope, so it is irrelevant
        excludedScope = null;
      }
    }
    Node node = getNode(scope);
    if (!(node instanceof InnerNode)) {
      // scope is a leaf (or absent); return it unless explicitly excluded
      return excludedNodes != null && excludedNodes.contains(node) ?
          null : node;
    }
    InnerNode innerNode = (InnerNode)node;
    int numOfDatanodes = innerNode.getNumOfLeaves();
    if (excludedScope == null) {
      node = null;
    } else {
      node = getNode(excludedScope);
      // subtract the excluded subtree's leaves (or the single excluded leaf)
      if (!(node instanceof InnerNode)) {
        numOfDatanodes -= 1;
      } else {
        numOfDatanodes -= ((InnerNode)node).getNumOfLeaves();
      }
    }
    if (numOfDatanodes <= 0) {
      LOG.debug("Failed to find datanode (scope=\"{}\" excludedScope=\"{}\")."
              + " numOfDatanodes={}",
          scope, excludedScope, numOfDatanodes);
      return null;
    }
    final int availableNodes;
    if (excludedScope == null) {
      availableNodes = countNumOfAvailableNodes(scope, excludedNodes);
    } else {
      availableNodes =
          countNumOfAvailableNodes("~" + excludedScope, excludedNodes);
    }
    LOG.debug("Choosing random from {} available nodes on node {},"
        + " scope={}, excludedScope={}, excludeNodes={}. numOfDatanodes={}.",
        availableNodes, innerNode, scope, excludedScope, excludedNodes,
        numOfDatanodes);
    Node ret = null;
    if (availableNodes > 0) {
      ret = chooseRandom(innerNode, node, excludedNodes, numOfDatanodes,
          availableNodes);
    }
    LOG.debug("chooseRandom returning {}", ret);
    return ret;
  }

  /**
   * Randomly choose one node under <i>parentNode</i>, considering the exclude
   * nodes and scope. Should be called with {@link #netlock}'s readlock held.
   *
   * @param parentNode the parent node
   * @param excludedScopeNode the node corresponding to the exclude scope.
   * @param excludedNodes a collection of nodes to be excluded from
   * @param totalInScopeNodes total number of nodes under parentNode, excluding
   *                          the excludedScopeNode
   * @param availableNodes number of available nodes under parentNode that
   *                       could be chosen, excluding excludedNodes
   * @return the chosen node, or null if none can be chosen
   */
  private Node chooseRandom(final InnerNode parentNode,
      final Node excludedScopeNode, final Collection<Node> excludedNodes,
      final int totalInScopeNodes, final int availableNodes) {
    Preconditions.checkArgument(
        totalInScopeNodes >= availableNodes && availableNodes > 0, String
            .format("%d should >= %d, and both should be positive.",
                totalInScopeNodes, availableNodes));
    if (excludedNodes == null || excludedNodes.isEmpty()) {
      // if there are no excludedNodes, randomly choose a node
      final int index = r.nextInt(totalInScopeNodes);
      return parentNode.getLeaf(index, excludedScopeNode);
    }

    // excludedNodes non empty.
    // Choose the nth VALID node, where n is random. VALID meaning it can be
    // returned, after considering exclude scope and exclude nodes.
    // The probability of being chosen should be equal for all VALID nodes.
    // Notably, we do NOT choose nth node, and find the next valid node
    // if n is excluded - this will make the probability of the node immediately
    // after an excluded node higher.
    //
    // Start point is always 0 and that's fine, because the nth valid node
    // logic provides equal randomness.
    //
    // Consider this example, where 1,3,5 out of the 10 nodes are excluded:
    // 1 2 3 4 5 6 7 8 9 10
    // x   x   x
    // We will randomly choose the nth valid node where n is [0,6].
    // We do NOT choose a random number n and just use the closest valid node,
    // for example both n=3 and n=4 will choose 4, making it a 2/10 probability,
    // higher than the expected 1/7
    // totalInScopeNodes=10 and availableNodes=7 in this example.
    int nthValidToReturn = r.nextInt(availableNodes);
    LOG.debug("nthValidToReturn is {}", nthValidToReturn);
    Node ret =
        parentNode.getLeaf(r.nextInt(totalInScopeNodes), excludedScopeNode);
    if (!excludedNodes.contains(ret)) {
      // return if we're lucky enough to get a valid node at a random first pick
      LOG.debug("Chosen node {} from first random", ret);
      return ret;
    } else {
      ret = null;
    }
    Node lastValidNode = null;
    // Linear scan for the nth valid leaf.
    for (int i = 0; i < totalInScopeNodes; ++i) {
      ret = parentNode.getLeaf(i, excludedScopeNode);
      if (!excludedNodes.contains(ret)) {
        if (nthValidToReturn == 0) {
          break;
        }
        --nthValidToReturn;
        lastValidNode = ret;
      } else {
        LOG.debug("Node {} is excluded, continuing.", ret);
        ret = null;
      }
    }
    if (ret == null && lastValidNode != null) {
      // Defensive fallback: availableNodes promised more valid nodes than the
      // scan found; log loudly and return the last valid one seen.
      LOG.error("BUG: Found lastValidNode {} but not nth valid node. "
              + "parentNode={}, excludedScopeNode={}, excludedNodes={}, "
              + "totalInScopeNodes={}, availableNodes={}, nthValidToReturn={}.",
          lastValidNode, parentNode, excludedScopeNode, excludedNodes,
          totalInScopeNodes, availableNodes, nthValidToReturn);
      ret = lastValidNode;
    }
    return ret;
  }

  /** return leaves in <i>scope</i>
   * @param scope a path string
   * @return leaves nodes under specific scope
   */
  public List<Node> getLeaves(String scope) {
    Node node = getNode(scope);
    List<Node> leafNodes = new ArrayList<Node>();
    if (!(node instanceof InnerNode)) {
      // a leaf (possibly null) is its own only "leaf"
      leafNodes.add(node);
    } else {
      InnerNode innerNode = (InnerNode) node;
      for (int i=0;i<innerNode.getNumOfLeaves();i++) {
        leafNodes.add(innerNode.getLeaf(i, null));
      }
    }
    return leafNodes;
  }

  /** return the number of leaves in <i>scope</i> but not in <i>excludedNodes</i>
   * if scope starts with ~, return the number of nodes that are not
   * in <i>scope</i> and <i>excludedNodes</i>;
   * @param scope a path string that may start with ~
   * @param excludedNodes a list of nodes
   * @return number of available nodes
   */
  @VisibleForTesting
  public int countNumOfAvailableNodes(String scope,
      Collection<Node> excludedNodes) {
    boolean isExcluded=false;
    if (scope.startsWith("~")) {
      isExcluded=true;
      scope=scope.substring(1);
    }
    scope = NodeBase.normalize(scope);
    int excludedCountInScope = 0; // the number of nodes in both scope & excludedNodes
    int excludedCountOffScope = 0; // the number of nodes outside scope & excludedNodes
    netlock.readLock().lock();
    try {
      if (excludedNodes != null) {
        for (Node node : excludedNodes) {
          // re-resolve by path so stale Node references are counted correctly
          node = getNode(NodeBase.getPath(node));
          if (node == null) {
            continue;
          }
          // trailing separators prevent "/r1" from matching "/r10"
          if ((NodeBase.getPath(node) + NodeBase.PATH_SEPARATOR_STR)
              .startsWith(scope + NodeBase.PATH_SEPARATOR_STR)) {
            excludedCountInScope++;
          } else {
            excludedCountOffScope++;
          }
        }
      }
      Node n = getNode(scope);
      int scopeNodeCount = 0;
      if (n != null) {
        scopeNodeCount++;
      }
      if (n instanceof InnerNode) {
        scopeNodeCount=((InnerNode)n).getNumOfLeaves();
      }
      if (isExcluded) {
        return clusterMap.getNumOfLeaves() - scopeNodeCount
            - excludedCountOffScope;
      } else {
        return scopeNodeCount - excludedCountInScope;
      }
    } finally {
      netlock.readLock().unlock();
    }
  }

  /** convert a network tree to a string. */
  @Override
  public String toString() {
    // print the number of racks
    StringBuilder tree = new StringBuilder();
    tree.append("Number of racks: ");
    tree.append(numOfRacks);
    tree.append("\n");
    // print the number of leaves
    int numOfLeaves = getNumOfLeaves();
    tree.append("Expected number of leaves:");
    tree.append(numOfLeaves);
    tree.append("\n");
    // print nodes
    for(int i=0; i<numOfLeaves; i++) {
      tree.append(NodeBase.getPath(clusterMap.getLeaf(i, null)));
      tree.append("\n");
    }
    return tree.toString();
  }

  /**
   * Divide networklocation string into two parts by last separator, and get
   * the first part here.
   *
   * @param networkLocation a network location path, e.g. {@code /dc/rack}
   * @return everything before the last separator; NOTE(review): when no
   *         separator is present, lastIndexOf returns -1 and substring(0, -1)
   *         throws StringIndexOutOfBoundsException — callers appear to assume
   *         a separator exists, confirm before changing
   */
  public static String getFirstHalf(String networkLocation) {
    int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
    return networkLocation.substring(0, index);
  }

  /**
   * Divide networklocation string into two parts by last separator, and get
   * the second part here.
* * @param networkLocation * @return */ public static String getLastHalf(String networkLocation) { int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR); return networkLocation.substring(index); } /** * Returns an integer weight which specifies how far away {node} is away from * {reader}. A lower value signifies that a node is closer. * * @param reader Node where data will be read * @param node Replica of data * @return weight */ protected int getWeight(Node reader, Node node) { // 0 is local, 2 is same rack, and each level on each node increases the //weight by 1 //Start off by initializing to Integer.MAX_VALUE int weight = Integer.MAX_VALUE; if (reader != null && node != null) { if(reader.equals(node)) { return 0; } int maxReaderLevel = reader.getLevel(); int maxNodeLevel = node.getLevel(); int currentLevelToCompare = maxReaderLevel > maxNodeLevel ? maxNodeLevel : maxReaderLevel; Node r = reader; Node n = node; weight = 0; while(r != null && r.getLevel() > currentLevelToCompare) { r = r.getParent(); weight++; } while(n != null && n.getLevel() > currentLevelToCompare) { n = n.getParent(); weight++; } while(r != null && n != null && !r.equals(n)) { r = r.getParent(); n = n.getParent(); weight+=2; } } return weight; } /** * Returns an integer weight which specifies how far away <i>node</i> is * from <i>reader</i>. A lower value signifies that a node is closer. 
* It uses network location to calculate the weight * * @param reader Node where data will be read * @param node Replica of data * @return weight */ private static int getWeightUsingNetworkLocation(Node reader, Node node) { //Start off by initializing to Integer.MAX_VALUE int weight = Integer.MAX_VALUE; if(reader != null && node != null) { String readerPath = normalizeNetworkLocationPath( reader.getNetworkLocation()); String nodePath = normalizeNetworkLocationPath( node.getNetworkLocation()); //same rack if(readerPath.equals(nodePath)) { if(reader.getName().equals(node.getName())) { weight = 0; } else { weight = 2; } } else { String[] readerPathToken = readerPath.split(PATH_SEPARATOR_STR); String[] nodePathToken = nodePath.split(PATH_SEPARATOR_STR); int maxLevelToCompare = readerPathToken.length > nodePathToken.length ? nodePathToken.length : readerPathToken.length; int currentLevel = 1; //traverse through the path and calculate the distance while(currentLevel < maxLevelToCompare) { if(!readerPathToken[currentLevel] .equals(nodePathToken[currentLevel])){ break; } currentLevel++; } weight = (readerPathToken.length - currentLevel) + (nodePathToken.length - currentLevel); } } return weight; } /** Normalize a path by stripping off any trailing {@link #PATH_SEPARATOR}. * @param path path to normalize. * @return the normalised path * If <i>path</i>is null or empty {@link #ROOT} is returned * @throws IllegalArgumentException if the first character of a non empty path * is not {@link #PATH_SEPARATOR} */ private static String normalizeNetworkLocationPath(String path) { if (path == null || path.length() == 0) { return ROOT; } if (path.charAt(0) != PATH_SEPARATOR) { throw new IllegalArgumentException("Network Location" + "path doesn't start with " +PATH_SEPARATOR+ ": "+path); } int len = path.length(); if (path.charAt(len-1) == PATH_SEPARATOR) { return path.substring(0, len-1); } return path; } /** * Sort nodes array by network distance to <i>reader</i>. 
* <p/> * In a three-level topology, a node can be either local, on the same rack, * or on a different rack from the reader. Sorting the nodes based on network * distance from the reader reduces network traffic and improves * performance. * <p/> * As an additional twist, we also randomize the nodes at each network * distance. This helps with load balancing when there is data skew. * * @param reader Node where data will be read * @param nodes Available replicas with the requested data * @param activeLen Number of active nodes at the front of the array */ public void sortByDistance(Node reader, Node[] nodes, int activeLen) { /* * This method is called if the reader is a datanode, * so nonDataNodeReader flag is set to false. */ sortByDistance(reader, nodes, activeLen, false); } /** * Sort nodes array by network distance to <i>reader</i>. * <p/> using network location. This is used when the reader * is not a datanode. Sorting the nodes based on network distance * from the reader reduces network traffic and improves * performance. * <p/> * * @param reader Node where data will be read * @param nodes Available replicas with the requested data * @param activeLen Number of active nodes at the front of the array */ public void sortByDistanceUsingNetworkLocation(Node reader, Node[] nodes, int activeLen) { /* * This method is called if the reader is not a datanode, * so nonDataNodeReader flag is set to true. */ sortByDistance(reader, nodes, activeLen, true); } /** * Sort nodes array by network distance to <i>reader</i>. * <p/> * As an additional twist, we also randomize the nodes at each network * distance. This helps with load balancing when there is data skew. 
* * @param reader Node where data will be read * @param nodes Available replicas with the requested data * @param activeLen Number of active nodes at the front of the array * @param nonDataNodeReader True if the reader is not a datanode */ private void sortByDistance(Node reader, Node[] nodes, int activeLen, boolean nonDataNodeReader) { /** Sort weights for the nodes array */ int[] weights = new int[activeLen]; for (int i=0; i<activeLen; i++) { if(nonDataNodeReader) { weights[i] = getWeightUsingNetworkLocation(reader, nodes[i]); } else { weights[i] = getWeight(reader, nodes[i]); } } // Add weight/node pairs to a TreeMap to sort TreeMap<Integer, List<Node>> tree = new TreeMap<Integer, List<Node>>(); for (int i=0; i<activeLen; i++) { int weight = weights[i]; Node node = nodes[i]; List<Node> list = tree.get(weight); if (list == null) { list = Lists.newArrayListWithExpectedSize(1); tree.put(weight, list); } list.add(node); } int idx = 0; for (List<Node> list: tree.values()) { if (list != null) { Collections.shuffle(list, r); for (Node n: list) { nodes[idx] = n; idx++; } } } Preconditions.checkState(idx == activeLen, "Sorted the wrong number of nodes!"); } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.lantern.world; import static com.google.common.base.Preconditions.checkNotNull; import com.flowpowered.math.vector.Vector2i; import com.flowpowered.math.vector.Vector3d; import com.flowpowered.math.vector.Vector3i; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.BlockType; import org.spongepowered.api.block.ScheduledBlockUpdate; import org.spongepowered.api.block.tileentity.TileEntity; import org.spongepowered.api.data.DataContainer; import org.spongepowered.api.data.DataHolder; import org.spongepowered.api.data.DataTransactionResult; import org.spongepowered.api.data.DataView; import org.spongepowered.api.data.Property; import org.spongepowered.api.data.key.Key; import org.spongepowered.api.data.manipulator.DataManipulator; import org.spongepowered.api.data.merge.MergeFunction; import org.spongepowered.api.data.value.BaseValue; import org.spongepowered.api.data.value.immutable.ImmutableValue; import org.spongepowered.api.effect.particle.ParticleEffect; import org.spongepowered.api.effect.sound.SoundType; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.entity.EntitySnapshot; import org.spongepowered.api.entity.EntityType; import org.spongepowered.api.event.cause.Cause; import org.spongepowered.api.scoreboard.Scoreboard; import org.spongepowered.api.service.permission.context.Context; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.chat.ChatType; import org.spongepowered.api.text.title.Title; import org.spongepowered.api.util.Direction; import org.spongepowered.api.util.DiscreteTransform2; import org.spongepowered.api.util.DiscreteTransform3; import org.spongepowered.api.util.persistence.InvalidDataException; import org.spongepowered.api.world.Chunk; import org.spongepowered.api.world.Dimension; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.PlayerSimulator; import 
org.spongepowered.api.world.TeleporterAgent; import org.spongepowered.api.world.World; import org.spongepowered.api.world.WorldBorder; import org.spongepowered.api.world.WorldCreationSettings; import org.spongepowered.api.world.biome.BiomeType; import org.spongepowered.api.world.difficulty.Difficulty; import org.spongepowered.api.world.explosion.Explosion; import org.spongepowered.api.world.extent.Extent; import org.spongepowered.api.world.extent.ImmutableBiomeArea; import org.spongepowered.api.world.extent.ImmutableBlockVolume; import org.spongepowered.api.world.extent.MutableBiomeArea; import org.spongepowered.api.world.extent.MutableBlockVolume; import org.spongepowered.api.world.extent.StorageType; import org.spongepowered.api.world.extent.UnmodifiableBiomeArea; import org.spongepowered.api.world.extent.UnmodifiableBlockVolume; import org.spongepowered.api.world.gen.WorldGenerator; import org.spongepowered.api.world.storage.WorldProperties; import org.spongepowered.api.world.weather.Weather; import org.spongepowered.lantern.world.storage.LanternChunkLayout; import org.spongepowered.lantern.world.storage.LanternWorldStorage; import java.util.Collection; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.function.Predicate; public class LanternWorld implements World { private final WorldProperties properties; private final LanternWorldStorage storage; private final ChunkManager chunkManager; public LanternWorld(LanternWorldStorage storage, WorldProperties properties) { this.storage = checkNotNull(storage); this.properties = checkNotNull(properties); this.chunkManager = new ChunkManager(this); } /** * Updates all the entities within this world. 
*/ public void pulse() { //TODO: Implement } @Override public Difficulty getDifficulty() { return properties.getDifficulty(); } @Override public String getName() { return properties.getWorldName(); } @Override public Optional<Chunk> getChunk(Vector3i position) { return getChunk(position.getX(), position.getY(), position.getY()); } @Override public Optional<Chunk> getChunk(int x, int y, int z) { if (!LanternChunkLayout.instance.isValidChunk(x, y, z)) { return Optional.empty(); } if(chunkManager.isChunkLoaded(x, z)) { return Optional.of(chunkManager.getChunk(x, z)); } return Optional.empty(); } @Override public Optional<Chunk> loadChunk(Vector3i position, boolean shouldGenerate) { return loadChunk(position.getX(), position.getY(), position.getZ(), shouldGenerate); } @Override public Optional<Chunk> loadChunk(int x, int y, int z, boolean shouldGenerate) { if (!LanternChunkLayout.instance.isValidChunk(x, y, z)) { return Optional.empty(); } if(chunkManager.isChunkLoaded(x, z)) return Optional.of(chunkManager.getChunk(x, z)); chunkManager.loadChunk(x, z, shouldGenerate); if(chunkManager.isChunkLoaded(x, z)) return Optional.of(chunkManager.getChunk(x, z)); return Optional.empty(); } @Override public boolean unloadChunk(Chunk chunk) { return false; //TODO: Implement } @Override public Iterable<Chunk> getLoadedChunks() { return null; //TODO: Implement } @Override public Optional<Entity> getEntity(UUID uuid) { return null; //TODO: Implement } @Override public WorldBorder getWorldBorder() { return null; //TODO: Implement } @Override public Optional<String> getGameRule(String gameRule) { return null; //TODO: Implement } @Override public Map<String, String> getGameRules() { return null; //TODO: Implement } @Override public Dimension getDimension() { return null; //TODO: Implement } @Override public WorldGenerator getWorldGenerator() { return null; //TODO: Implement } @Override public void setWorldGenerator(WorldGenerator generator) { //TODO: Implement } @Override public boolean 
doesKeepSpawnLoaded() { return false; //TODO: Implement } @Override public void setKeepSpawnLoaded(boolean keepLoaded) { //TODO: Implement } @Override public LanternWorldStorage getWorldStorage() { return this.storage; } @Override public Scoreboard getScoreboard() { return null; //TODO: Implement } @Override public void setScoreboard(Scoreboard scoreboard) { //TODO: Implement } @Override public WorldCreationSettings getCreationSettings() { return new LanternWorldCreationSettings(properties); } @Override public WorldProperties getProperties() { return this.properties; } @Override public Location<World> getSpawnLocation() { return null; //TODO: Implement } @Override public void triggerExplosion(Explosion explosion) { //TODO: Implement } @Override public TeleporterAgent getTeleporterAgent() { return null; //TODO: Implement } @Override public PlayerSimulator getPlayerSimulator() { return null; //TODO: Implement } @Override public Context getContext() { return null; //TODO: Implement } @Override public void setBlock(Vector3i position, BlockState block, boolean notifyNeighbors) { //TODO: Implement } @Override public void setBlock(int x, int y, int z, BlockState block, boolean notifyNeighbors) { //TODO: Implement } @Override public void setBlockType(Vector3i position, BlockType type, boolean notifyNeighbors) { //TODO: Implement } @Override public void setBlockType(int x, int y, int z, BlockType type, boolean notifyNeighbors) { //TODO: Implement } @Override public BlockSnapshot createSnapshot(Vector3i position) { return null; //TODO: Implement } @Override public BlockSnapshot createSnapshot(int x, int y, int z) { return null; //TODO: Implement } @Override public boolean restoreSnapshot(BlockSnapshot snapshot, boolean force, boolean notifyNeighbors) { return false; //TODO: Implement } @Override public boolean restoreSnapshot(Vector3i position, BlockSnapshot snapshot, boolean force, boolean notifyNeighbors) { return false; //TODO: Implement } @Override public boolean 
restoreSnapshot(int x, int y, int z, BlockSnapshot snapshot, boolean force, boolean notifyNeighbors) { return false; //TODO: Implement } @Override public Collection<ScheduledBlockUpdate> getScheduledUpdates(Vector3i position) { return null; //TODO: Implement } @Override public Collection<ScheduledBlockUpdate> getScheduledUpdates(int x, int y, int z) { return null; //TODO: Implement } @Override public ScheduledBlockUpdate addScheduledUpdate(Vector3i position, int priority, int ticks) { return null; //TODO: Implement } @Override public ScheduledBlockUpdate addScheduledUpdate(int x, int y, int z, int priority, int ticks) { return null; //TODO: Implement } @Override public void removeScheduledUpdate(Vector3i position, ScheduledBlockUpdate update) { //TODO: Implement } @Override public void removeScheduledUpdate(int x, int y, int z, ScheduledBlockUpdate update) { //TODO: Implement } @Override public boolean isLoaded() { return false; //TODO: Implement } @Override public Extent getExtentView(Vector3i newMin, Vector3i newMax) { return null; //TODO: Implement } @Override public Extent getExtentView(DiscreteTransform3 transform) { return null; //TODO: Implement } @Override public Extent getRelativeExtentView() { return null; //TODO: Implement } @Override public Vector2i getBiomeMin() { return null; //TODO: Implement } @Override public Vector2i getBiomeMax() { return null; //TODO: Implement } @Override public Vector2i getBiomeSize() { return null; //TODO: Implement } @Override public boolean containsBiome(Vector2i position) { return false; //TODO: Implement } @Override public boolean containsBiome(int x, int z) { return false; //TODO: Implement } @Override public BiomeType getBiome(Vector2i position) { return null; //TODO: Implement } @Override public BiomeType getBiome(int x, int z) { return null; //TODO: Implement } @Override public UnmodifiableBiomeArea getUnmodifiableBiomeView() { return null; //TODO: Implement } @Override public MutableBiomeArea getBiomeCopy() { return 
null; //TODO: Implement } @Override public MutableBiomeArea getBiomeCopy(StorageType type) { return null; //TODO: Implement } @Override public ImmutableBiomeArea getImmutableBiomeCopy() { return null; //TODO: Implement } @Override public Vector3i getBlockMin() { return null; //TODO: Implement } @Override public Vector3i getBlockMax() { return null; //TODO: Implement } @Override public Vector3i getBlockSize() { return null; //TODO: Implement } @Override public boolean containsBlock(int x, int y, int z) { return false; //TODO: Implement } @Override public BlockState getBlock(Vector3i position) { return null; //TODO: Implement } @Override public BlockState getBlock(int x, int y, int z) { return null; //TODO: Implement } @Override public BlockType getBlockType(Vector3i position) { return null; //TODO: Implement } @Override public BlockType getBlockType(int x, int y, int z) { return null; //TODO: Implement } @Override public UnmodifiableBlockVolume getUnmodifiableBlockView() { return null; //TODO: Implement } @Override public MutableBlockVolume getBlockCopy() { return null; //TODO: Implement } @Override public MutableBlockVolume getBlockCopy(StorageType type) { return null; //TODO: Implement } @Override public ImmutableBlockVolume getImmutableBlockCopy() { return null; //TODO: Implement } @Override public Collection<Entity> getEntities() { return null; //TODO: Implement } @Override public Collection<Entity> getEntities(Predicate<Entity> filter) { return null; //TODO: Implement } @Override public Optional<Entity> createEntity(EntityType type, Vector3d position) { return null; //TODO: Implement } @Override public Optional<Entity> createEntity(EntityType type, Vector3i position) { return null; //TODO: Implement } @Override public Optional<Entity> createEntity(DataContainer entityContainer) { return null; //TODO: Implement } @Override public Optional<Entity> createEntity(DataContainer entityContainer, Vector3d position) { return null; //TODO: Implement } @Override public 
Optional<Entity> restoreSnapshot(EntitySnapshot snapshot, Vector3d position) { return null; //TODO: Implement } @Override public boolean spawnEntity(Entity entity, Cause cause) { return false; //TODO: Implement } @Override public UUID getUniqueId() { return null; //TODO: Implement } @Override public <T extends Property<?, ?>> Optional<T> getProperty(int x, int y, int z, Class<T> propertyClass) { return null; //TODO: Implement } @Override public <T extends Property<?, ?>> Optional<T> getProperty(int x, int y, int z, Direction direction, Class<T> propertyClass) { return null; //TODO: Implement } @Override public Collection<Property<?, ?>> getProperties(int x, int y, int z) { return null; //TODO: Implement } @Override public <E> Optional<E> get(int x, int y, int z, Key<? extends BaseValue<E>> key) { return null; //TODO: Implement } @Override public <T extends DataManipulator<?, ?>> Optional<T> get(int x, int y, int z, Class<T> manipulatorClass) { return null; //TODO: Implement } @Override public <T extends DataManipulator<?, ?>> Optional<T> getOrCreate(int x, int y, int z, Class<T> manipulatorClass) { return null; //TODO: Implement } @Override public <E, V extends BaseValue<E>> Optional<V> getValue(int x, int y, int z, Key<V> key) { return null; //TODO: Implement } @Override public boolean supports(int x, int y, int z, Key<?> key) { return false; //TODO: Implement } @Override public boolean supports(int x, int y, int z, Class<? extends DataManipulator<?, ?>> manipulatorClass) { return false; //TODO: Implement } @Override public Set<Key<?>> getKeys(int x, int y, int z) { return null; //TODO: Implement } @Override public Set<ImmutableValue<?>> getValues(int x, int y, int z) { return null; //TODO: Implement } @Override public <E> DataTransactionResult offer(int x, int y, int z, Key<? 
extends BaseValue<E>> key, E value) { return null; //TODO: Implement } @Override public DataTransactionResult offer(int x, int y, int z, DataManipulator<?, ?> manipulator, MergeFunction function) { return null; //TODO: Implement } @Override public DataTransactionResult remove(int x, int y, int z, Class<? extends DataManipulator<?, ?>> manipulatorClass) { return null; //TODO: Implement } @Override public DataTransactionResult remove(int x, int y, int z, Key<?> key) { return null; //TODO: Implement } @Override public DataTransactionResult undo(int x, int y, int z, DataTransactionResult result) { return null; //TODO: Implement } @Override public DataTransactionResult copyFrom(int xTo, int yTo, int zTo, DataHolder from) { return null; //TODO: Implement } @Override public DataTransactionResult copyFrom(int xTo, int yTo, int zTo, DataHolder from, MergeFunction function) { return null; //TODO: Implement } @Override public DataTransactionResult copyFrom(int xTo, int yTo, int zTo, int xFrom, int yFrom, int zFrom, MergeFunction function) { return null; //TODO: Implement } @Override public Collection<DataManipulator<?, ?>> getManipulators(int x, int y, int z) { return null; //TODO: Implement } @Override public boolean validateRawData(int x, int y, int z, DataView container) { return false; //TODO: Implement } @Override public void setRawData(int x, int y, int z, DataView container) throws InvalidDataException { //TODO: Implement } @Override public void setBiome(Vector2i position, BiomeType biome) { //TODO: Implement } @Override public void setBiome(int x, int z, BiomeType biome) { //TODO: Implement } @Override public MutableBiomeArea getBiomeView(Vector2i newMin, Vector2i newMax) { return null; //TODO: Implement } @Override public MutableBiomeArea getBiomeView(DiscreteTransform2 transform) { return null; //TODO: Implement } @Override public MutableBiomeArea getRelativeBiomeView() { return null; //TODO: Implement } @Override public Collection<TileEntity> getTileEntities() { 
return null; //TODO: Implement } @Override public Collection<TileEntity> getTileEntities(Predicate<TileEntity> filter) { return null; //TODO: Implement } @Override public Optional<TileEntity> getTileEntity(Vector3i position) { return null; //TODO: Implement } @Override public Optional<TileEntity> getTileEntity(int x, int y, int z) { return null; //TODO: Implement } @Override public void setBlock(Vector3i position, BlockState block) { //TODO: Implement } @Override public void setBlock(int x, int y, int z, BlockState block) { //TODO: Implement } @Override public void setBlockType(Vector3i position, BlockType type) { //TODO: Implement } @Override public void setBlockType(int x, int y, int z, BlockType type) { //TODO: Implement } @Override public MutableBlockVolume getBlockView(Vector3i newMin, Vector3i newMax) { return null; //TODO: Implement } @Override public MutableBlockVolume getBlockView(DiscreteTransform3 transform) { return null; //TODO: Implement } @Override public MutableBlockVolume getRelativeBlockView() { return null; //TODO: Implement } @Override public boolean containsBlock(Vector3i position) { return false; //TODO: Implement } @Override public void spawnParticles(ParticleEffect particleEffect, Vector3d position) { //TODO: Implement } @Override public void spawnParticles(ParticleEffect particleEffect, Vector3d position, int radius) { //TODO: Implement } @Override public void playSound(SoundType sound, Vector3d position, double volume) { //TODO: Implement } @Override public void playSound(SoundType sound, Vector3d position, double volume, double pitch) { //TODO: Implement } @Override public void playSound(SoundType sound, Vector3d position, double volume, double pitch, double minVolume) { //TODO: Implement } @Override public void sendTitle(Title title) { //TODO: Implement } @Override public Weather getWeather() { return null; //TODO: Implement } @Override public long getRemainingDuration() { return 0; //TODO: Implement } @Override public long 
getRunningDuration() { return 0; //TODO: Implement } @Override public void forecast(Weather weather) { //TODO: Implement } @Override public void forecast(Weather weather, long duration) { //TODO: Implement } public void setTime(long time) { // TODO: Implement } public void setFullTime(long time) { // TODO: Implement } public void setSpawnLocation(int x, int y, int z) { // TODO: Implement } public void setGameRule(String rule, String value) { // TODO: Implement } @Override public Collection<Direction> getFacesWithProperty(int x, int y, int z, Class<? extends Property<?, ?>> propertyClass) { return null; //TODO: Implement } @Override public void sendMessage(ChatType type, Text message) { //TODO: Implement } @Override public void sendMessages(ChatType type, Text... messages) { //TODO: Implement } @Override public void sendMessages(ChatType type, Iterable<Text> messages) { //TODO: Implement } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.lucene.queries; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; import org.apache.lucene.util.TestUtil; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collections; 
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;

/**
 * Tests for {@code BlendedTermQuery}: verifies that blended queries rank documents
 * differently from their plain boolean/dismax equivalents, that query equality is
 * stable across term-order shuffles, and that terms are extractable from the weight.
 */
public class BlendedTermQueryTests extends ESTestCase {

    /**
     * Indexes doc 0 ("simon willnauer") and doc 1 ("paul simon") plus filler docs,
     * then asserts that a boolean-blended query over {firstname, surname} ranks
     * doc 0 first, while a plain two-clause boolean SHOULD query ranks doc 1 first.
     */
    public void testBooleanQuery() throws IOException {
        Directory dir = newDirectory();
        IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
        String[] firstNames = new String[]{ "simon", "paul" };
        String[] surNames = new String[]{ "willnauer", "simon" };
        for (int i = 0; i < surNames.length; i++) {
            Document d = new Document();
            d.add(new TextField("id", Integer.toString(i), Field.Store.YES));
            d.add(new TextField("firstname", firstNames[i], Field.Store.NO));
            d.add(new TextField("surname", surNames[i], Field.Store.NO));
            w.addDocument(d);
        }
        // Filler docs mostly share the firstname term "simon" (via "simon the sorcerer")
        // so that document frequencies differ between the two fields.
        int iters = scaledRandomIntBetween(25, 100);
        for (int j = 0; j < iters; j++) {
            Document d = new Document();
            d.add(new TextField("id", Integer.toString(firstNames.length + j), Field.Store.YES));
            d.add(new TextField("firstname", rarely() ? "some_other_name" :
                "simon the sorcerer", Field.Store.NO)); // make sure length-norm is the tie-breaker
            d.add(new TextField("surname", "bogus", Field.Store.NO));
            w.addDocument(d);
        }
        w.commit();
        DirectoryReader reader = DirectoryReader.open(w);
        IndexSearcher searcher = setSimilarity(newSearcher(reader));
        {
            // Blended query: expects doc 0 on top of the requested 3 hits.
            Term[] terms = new Term[]{new Term("firstname", "simon"), new Term("surname", "simon")};
            BlendedTermQuery query = BlendedTermQuery.booleanBlendedQuery(terms, true);
            TopDocs search = searcher.search(query, 3);
            ScoreDoc[] scoreDocs = search.scoreDocs;
            assertEquals(3, scoreDocs.length);
            assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
        }
        {
            // Plain (non-blended) boolean disjunction over the same terms: expects doc 1 on top.
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            query.setDisableCoord(true);
            query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD);
            query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD);
            TopDocs search = searcher.search(query.build(), 1);
            ScoreDoc[] scoreDocs = search.scoreDocs;
            assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());
        }
        reader.close();
        w.close();
        dir.close();
    }

    /**
     * Same idea with dismax blending over {username, song}: the dismax-blended
     * query ranks doc 0 first, the hand-built DisjunctionMaxQuery ranks doc 1 first.
     * Note the deliberate "fighers" misspelling is used consistently in both the
     * indexed text of doc 1 and the non-blended comparison query.
     */
    public void testDismaxQuery() throws IOException {
        Directory dir = newDirectory();
        IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
        String[] username = new String[]{ "foo fighters", "some cool fan", "cover band"};
        String[] song = new String[]{ "generator", "foo fighers - generator", "foo fighters generator" };
        // Randomize index options / norms, but use the same omitNorms for both field types
        // so the two doc groups stay comparable.
        final boolean omitNorms = random().nextBoolean();
        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        ft.setIndexOptions(random().nextBoolean() ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS);
        ft.setOmitNorms(omitNorms);
        ft.freeze();

        FieldType ft1 = new FieldType(TextField.TYPE_NOT_STORED);
        ft1.setIndexOptions(random().nextBoolean() ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS);
        ft1.setOmitNorms(omitNorms);
        ft1.freeze();
        for (int i = 0; i < username.length; i++) {
            Document d = new Document();
            d.add(new TextField("id", Integer.toString(i), Field.Store.YES));
            d.add(new Field("username", username[i], ft));
            d.add(new Field("song", song[i], ft));
            w.addDocument(d);
        }
        // Filler docs repeat "foo fighters" in username to skew document frequencies.
        int iters = scaledRandomIntBetween(25, 100);
        for (int j = 0; j < iters; j++) {
            Document d = new Document();
            d.add(new TextField("id", Integer.toString(username.length + j), Field.Store.YES));
            d.add(new Field("username", "foo fighters", ft1));
            d.add(new Field("song", "some bogus text to bump up IDF", ft1));
            w.addDocument(d);
        }
        w.commit();
        DirectoryReader reader = DirectoryReader.open(w);
        IndexSearcher searcher = setSimilarity(newSearcher(reader));
        {
            // Dismax-blended per query term across both fields: expects doc 0 on top.
            String[] fields = new String[]{"username", "song"};
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            query.setDisableCoord(true);
            query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f), BooleanClause.Occur.SHOULD);
            query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "fighters"), 0.1f), BooleanClause.Occur.SHOULD);
            query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD);
            TopDocs search = searcher.search(query.build(), 10);
            ScoreDoc[] scoreDocs = search.scoreDocs;
            assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
        }
        {
            // Equivalent non-blended dismax construction: expects doc 1 on top instead.
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            query.setDisableCoord(true);
            DisjunctionMaxQuery uname = new DisjunctionMaxQuery(
                Arrays.asList(new TermQuery(new Term("username", "foo")), new TermQuery(new Term("song", "foo"))), 0.0f);
            DisjunctionMaxQuery s = new DisjunctionMaxQuery(
                Arrays.asList(new TermQuery(new Term("username", "fighers")), new TermQuery(new Term("song", "fighers"))), 0.0f);
            DisjunctionMaxQuery gen = new DisjunctionMaxQuery(
                Arrays.asList(new TermQuery(new Term("username", "generator")), new TermQuery(new Term("song", "generator"))), 0f);
            query.add(uname, BooleanClause.Occur.SHOULD);
            query.add(s, BooleanClause.Occur.SHOULD);
            query.add(gen, BooleanClause.Occur.SHOULD);
            TopDocs search = searcher.search(query.build(), 4);
            ScoreDoc[] scoreDocs = search.scoreDocs;
            assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());
        }
        reader.close();
        w.close();
        dir.close();
    }

    /**
     * Sanity/equality checks: a blended query built from the same terms (even after
     * {@link #toTerms} reshuffles field order) must be equal to the first one, and
     * must pass {@link QueryUtils#check}.
     */
    public void testBasics() {
        final int iters = scaledRandomIntBetween(5, 25);
        for (int j = 0; j < iters; j++) {
            String[] fields = new String[1 + random().nextInt(10)];
            for (int i = 0; i < fields.length; i++) {
                fields[i] = TestUtil.randomRealisticUnicodeString(random(), 1, 10);
            }
            String term = TestUtil.randomRealisticUnicodeString(random(), 1, 10);
            Term[] terms = toTerms(fields, term);
            boolean disableCoord = random().nextBoolean();
            boolean useBoolean = random().nextBoolean();
            float tieBreaker = random().nextFloat();
            BlendedTermQuery query = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms, disableCoord) :
                BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker);
            QueryUtils.check(query);
            terms = toTerms(fields, term); // rebuild with a fresh shuffle — equality must not depend on order
            BlendedTermQuery query2 = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms, disableCoord) :
                BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker);
            assertEquals(query, query2);
        }
    }

    /**
     * Builds one {@link Term} per field, all carrying the same term text,
     * with the field order randomly shuffled.
     */
    public Term[] toTerms(String[] fields, String term) {
        Term[] terms = new Term[fields.length];
        List<String> fieldsList = Arrays.asList(fields);
        Collections.shuffle(fieldsList, random());
        fields = fieldsList.toArray(new String[0]);
        for (int i = 0; i < fields.length; i++) {
            terms[i] = new Term(fields[i], term);
        }
        return terms;
    }

    /** Installs a randomly chosen similarity (BM25 or classic TF/IDF) on the searcher. */
    public IndexSearcher setSimilarity(IndexSearcher searcher) {
        Similarity similarity = random().nextBoolean() ? new BM25Similarity() : new ClassicSimilarity();
        searcher.setSimilarity(similarity);
        return searcher;
    }

    /**
     * The set of terms extracted from a blended query's normalized weight must be
     * exactly the set the query was built from (searched over an empty MultiReader).
     */
    public void testExtractTerms() throws IOException {
        Set<Term> terms = new HashSet<>();
        int num = scaledRandomIntBetween(1, 10);
        for (int i = 0; i < num; i++) {
            terms.add(new Term(TestUtil.randomRealisticUnicodeString(random(), 1, 10),
                TestUtil.randomRealisticUnicodeString(random(), 1, 10)));
        }

        BlendedTermQuery blendedTermQuery = random().nextBoolean() ?
            BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()) :
            BlendedTermQuery.booleanBlendedQuery(terms.toArray(new Term[0]), random().nextBoolean());
        Set<Term> extracted = new HashSet<>();
        IndexSearcher searcher = new IndexSearcher(new MultiReader());
        searcher.createNormalizedWeight(blendedTermQuery, false).extractTerms(extracted);
        assertThat(extracted.size(), equalTo(terms.size()));
        assertThat(extracted, containsInAnyOrder(terms.toArray(new Term[0])));
    }
}
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.trivium.dep.com.google.common.primitives;

import static io.trivium.dep.com.google.common.base.Preconditions.checkArgument;
import static io.trivium.dep.com.google.common.base.Preconditions.checkNotNull;

import io.trivium.dep.com.google.common.annotations.Beta;
import io.trivium.dep.com.google.common.annotations.GwtCompatible;

import java.math.BigInteger;
import java.util.Arrays;
import java.util.Comparator;

import javax.annotation.CheckReturnValue;

/**
 * Static utility methods pertaining to {@code long} primitives that interpret values as
 * <i>unsigned</i> (that is, any negative value {@code x} is treated as the positive value
 * {@code 2^64 + x}). The methods for which signedness is not an issue are in {@link Longs}, as
 * well as signed versions of methods for which signedness is an issue.
 *
 * <p>In addition, this class provides several static methods for converting a {@code long} to a
 * {@code String} and a {@code String} to a {@code long} that treat the {@code long} as an unsigned
 * number.
 *
 * <p>Users of these utilities must be <i>extremely careful</i> not to mix up signed and unsigned
 * {@code long} values. When possible, it is recommended that the {@link UnsignedLong} wrapper
 * class be used, at a small efficiency penalty, to enforce the distinction in the type system.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/PrimitivesExplained#unsigned-support">
 * unsigned primitive utilities</a>.
 *
 * @author Louis Wasserman
 * @author Brian Milch
 * @author Colin Evans
 * @since 10.0
 */
@Beta
@GwtCompatible
public final class UnsignedLongs {
  private UnsignedLongs() {} // static utility class — not instantiable

  public static final long MAX_VALUE = -1L; // Equivalent to 2^64 - 1

  /**
   * A (self-inverse) bijection which converts the ordering on unsigned longs to the ordering on
   * longs, that is, {@code a <= b} as unsigned longs if and only if {@code flip(a) <= flip(b)}
   * as signed longs.
   */
  private static long flip(long a) {
    // XOR-ing the sign bit maps [0, 2^63) onto the negative signed range and vice versa.
    return a ^ Long.MIN_VALUE;
  }

  /**
   * Compares the two specified {@code long} values, treating them as unsigned values between
   * {@code 0} and {@code 2^64 - 1} inclusive.
   *
   * @param a the first unsigned {@code long} to compare
   * @param b the second unsigned {@code long} to compare
   * @return a negative value if {@code a} is less than {@code b}; a positive value if {@code a} is
   *     greater than {@code b}; or zero if they are equal
   */
  @CheckReturnValue
  public static int compare(long a, long b) {
    // Flip both into signed order and delegate to the signed comparison.
    return Longs.compare(flip(a), flip(b));
  }

  /**
   * Returns the least value present in {@code array}, treating values as unsigned.
   *
   * @param array a <i>nonempty</i> array of unsigned {@code long} values
   * @return the value present in {@code array} that is less than or equal to every other value in
   *     the array according to {@link #compare}
   * @throws IllegalArgumentException if {@code array} is empty
   */
  @CheckReturnValue
  public static long min(long... array) {
    checkArgument(array.length > 0);
    // Track the minimum in flipped (signed) space; flip back once at the end.
    long min = flip(array[0]);
    for (int i = 1; i < array.length; i++) {
      long next = flip(array[i]);
      if (next < min) {
        min = next;
      }
    }
    return flip(min);
  }

  /**
   * Returns the greatest value present in {@code array}, treating values as unsigned.
   *
   * @param array a <i>nonempty</i> array of unsigned {@code long} values
   * @return the value present in {@code array} that is greater than or equal to every other value
   *     in the array according to {@link #compare}
   * @throws IllegalArgumentException if {@code array} is empty
   */
  @CheckReturnValue
  public static long max(long... array) {
    checkArgument(array.length > 0);
    // Mirror of min(): maximum in flipped space, flipped back on return.
    long max = flip(array[0]);
    for (int i = 1; i < array.length; i++) {
      long next = flip(array[i]);
      if (next > max) {
        max = next;
      }
    }
    return flip(max);
  }

  /**
   * Returns a string containing the supplied unsigned {@code long} values separated by
   * {@code separator}. For example, {@code join("-", 1, 2, 3)} returns the string {@code "1-2-3"}.
   *
   * @param separator the text that should appear between consecutive values in the resulting
   *     string (but not at the start or end)
   * @param array an array of unsigned {@code long} values, possibly empty
   */
  @CheckReturnValue
  public static String join(String separator, long... array) {
    checkNotNull(separator);
    if (array.length == 0) {
      return "";
    }

    // For pre-sizing a builder, just get the right order of magnitude
    StringBuilder builder = new StringBuilder(array.length * 5);
    builder.append(toString(array[0]));
    for (int i = 1; i < array.length; i++) {
      builder.append(separator).append(toString(array[i]));
    }
    return builder.toString();
  }

  /**
   * Returns a comparator that compares two arrays of unsigned {@code long} values
   * lexicographically. That is, it compares, using {@link #compare(long, long)}), the first pair of
   * values that follow any common prefix, or when one array is a prefix of the other, treats the
   * shorter array as the lesser. For example, {@code [] < [1L] < [1L, 2L] < [2L] < [1L << 63]}.
   *
   * <p>The returned comparator is inconsistent with {@link Object#equals(Object)} (since arrays
   * support only identity equality), but it is consistent with
   * {@link Arrays#equals(long[], long[])}.
   *
   * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order">Lexicographical order
   *     article at Wikipedia</a>
   */
  @CheckReturnValue
  public static Comparator<long[]> lexicographicalComparator() {
    return LexicographicalComparator.INSTANCE;
  }

  // Enum singleton: serializable and thread-safe comparator implementation.
  enum LexicographicalComparator implements Comparator<long[]> {
    INSTANCE;

    @Override
    public int compare(long[] left, long[] right) {
      int minLength = Math.min(left.length, right.length);
      for (int i = 0; i < minLength; i++) {
        if (left[i] != right[i]) {
          return UnsignedLongs.compare(left[i], right[i]);
        }
      }
      // Common prefix is equal: the shorter array sorts first.
      return left.length - right.length;
    }
  }

  /**
   * Returns dividend / divisor, where the dividend and divisor are treated as unsigned 64-bit
   * quantities.
   *
   * @param dividend the dividend (numerator)
   * @param divisor the divisor (denominator)
   * @throws ArithmeticException if divisor is 0
   */
  @CheckReturnValue
  public static long divide(long dividend, long divisor) {
    if (divisor < 0) { // i.e., divisor >= 2^63:
      // The quotient can only be 0 or 1 when the divisor's high bit is set.
      if (compare(dividend, divisor) < 0) {
        return 0; // dividend < divisor
      } else {
        return 1; // dividend >= divisor
      }
    }

    // Optimization - use signed division if dividend < 2^63
    if (dividend >= 0) {
      return dividend / divisor;
    }

    /*
     * Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is
     * guaranteed to be either exact or one less than the correct value. This follows from fact
     * that floor(floor(x)/i) == floor(x/i) for any real x and integer i != 0. The proof is not
     * quite trivial.
     */
    long quotient = ((dividend >>> 1) / divisor) << 1;
    long rem = dividend - quotient * divisor;
    // Add back 1 if the remainder still exceeds the divisor (unsigned compare).
    return quotient + (compare(rem, divisor) >= 0 ? 1 : 0);
  }

  /**
   * Returns dividend % divisor, where the dividend and divisor are treated as unsigned 64-bit
   * quantities.
   *
   * @param dividend the dividend (numerator)
   * @param divisor the divisor (denominator)
   * @throws ArithmeticException if divisor is 0
   * @since 11.0
   */
  @CheckReturnValue
  public static long remainder(long dividend, long divisor) {
    if (divisor < 0) { // i.e., divisor >= 2^63:
      if (compare(dividend, divisor) < 0) {
        return dividend; // dividend < divisor
      } else {
        return dividend - divisor; // dividend >= divisor
      }
    }

    // Optimization - use signed modulus if dividend < 2^63
    if (dividend >= 0) {
      return dividend % divisor;
    }

    /*
     * Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is
     * guaranteed to be either exact or one less than the correct value. This follows from fact
     * that floor(floor(x)/i) == floor(x/i) for any real x and integer i != 0. The proof is not
     * quite trivial.
     */
    long quotient = ((dividend >>> 1) / divisor) << 1;
    long rem = dividend - quotient * divisor;
    // Subtract one extra divisor if the approximate remainder overshoots (unsigned compare).
    return rem - (compare(rem, divisor) >= 0 ? divisor : 0);
  }

  /**
   * Returns the unsigned {@code long} value represented by the given decimal string.
   *
   * @throws NumberFormatException if the string does not contain a valid unsigned {@code long}
   *     value
   * @throws NullPointerException if {@code s} is null
   *     (in contrast to {@link Long#parseLong(String)})
   */
  public static long parseUnsignedLong(String s) {
    return parseUnsignedLong(s, 10);
  }

  /**
   * Returns the unsigned {@code long} value represented by the given string.
   *
   * Accepts a decimal, hexadecimal, or octal number given by specifying the following prefix:
   *
   * <ul>
   * <li>{@code 0x}<i>HexDigits</i>
   * <li>{@code 0X}<i>HexDigits</i>
   * <li>{@code #}<i>HexDigits</i>
   * <li>{@code 0}<i>OctalDigits</i>
   * </ul>
   *
   * @throws NumberFormatException if the string does not contain a valid unsigned {@code long}
   *     value
   * @since 13.0
   */
  public static long decode(String stringValue) {
    // ParseRequest (project-local helper) strips the radix prefix from the string.
    ParseRequest request = ParseRequest.fromString(stringValue);

    try {
      return parseUnsignedLong(request.rawValue, request.radix);
    } catch (NumberFormatException e) {
      // Re-wrap so the reported message shows the original, un-stripped input.
      NumberFormatException decodeException =
          new NumberFormatException("Error parsing value: " + stringValue);
      decodeException.initCause(e);
      throw decodeException;
    }
  }

  /**
   * Returns the unsigned {@code long} value represented by a string with the given radix.
   *
   * @param s the string containing the unsigned {@code long} representation to be parsed.
   * @param radix the radix to use while parsing {@code s}
   * @throws NumberFormatException if the string does not contain a valid unsigned {@code long}
   *     with the given radix, or if {@code radix} is not between {@link Character#MIN_RADIX}
   *     and {@link Character#MAX_RADIX}.
   * @throws NullPointerException if {@code s} is null
   *     (in contrast to {@link Long#parseLong(String)})
   */
  public static long parseUnsignedLong(String s, int radix) {
    checkNotNull(s);
    if (s.length() == 0) {
      throw new NumberFormatException("empty string");
    }
    if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX) {
      throw new NumberFormatException("illegal radix: " + radix);
    }

    // Positions up to max_safe_pos can never overflow, so the overflow check is skipped there.
    int max_safe_pos = maxSafeDigits[radix] - 1;
    long value = 0;
    for (int pos = 0; pos < s.length(); pos++) {
      int digit = Character.digit(s.charAt(pos), radix);
      if (digit == -1) {
        throw new NumberFormatException(s);
      }
      if (pos > max_safe_pos && overflowInParse(value, digit, radix)) {
        throw new NumberFormatException("Too large for unsigned long: " + s);
      }
      value = (value * radix) + digit;
    }

    return value;
  }

  /**
   * Returns true if (current * radix) + digit is a number too large to be represented by an
   * unsigned long. This is useful for detecting overflow while parsing a string representation of
   * a number. Does not verify whether supplied radix is valid, passing an invalid radix will give
   * undefined results or an ArrayIndexOutOfBoundsException.
   */
  private static boolean overflowInParse(long current, int digit, int radix) {
    if (current >= 0) {
      if (current < maxValueDivs[radix]) {
        return false;
      }
      if (current > maxValueDivs[radix]) {
        return true;
      }
      // current == maxValueDivs[radix]
      return (digit > maxValueMods[radix]);
    }

    // current < 0: high bit is set
    return true;
  }

  /**
   * Returns a string representation of x, where x is treated as unsigned.
   */
  @CheckReturnValue
  public static String toString(long x) {
    return toString(x, 10);
  }

  /**
   * Returns a string representation of {@code x} for the given radix, where {@code x} is treated
   * as unsigned.
   *
   * @param x the value to convert to a string.
   * @param radix the radix to use while working with {@code x}
   * @throws IllegalArgumentException if {@code radix} is not between {@link Character#MIN_RADIX}
   *     and {@link Character#MAX_RADIX}.
   */
  @CheckReturnValue
  public static String toString(long x, int radix) {
    checkArgument(
        radix >= Character.MIN_RADIX && radix <= Character.MAX_RADIX,
        "radix (%s) must be between Character.MIN_RADIX and Character.MAX_RADIX",
        radix);
    if (x == 0) {
      // Simply return "0"
      return "0";
    } else {
      // 64 chars is enough for the longest case: 64 binary digits.
      char[] buf = new char[64];
      int i = buf.length;
      if (x < 0) {
        // Separate off the last digit using unsigned division. That will leave
        // a number that is nonnegative as a signed integer.
        long quotient = divide(x, radix);
        long rem = x - quotient * radix;
        buf[--i] = Character.forDigit((int) rem, radix);
        x = quotient;
      }
      // Simple modulo/division approach
      while (x > 0) {
        buf[--i] = Character.forDigit((int) (x % radix), radix);
        x /= radix;
      }
      // Generate string
      return new String(buf, i, buf.length - i);
    }
  }

  // Per-radix lookup tables used by overflowInParse and parseUnsignedLong;
  // populated once in the static initializer below.
  // calculated as 0xffffffffffffffff / radix
  private static final long[] maxValueDivs = new long[Character.MAX_RADIX + 1];
  private static final int[] maxValueMods = new int[Character.MAX_RADIX + 1];
  private static final int[] maxSafeDigits = new int[Character.MAX_RADIX + 1];

  static {
    // overflow == 2^64; maxSafeDigits[i] is the count of radix-i digits that can
    // never overflow an unsigned long (one less than the digit count of 2^64 in radix i).
    BigInteger overflow = new BigInteger("10000000000000000", 16);
    for (int i = Character.MIN_RADIX; i <= Character.MAX_RADIX; i++) {
      maxValueDivs[i] = divide(MAX_VALUE, i);
      maxValueMods[i] = (int) remainder(MAX_VALUE, i);
      maxSafeDigits[i] = overflow.toString(i).length() - 1;
    }
  }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.SafeJdomFactory;
import org.jdom.Content;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;

/**
 * Expands {@code <xi:include>} elements in plugin descriptor XML in place, resolving
 * referenced documents through a pluggable {@link PathResolver}. Included content is
 * itself scanned recursively for further includes.
 */
@SuppressWarnings("DuplicatedCode")
final class PathBasedJdomXIncluder<T> {
  private static final Logger LOG = Logger.getInstance(PathBasedJdomXIncluder.class);

  public static final PathResolver<Path> DEFAULT_PATH_RESOLVER = new BasePathResolver();

  // XInclude attribute/element names (the "include" element lives in JDOMUtil.XINCLUDE_NAMESPACE).
  private static final @NonNls String INCLUDE = "include";
  private static final @NonNls String HREF = "href";
  private static final @NonNls String BASE = "base";
  private static final @NonNls String PARSE = "parse";
  private static final @NonNls String XML = "xml";
  private static final @NonNls String XPOINTER = "xpointer";

  private final DescriptorListLoadingContext context;
  private final PathResolver<T> pathResolver;

  private PathBasedJdomXIncluder(@NotNull DescriptorListLoadingContext context, @NotNull PathResolver<T> pathResolver) {
    this.context = context;
    this.pathResolver = pathResolver;
  }

  /**
   * Original element will be mutated in place.
   */
  public static <T> void resolveNonXIncludeElement(@NotNull Element original,
                                                   @Nullable Path base,
                                                   @NotNull DescriptorListLoadingContext context,
                                                   @NotNull PathResolver<T> pathResolver) {
    // The entry point must not itself be an <xi:include> element.
    LOG.assertTrue(!isIncludeElement(original));
    new PathBasedJdomXIncluder<>(context, pathResolver).resolveNonXIncludeElement(original, pathResolver.createNewStack(base));
  }

  /** True when the element is {@code include} in the XInclude namespace. */
  private static boolean isIncludeElement(Element element) {
    return element.getName().equals(INCLUDE) && element.getNamespace().equals(JDOMUtil.XINCLUDE_NAMESPACE);
  }

  /**
   * Resolves one {@code <xi:include>} element into the list of child elements it expands to.
   * Child elements are detached from the loaded document, recursively resolved, and appended
   * to {@code result} (allocated lazily when {@code null}).
   *
   * @return the accumulated elements; an empty list when the reference could not be loaded
   *         (missing-include tolerated or fallback present) and nothing was accumulated yet
   */
  private @NotNull List<Element> resolveXIncludeElement(@NotNull Element linkElement,
                                                        @NotNull List<T> bases,
                                                        @Nullable ArrayList<Element> result) {
    String relativePath = linkElement.getAttributeValue(HREF);
    if (relativePath == null) {
      throw new RuntimeException("Missing href attribute");
    }

    // Only parse="xml" is supported (parse="text" is not).
    String parseAttribute = linkElement.getAttributeValue(PARSE);
    if (parseAttribute != null) {
      LOG.assertTrue(parseAttribute.equals(XML), parseAttribute + " is not a legal value for the parse attribute");
    }

    Element remoteParsed = loadXIncludeReference(bases, relativePath, linkElement);
    if (remoteParsed != null) {
      // An xpointer attribute narrows the included document to a sub-element.
      String xpointer = linkElement.getAttributeValue(XPOINTER);
      if (xpointer != null) {
        remoteParsed = extractNeededChildren(remoteParsed, xpointer);
      }
    }

    if (remoteParsed == null) {
      return result == null ? Collections.emptyList() : result;
    }

    if (result == null) {
      result = new ArrayList<>(remoteParsed.getContentSize());
    }
    else {
      result.ensureCapacity(result.size() + remoteParsed.getContentSize());
    }

    // Detach each child element from the loaded document (iterator.remove) before
    // re-homing it into the including document; non-element content is skipped.
    Iterator<Content> iterator = remoteParsed.getContent().iterator();
    while (iterator.hasNext()) {
      Content content = iterator.next();
      if (!(content instanceof Element)) {
        continue;
      }

      iterator.remove();

      Element element = (Element)content;
      if (isIncludeElement(element)) {
        // Nested include: expand it directly into the same result list.
        resolveXIncludeElement(element, bases, result);
      }
      else {
        resolveNonXIncludeElement(element, bases);
        result.add(element);
      }
    }
    return result;
  }

  /**
   * Applies a limited xpointer of the form matched by {@code JDOMUtil.XPOINTER_PATTERN} /
   * {@code CHILDREN_PATTERN}: selects the root element by name and optionally one named child.
   *
   * @return the selected element, or {@code null} when the root element name does not match
   */
  private static @Nullable Element extractNeededChildren(@NotNull Element remoteElement, @NotNull String xpointer) {
    Matcher matcher = JDOMUtil.XPOINTER_PATTERN.matcher(xpointer);
    if (!matcher.matches()) {
      throw new RuntimeException("Unsupported XPointer: " + xpointer);
    }

    String pointer = matcher.group(1);
    matcher = JDOMUtil.CHILDREN_PATTERN.matcher(pointer);
    if (!matcher.matches()) {
      throw new RuntimeException("Unsupported pointer: " + pointer);
    }

    Element result = remoteElement;
    if (!result.getName().equals(matcher.group(1))) {
      return null;
    }

    String subTagName = matcher.group(2);
    if (subTagName != null) {
      // cut off the slash
      result = result.getChild(subTagName.substring(1));
      assert result != null;
    }
    return result;
  }

  /**
   * Loads the document referenced by an include and recursively resolves its own includes.
   *
   * @return the loaded root element, or {@code null} when the reference is missing and either a
   *         {@code fallback} child exists or the context tolerates missing includes
   */
  private @Nullable Element loadXIncludeReference(@NotNull List<T> bases, @NotNull String relativePath, @NotNull Element referrerElement) {
    // pathResolver.loadXIncludeReference may push onto `bases`; remember the depth so the
    // finally block can pop exactly what was pushed.
    int baseStackSize = bases.size();
    try {
      String base = referrerElement.getAttributeValue(BASE, Namespace.XML_NAMESPACE);
      if (base != null) {
        // to simplify implementation, no need to support obscure and not used base attribute
        LOG.error("Do not use xml:base attribute: " + base);
      }

      Element root = pathResolver.loadXIncludeReference(bases, relativePath, base, context.getXmlFactory());
      if (isIncludeElement(root)) {
        throw new UnsupportedOperationException("root tag of remote cannot be include");
      }
      else {
        resolveNonXIncludeElement(root, bases);
      }
      return root;
    }
    catch (JDOMException e) {
      throw new RuntimeException(e);
    }
    catch (IOException e) {
      Element fallbackElement = referrerElement.getChild("fallback", referrerElement.getNamespace());
      if (fallbackElement != null) {
        // we don't have fallback elements with content ATM
        return null;
      }
      else if (context.ignoreMissingInclude) {
        LOG.info(relativePath + " include ignored: " + e.getMessage());
        return null;
      }
      else {
        throw new RuntimeException(e);
      }
    }
    finally {
      // stack not modified, if, for example, pathResolver resolves element not via filesystem
      if (baseStackSize != bases.size()) {
        bases.remove(bases.size() - 1);
      }
    }
  }

  /**
   * Walks {@code original}'s children backwards (so in-place replacement by index stays valid),
   * substituting each {@code <xi:include>} child with its expansion and recursing into the rest.
   */
  private void resolveNonXIncludeElement(@NotNull Element original, @NotNull List<T> bases) {
    List<Content> contentList = original.getContent();
    for (int i = contentList.size() - 1; i >= 0; i--) {
      Content content = contentList.get(i);
      if (!(content instanceof Element)) {
        continue;
      }

      Element element = (Element)content;
      if (isIncludeElement(element)) {
        original.setContent(i, resolveXIncludeElement(element, bases, null));
      }
      else {
        // process child element to resolve possible includes
        resolveNonXIncludeElement(element, bases);
      }
    }
  }

  /**
   * Strategy for locating and loading documents referenced by includes. {@code T} is the
   * resolver's own notion of a base-path stack entry (e.g. {@link Path} for the default resolver).
   */
  interface PathResolver<T> {
    default boolean isFlat() {
      return false;
    }

    @NotNull Element loadXIncludeReference(@NotNull List<T> bases,
                                           @NotNull String relativePath,
                                           @Nullable String base,
                                           @NotNull SafeJdomFactory jdomFactory) throws IOException, JDOMException;

    @NotNull Element resolvePath(@NotNull Path basePath, @NotNull String relativePath, @NotNull SafeJdomFactory jdomFactory) throws IOException, JDOMException;

    @NotNull List<T> createNewStack(@Nullable Path base);
  }
}
/* * Certain versions of software and/or documents ("Material") accessible here may contain branding from * Hewlett-Packard Company (now HP Inc.) and Hewlett Packard Enterprise Company. As of September 1, 2017, * the Material is now offered by Micro Focus, a separately owned and operated company. Any reference to the HP * and Hewlett Packard Enterprise/HPE marks is historical in nature, and the HP and Hewlett Packard Enterprise/HPE * marks are the property of their respective owners. * __________________________________________________________________ * MIT License * * (c) Copyright 2012-2019 Micro Focus or one of its affiliates. * * The only warranties for products and services of Micro Focus and its affiliates * and licensors ("Micro Focus") are set forth in the express warranty statements * accompanying such products and services. Nothing herein should be construed as * constituting an additional warranty. Micro Focus shall not be liable for technical * or editorial errors or omissions contained herein. * The information contained herein is subject to change without notice. 
* ___________________________________________________________________ */ package com.microfocus.application.automation.tools.octane.actions.build; import com.hp.octane.integrations.dto.DTOFactory; import com.hp.octane.integrations.dto.causes.CIEventCauseType; import com.hp.octane.integrations.dto.parameters.CIParameter; import com.hp.octane.integrations.dto.parameters.CIParameterType; import com.hp.octane.integrations.dto.snapshots.CIBuildResult; import com.hp.octane.integrations.dto.snapshots.CIBuildStatus; import com.hp.octane.integrations.dto.snapshots.SnapshotNode; import com.microfocus.application.automation.tools.octane.OctanePluginTestBase; import com.microfocus.application.automation.tools.octane.actions.Utils; import com.microfocus.application.automation.tools.octane.tests.TestUtils; import hudson.model.*; import hudson.plugins.parameterizedtrigger.*; import org.junit.Test; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.UUID; import static org.junit.Assert.*; /** * Created with IntelliJ IDEA. * User: gullery * Date: 13/01/15 * Time: 11:46 * To change this template use File | Settings | File Templates. 
*/ public class BuildActionsFreeStyleTest extends OctanePluginTestBase { private static final DTOFactory dtoFactory = DTOFactory.getInstance(); // Snapshot: free-style, no params, no children // @Test public void testFreeStyleNoParamsNoChildren() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); int retries = 0; FreeStyleProject p = rule.createFreeStyleProject(projectName); assertEquals(p.getBuilds().toArray().length, 0); Utils.buildProject(client, p); while ((p.getLastBuild() == null || p.getLastBuild().isBuilding()) && ++retries < 40) { Thread.sleep(500); } assertEquals(p.getBuilds().toArray().length, 1); String taskUrl = "nga/api/v1/jobs/" + projectName + "/builds/" + p.getLastBuild().getNumber(); SnapshotNode snapshot = TestUtils.sendTask(taskUrl, SnapshotNode.class); assertEquals(projectName, snapshot.getJobCiId()); assertEquals(projectName, snapshot.getName()); assertEquals(0, snapshot.getParameters().size()); assertEquals(0, snapshot.getPhasesInternal().size()); assertEquals(0, snapshot.getPhasesPostBuild().size()); assertEquals(1, snapshot.getCauses().size()); assertEquals(CIEventCauseType.USER, snapshot.getCauses().get(0).getType()); assertEquals(String.valueOf(p.getLastBuild().getNumber()), snapshot.getNumber()); assertEquals(CIBuildStatus.FINISHED, snapshot.getStatus()); assertEquals(CIBuildResult.SUCCESS, snapshot.getResult()); assertNotNull(snapshot.getStartTime()); assertNotNull(snapshot.getDuration()); assertNotNull(snapshot.getEstimatedDuration()); } // Snapshot: free-style, with params, no children // @Test public void testFreeStyleWithParamsNoChildren() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); int retries = 0; FreeStyleProject p = rule.createFreeStyleProject(projectName); ParametersDefinitionProperty params = new ParametersDefinitionProperty(Arrays.asList( (ParameterDefinition) new BooleanParameterDefinition("ParamA", true, "bool"), (ParameterDefinition) new 
StringParameterDefinition("ParamB", "str", "string"), (ParameterDefinition) new TextParameterDefinition("ParamC", "txt", "text"), (ParameterDefinition) new ChoiceParameterDefinition("ParamD", new String[]{"one", "two", "three"}, "choice"), (ParameterDefinition) new FileParameterDefinition("ParamE", "file param") )); p.addProperty(params); CIParameter tmpParam; assertEquals(p.getBuilds().toArray().length, 0); Utils.buildProjectWithParams(client, p, "ParamA=false&ParamD=two&ParamX=some_string"); while ((p.getLastBuild() == null || p.getLastBuild().isBuilding()) && ++retries < 40) { Thread.sleep(500); } assertEquals(p.getBuilds().toArray().length, 1); String taskUrl = "nga/api/v1/jobs/" + projectName + "/builds/" + p.getLastBuild().getNumber(); SnapshotNode snapshot = TestUtils.sendTask(taskUrl, SnapshotNode.class); assertEquals(projectName, snapshot.getJobCiId()); assertEquals(projectName, snapshot.getName()); assertEquals(5, snapshot.getParameters().size()); assertEquals(0, snapshot.getPhasesInternal().size()); assertEquals(0, snapshot.getPhasesPostBuild().size()); assertEquals(1, snapshot.getCauses().size()); assertEquals(CIEventCauseType.USER, snapshot.getCauses().get(0).getType()); assertEquals(String.valueOf(p.getLastBuild().getNumber()), snapshot.getNumber()); assertEquals(CIBuildStatus.FINISHED, snapshot.getStatus()); assertEquals(CIBuildResult.SUCCESS, snapshot.getResult()); assertNotNull(snapshot.getStartTime()); assertNotNull(snapshot.getDuration()); assertNotNull(snapshot.getEstimatedDuration()); tmpParam = snapshot.getParameters().get(0); assertEquals("ParamA", tmpParam.getName()); assertEquals(CIParameterType.BOOLEAN, tmpParam.getType()); assertEquals("bool", tmpParam.getDescription()); assertEquals(true, tmpParam.getDefaultValue()); assertEquals("false", tmpParam.getValue()); assertNull(tmpParam.getChoices()); tmpParam = snapshot.getParameters().get(1); assertEquals("ParamB", tmpParam.getName()); assertEquals(CIParameterType.STRING, tmpParam.getType()); 
assertEquals("string", tmpParam.getDescription()); assertEquals("str", tmpParam.getDefaultValue()); assertEquals("str", tmpParam.getValue()); assertNull(tmpParam.getChoices()); tmpParam = snapshot.getParameters().get(2); assertEquals("ParamC", tmpParam.getName()); assertEquals(CIParameterType.STRING, tmpParam.getType()); assertEquals("text", tmpParam.getDescription()); assertEquals("txt", tmpParam.getDefaultValue()); assertEquals("txt", tmpParam.getValue()); tmpParam = snapshot.getParameters().get(3); assertEquals("ParamD", tmpParam.getName()); assertEquals(CIParameterType.STRING, tmpParam.getType()); assertEquals("choice", tmpParam.getDescription()); assertEquals("one", tmpParam.getDefaultValue()); assertEquals("two", tmpParam.getValue()); tmpParam = snapshot.getParameters().get(4); assertEquals("ParamE", tmpParam.getName()); assertEquals(CIParameterType.FILE, tmpParam.getType()); assertEquals("file param", tmpParam.getDescription()); assertEquals("", tmpParam.getDefaultValue()); assertEquals(null, tmpParam.getValue()); } // Snapshot: free-style, with params, with children // @Test public void testFreeStyleWithParamsWithChildren() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); int retries = 0; rule.jenkins.setNumExecutors(10); rule.jenkins.setNodes(rule.jenkins.getNodes()); FreeStyleProject p = rule.createFreeStyleProject(projectName); createProjectStructure(p); FreeStyleProject lastToBeBuilt = (FreeStyleProject) rule.jenkins.getItem("jobCC"); ParametersDefinitionProperty params = new ParametersDefinitionProperty(Arrays.asList( (ParameterDefinition) new BooleanParameterDefinition("ParamA", true, "bool"), (ParameterDefinition) new StringParameterDefinition("ParamB", "str", "string") )); p.addProperty(params); assertEquals(p.getBuilds().toArray().length, 0); Utils.buildProjectWithParams(client, p, "ParamA=false&ParamC=not_exists"); while ((lastToBeBuilt.getLastBuild() == null || lastToBeBuilt.getLastBuild().getNumber() < 6 || 
lastToBeBuilt.getLastBuild().isBuilding()) && retries++ < 100) { Thread.sleep(500); } assertEquals(p.getBuilds().toArray().length, 1); String taskUrl = "nga/api/v1/jobs/" + projectName + "/builds/" + p.getLastBuild().getNumber(); SnapshotNode snapshot = TestUtils.sendTask(taskUrl, SnapshotNode.class); assertEquals(projectName, snapshot.getJobCiId()); assertEquals(projectName, snapshot.getName()); assertEquals(2, snapshot.getParameters().size()); assertEquals(2, snapshot.getPhasesInternal().size()); assertEquals(2, snapshot.getPhasesPostBuild().size()); assertEquals(1, snapshot.getCauses().size()); assertEquals(CIEventCauseType.USER, snapshot.getCauses().get(0).getType()); assertEquals(String.valueOf(p.getLastBuild().getNumber()), snapshot.getNumber()); assertEquals(CIBuildStatus.FINISHED, snapshot.getStatus()); assertEquals(CIBuildResult.SUCCESS, snapshot.getResult()); assertNotNull(snapshot.getStartTime()); assertNotNull(snapshot.getDuration()); assertNotNull(snapshot.getEstimatedDuration()); } private void createProjectStructure(FreeStyleProject project) throws IOException { FreeStyleProject jobA = rule.createFreeStyleProject("jobA"); FreeStyleProject jobB = rule.createFreeStyleProject("jobB"); FreeStyleProject jobC = rule.createFreeStyleProject("jobC"); FreeStyleProject jobAA = rule.createFreeStyleProject("jobAA"); FreeStyleProject jobBB = rule.createFreeStyleProject("jobBB"); FreeStyleProject jobCC = rule.createFreeStyleProject("jobCC"); // jobA jobA.getBuildersList().add(Utils.getSleepScript(5)); jobA.getBuildersList().add(new TriggerBuilder(Collections.singletonList( new BlockableBuildTriggerConfig("jobAA, jobC", new BlockingBehaviour( Result.FAILURE, Result.FAILURE, Result.UNSTABLE ), null) ))); // jobB jobB.getBuildersList().add(Utils.getSleepScript(2)); jobB.getPublishersList().add(new hudson.tasks.BuildTrigger("jobBB, jobC", Result.SUCCESS)); // jobC jobC.getBuildersList().add(Utils.getSleepScript(5)); jobC.getPublishersList().add(new 
hudson.plugins.parameterizedtrigger.BuildTrigger(Collections.singletonList( new BuildTriggerConfig("jobCC", ResultCondition.ALWAYS, true, null) ))); jobAA.getBuildersList().add(Utils.getSleepScript(2)); jobBB.getBuildersList().add(Utils.getSleepScript(4)); jobCC.getBuildersList().add(Utils.getSleepScript(3)); // root job config project.getBuildersList().add(new TriggerBuilder(Arrays.asList( new BlockableBuildTriggerConfig("jobA, jobB", new BlockingBehaviour( Result.FAILURE, Result.FAILURE, Result.UNSTABLE ), Arrays.asList(new AbstractBuildParameters[0])), new BlockableBuildTriggerConfig("jobC", new BlockingBehaviour( Result.FAILURE, Result.FAILURE, Result.UNSTABLE ), Arrays.asList(new AbstractBuildParameters[0])) ))); project.getPublishersList().add(new hudson.tasks.BuildTrigger("jobA, jobB", Result.SUCCESS)); project.getPublishersList().add(new hudson.plugins.parameterizedtrigger.BuildTrigger(Collections.singletonList( new BuildTriggerConfig("jobC", ResultCondition.ALWAYS, true, null) ))); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import org.apache.commons.logging.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.mapred.JobTracker.JobTrackerMetrics; import org.apache.hadoop.mapred.JobHistory.Values ; import java.io.*; import java.net.*; import java.util.*; /////////////////////////////////////////////////////// // JobInProgress maintains all the info for keeping // a Job on the straight and narrow. It keeps its JobProfile // and its latest JobStatus, plus a set of tables for // doing bookkeeping of its Tasks. 
///////////////////////////////////////////////////////
// Thread-safety: the bookkeeping entry points (initTasks, updateTaskStatus,
// completedTask, kill, garbageCollect) are synchronized on this object.
// obtainNewMapTask/obtainNewReduceTask are NOT synchronized although they
// mutate the running-task counters -- NOTE(review): presumably the
// JobTracker serializes those calls; confirm before relying on it.
class JobInProgress {
    // Shared logger for all JobInProgress instances.
    private static final Log LOG =
        LogFactory.getLog("org.apache.hadoop.mapred.JobInProgress");

    JobProfile profile;          // static facts about the job (user, name, file, URL)
    JobStatus status;            // latest run-state / progress snapshot
    Path localJobFile = null;    // tracker-local copy of the submitted job .xml
    Path localJarFile = null;    // tracker-local copy of the job jar (if any)

    TaskInProgress maps[] = new TaskInProgress[0];      // one TIP per input split
    TaskInProgress reduces[] = new TaskInProgress[0];   // one TIP per reduce partition
    int numMapTasks = 0;
    int numReduceTasks = 0;

    // Bookkeeping counters, updated as task attempts start/finish/fail.
    int runningMapTasks = 0;
    int runningReduceTasks = 0;
    int finishedMapTasks = 0;
    int finishedReduceTasks = 0;
    int failedMapTasks = 0;
    int failedReduceTasks = 0;

    JobTracker jobtracker = null;

    // host name -> ArrayList of map TIPs whose split has data on that host
    // (raw types: this is pre-generics code).
    HashMap hostToMaps = new HashMap();

    private int taskCompletionEventTracker = 0;   // id for the next completion event
    List<TaskCompletionEvent> taskCompletionEvents;

    long startTime;    // wall-clock submit time (millis)
    long finishTime;   // wall-clock success/failure time (millis)

    private JobConf conf;
    boolean tasksInited = false;   // set once initTasks() has done its work
    private LocalFileSystem localFs;
    private String uniqueString;   // unique id fragment baked into job/task ids

    /**
     * Create a JobInProgress with the given job file, plus a handle
     * to the tracker.  Copies the submitted job file (and the job jar,
     * if one is configured) to the tracker-local filesystem, builds the
     * job profile/status, and logs the submission to JobHistory.
     *
     * @param jobFile path of the submitted job configuration file
     * @param jobtracker owning tracker (supplies ids, ports, callbacks)
     * @param default_conf cluster configuration the job conf layers on
     * @throws IOException if the job file or jar cannot be copied locally
     */
    public JobInProgress(String jobFile, JobTracker jobtracker,
                         Configuration default_conf) throws IOException {
        uniqueString = jobtracker.createUniqueId();
        String jobid = "job_" + uniqueString;
        String url = "http://" + jobtracker.getJobTrackerMachine() + ":"
            + jobtracker.getInfoPort() + "/jobdetails.jsp?jobid=" + jobid;
        this.jobtracker = jobtracker;
        this.status = new JobStatus(jobid, 0.0f, 0.0f, JobStatus.PREP);
        this.startTime = System.currentTimeMillis();
        this.localFs = (LocalFileSystem)FileSystem.getLocal(default_conf);

        JobConf default_job_conf = new JobConf(default_conf);
        this.localJobFile = default_job_conf.getLocalPath(
            JobTracker.SUBDIR + "/" + jobid + ".xml");
        this.localJarFile = default_job_conf.getLocalPath(
            JobTracker.SUBDIR + "/" + jobid + ".jar");
        FileSystem fs = FileSystem.get(default_conf);
        // Pull the submitted job conf down to local disk and re-read it.
        fs.copyToLocalFile(new Path(jobFile), localJobFile);
        conf = new JobConf(localJobFile);
        this.profile = new JobProfile(conf.getUser(), jobid, jobFile, url,
                                      conf.getJobName());
        String jarFile = conf.getJar();
        if (jarFile != null) {
            // Localize the jar too, and point the conf at the local copy.
            fs.copyToLocalFile(new Path(jarFile), localJarFile);
            conf.setJar(localJarFile.toString());
        }

        this.numMapTasks = conf.getNumMapTasks();
        this.numReduceTasks = conf.getNumReduceTasks();
        // Pre-size for one event per task plus a little slack.
        this.taskCompletionEvents = new ArrayList(
            numMapTasks + numReduceTasks + 10);

        JobHistory.JobInfo.logSubmitted(jobid, conf.getJobName(),
            conf.getUser(), System.currentTimeMillis(), jobFile);
    }

    /**
     * Construct the splits, etc.  This is invoked from an async
     * thread so that split-computation doesn't block anyone.
     * Idempotent: only the first call does the work.
     *
     * @throws IOException if split computation or filesystem access fails
     */
    public synchronized void initTasks() throws IOException {
        if (tasksInited) {
            return;
        }

        //
        // construct input splits
        //
        String jobFile = profile.getJobFile();

        FileSystem fs = FileSystem.get(conf);
        if (localJarFile != null) {
            // Make user classes (e.g. the InputFormat) loadable from the jar.
            ClassLoader loader = new URLClassLoader(new URL[]{
                localFs.pathToFile(localJarFile).toURL()
            });
            conf.setClassLoader(loader);
        }
        InputFormat inputFormat = conf.getInputFormat();

        InputSplit[] splits = inputFormat.getSplits(conf, numMapTasks);

        //
        // sort splits by decreasing length, to reduce job's tail
        //
        Arrays.sort(splits, new Comparator() {
            public int compare(Object a, Object b) {
                // Compare via sign of the difference to avoid int overflow
                // on long lengths.
                long diff =
                    ((FileSplit)b).getLength() - ((FileSplit)a).getLength();
                return diff==0 ? 0 : (diff > 0 ? 1 : -1);
            }
        });

        //
        // adjust number of map tasks to actual number of splits
        //
        this.numMapTasks = splits.length;
        // if no split is returned, job is considered completed and successful
        if (numMapTasks == 0) {
            this.status = new JobStatus(status.getJobId(), 1.0f, 1.0f,
                                        JobStatus.SUCCEEDED);
            tasksInited = true;
            return;
        }

        // create a map task for each split
        this.maps = new TaskInProgress[numMapTasks];
        for (int i = 0; i < numMapTasks; i++) {
            maps[i] = new TaskInProgress(uniqueString, jobFile, splits[i],
                                         jobtracker, conf, this, i);
        }

        //
        // Create reduce tasks
        //
        this.reduces = new TaskInProgress[numReduceTasks];
        for (int i = 0; i < numReduceTasks; i++) {
            reduces[i] = new TaskInProgress(uniqueString, jobFile, numMapTasks,
                                            i, jobtracker, conf, this);
        }

        //
        // Obtain some tasktracker-cache information for the map task splits.
        //
        for (int i = 0; i < maps.length; i++) {
            String hints[] = splits[i].getLocations();
            for (int k = 0; k < hints.length; k++) {
                ArrayList hostMaps = (ArrayList)hostToMaps.get(hints[k]);
                if (hostMaps == null) {
                    hostMaps = new ArrayList();
                    hostToMaps.put(hints[k], hostMaps);
                }
                hostMaps.add(maps[i]);
            }
        }

        this.status = new JobStatus(status.getJobId(), 0.0f, 0.0f,
                                    JobStatus.RUNNING);
        tasksInited = true;

        JobHistory.JobInfo.logStarted(profile.getJobId(),
            System.currentTimeMillis(), numMapTasks, numReduceTasks);
    }

    /////////////////////////////////////////////////////
    // Accessors for the JobInProgress
    /////////////////////////////////////////////////////
    public JobProfile getProfile() {
        return profile;
    }
    public JobStatus getStatus() {
        return status;
    }
    public long getStartTime() {
        return startTime;
    }
    public long getFinishTime() {
        return finishTime;
    }
    public int desiredMaps() {
        return numMapTasks;
    }
    public int finishedMaps() {
        return finishedMapTasks;
    }
    public int desiredReduces() {
        return numReduceTasks;
    }
    public synchronized int runningMaps() {
        return runningMapTasks;
    }
    public synchronized int runningReduces() {
        return runningReduceTasks;
    }
    public int finishedReduces() {
        return finishedReduceTasks;
    }

    /**
     * Get the list of map tasks
     * @return the raw array of maps for this job
     */
    TaskInProgress[] getMapTasks() {
        return maps;
    }

    /**
     * Get the list of reduce tasks
     * @return the raw array of reduce tasks for this job
     */
    TaskInProgress[] getReduceTasks() {
        return reduces;
    }

    /**
     * Get the job configuration
     * @return the job's configuration
     */
    JobConf getJobConf() {
        return conf;
    }

    /**
     * Report the tasks on one side of the job whose completion flag matches
     * the request.  (Despite the historical "treeset" wording, this returns
     * a Vector of TaskInProgress objects.)
     *
     * @param shouldBeMap true to scan map tasks, false for reduce tasks
     * @param shouldBeComplete completion state to match
     * @return Vector of the matching TaskInProgress objects
     */
    public Vector reportTasksInProgress(boolean shouldBeMap,
                                        boolean shouldBeComplete) {
        Vector results = new Vector();
        TaskInProgress tips[] = null;
        if (shouldBeMap) {
            tips = maps;
        } else {
            tips = reduces;
        }
        for (int i = 0; i < tips.length; i++) {
            if (tips[i].isComplete() == shouldBeComplete) {
                results.add(tips[i]);
            }
        }
        return results;
    }

    ////////////////////////////////////////////////////
    // Status update methods
    ////////////////////////////////////////////////////

    /**
     * Absorb a TaskStatus report for one task attempt: record a completion
     * event, route the attempt to completedTask/failedTask as appropriate,
     * and fold the attempt's progress delta into the job-level map/reduce
     * progress figures.
     *
     * @param tip the task this status belongs to
     * @param status the freshly reported attempt status
     * @param metrics tracker metrics updated on completion
     */
    public synchronized void updateTaskStatus(TaskInProgress tip,
                                              TaskStatus status,
                                              JobTrackerMetrics metrics) {
        double oldProgress = tip.getProgress(); // save old progress
        boolean wasRunning = tip.isRunning();
        boolean wasComplete = tip.isComplete();
        boolean change = tip.updateStatus(status);
        if (change) {
            TaskStatus.State state = status.getRunState();
            TaskTrackerStatus ttStatus =
                this.jobtracker.getTaskTracker(status.getTaskTracker());
            String httpTaskLogLocation = null;
            // Tracker may already be gone; only then is the log URL unknown.
            if (null != ttStatus) {
                httpTaskLogLocation = "http://" + ttStatus.getHost() + ":"
                    + ttStatus.getHttpPort()
                    + "/tasklog.jsp?plaintext=true&taskid="
                    + status.getTaskId() + "&all=true";
            }
            if (state == TaskStatus.State.SUCCEEDED) {
                this.taskCompletionEvents.add(new TaskCompletionEvent(
                    taskCompletionEventTracker++,
                    status.getTaskId(),
                    TaskCompletionEvent.Status.SUCCEEDED,
                    httpTaskLogLocation));
                completedTask(tip, status, metrics);
            } else if (state == TaskStatus.State.FAILED
                       || state == TaskStatus.State.KILLED) {
                this.taskCompletionEvents.add(new TaskCompletionEvent(
                    taskCompletionEventTracker++,
                    status.getTaskId(),
                    TaskCompletionEvent.Status.FAILED,
                    httpTaskLogLocation));
                // Tell the job to fail the relevant task
                failedTask(tip, status.getTaskId(), status,
                           status.getTaskTracker(), wasRunning, wasComplete);
            }
        }

        //
        // Update JobInProgress status
        //
        LOG.debug("Taking progress for " + tip.getTIPId() + " from "
                  + oldProgress + " to " + tip.getProgress());
        // Job progress is the running average of per-task progress; fold in
        // this attempt's delta scaled by the task count.
        double progressDelta = tip.getProgress() - oldProgress;
        if (tip.isMapTask()) {
            if (maps.length == 0) {
                this.status.setMapProgress(1.0f);
            } else {
                this.status.setMapProgress((float) (this.status.mapProgress()
                    + progressDelta / maps.length));
            }
        } else {
            if (reduces.length == 0) {
                this.status.setReduceProgress(1.0f);
            } else {
                this.status.setReduceProgress(
                    (float) (this.status.reduceProgress()
                             + (progressDelta / reduces.length)));
            }
        }
    }

    /////////////////////////////////////////////////////
    // Create/manage tasks
    /////////////////////////////////////////////////////

    /**
     * Return a MapTask, if appropriate, to run on the given tasktracker.
     * Prefers splits cached on the tracker's host, then delegates the
     * rest of the choice to findNewTask.
     * NOTE(review): not synchronized, yet mutates runningMapTasks.
     *
     * @param tts the tracker asking for work
     * @param clusterSize number of task trackers in the cluster
     * @return a runnable Task, or null if nothing suitable
     */
    public Task obtainNewMapTask(TaskTrackerStatus tts, int clusterSize) {
        if (! tasksInited) {
            LOG.info("Cannot create task split for " + profile.getJobId());
            return null;
        }
        ArrayList mapCache = (ArrayList)hostToMaps.get(tts.getHost());
        int target = findNewTask(tts, clusterSize, status.mapProgress(),
                                 maps, mapCache);
        if (target == -1) {
            return null;
        }
        boolean wasRunning = maps[target].isRunning();
        Task result = maps[target].getTaskToRun(tts.getTrackerName());
        if (!wasRunning) {
            // First attempt of this TIP to start running.
            runningMapTasks += 1;
            JobHistory.Task.logStarted(profile.getJobId(),
                maps[target].getTIPId(), Values.MAP.name(),
                System.currentTimeMillis());
        }
        return result;
    }

    /**
     * Return a ReduceTask, if appropriate, to run on the given tasktracker.
     * We don't have cache-sensitivity for reduce tasks, as they
     * work on temporary MapRed files.
     * NOTE(review): not synchronized, yet mutates runningReduceTasks.
     *
     * @param tts the tracker asking for work
     * @param clusterSize number of task trackers in the cluster
     * @return a runnable Task, or null if nothing suitable
     */
    public Task obtainNewReduceTask(TaskTrackerStatus tts, int clusterSize) {
        if (! tasksInited) {
            LOG.info("Cannot create task split for " + profile.getJobId());
            return null;
        }
        int target = findNewTask(tts, clusterSize, status.reduceProgress(),
                                 reduces, null);
        if (target == -1) {
            return null;
        }
        boolean wasRunning = reduces[target].isRunning();
        Task result = reduces[target].getTaskToRun(tts.getTrackerName());
        if (!wasRunning) {
            // First attempt of this TIP to start running.
            runningReduceTasks += 1;
            JobHistory.Task.logStarted(profile.getJobId(),
                reduces[target].getTIPId(), Values.REDUCE.name(),
                System.currentTimeMillis());
        }
        return result;
    }

    /**
     * Find a new task to run.  Preference order: a cached (data-local)
     * task, then a fresh runnable task, then a speculative re-execution,
     * and only as a last resort a task that already failed on this tracker.
     * @param tts The task tracker that is asking for a task
     * @param clusterSize The number of task trackers in the cluster
     * @param avgProgress The average progress of this kind of task in this job
     * @param tasks The list of potential tasks to try
     * @param cachedTasks A list of tasks that would like to run on this node
     * @return the index in tasks of the selected task (or -1 for no task)
     */
    private int findNewTask(TaskTrackerStatus tts,
                            int clusterSize,
                            double avgProgress,
                            TaskInProgress[] tasks,
                            List cachedTasks) {
        String taskTracker = tts.getTrackerName();
        //
        // See if there is a split over a block that is stored on
        // the TaskTracker checking in.  That means the block
        // doesn't have to be transmitted from another node.
        //
        if (cachedTasks != null) {
            Iterator i = cachedTasks.iterator();
            while (i.hasNext()) {
                TaskInProgress tip = (TaskInProgress)i.next();
                // Each cached candidate is considered at most once.
                i.remove();
                if (tip.isRunnable()
                    && !tip.isRunning()
                    && !tip.hasFailedOnMachine(taskTracker)) {
                    LOG.info("Choosing cached task " + tip.getTIPId());
                    int cacheTarget = tip.getIdWithinJob();
                    return cacheTarget;
                }
            }
        }

        //
        // If there's no cached target, see if there's
        // a std. task to run.
        //
        int failedTarget = -1;
        int specTarget = -1;
        for (int i = 0; i < tasks.length; i++) {
            TaskInProgress task = tasks[i];
            if (task.isRunnable()) {
                // if it failed here and we haven't tried every machine, we
                // don't schedule it here.
                boolean hasFailed = task.hasFailedOnMachine(taskTracker);
                if (hasFailed && (task.getNumberOfFailedMachines() < clusterSize)) {
                    continue;
                }
                boolean isRunning = task.isRunning();
                if (hasFailed) {
                    // failed tasks that aren't running can be scheduled as a
                    // last resort
                    if (!isRunning && failedTarget == -1) {
                        failedTarget = i;
                    }
                } else {
                    if (!isRunning) {
                        LOG.info("Choosing normal task " + tasks[i].getTIPId());
                        return i;
                    } else if (specTarget == -1
                               && task.hasSpeculativeTask(avgProgress)
                               && ! task.hasRunOnMachine(taskTracker)) {
                        // Remember the first speculative candidate.
                        specTarget = i;
                    }
                }
            }
        }
        if (specTarget != -1) {
            LOG.info("Choosing speculative task "
                     + tasks[specTarget].getTIPId());
        } else if (failedTarget != -1) {
            LOG.info("Choosing failed task "
                     + tasks[failedTarget].getTIPId());
        }
        return specTarget != -1 ? specTarget : failedTarget;
    }

    /**
     * A taskid assigned to this JobInProgress has reported in successfully.
     * Records history, updates the running/finished counters, and -- when
     * every map and reduce is complete -- marks the whole job SUCCEEDED
     * and garbage-collects it.
     *
     * @param tip the task that completed
     * @param status status report of the successful attempt
     * @param metrics tracker metrics to update
     */
    public synchronized void completedTask(TaskInProgress tip,
                                           TaskStatus status,
                                           JobTrackerMetrics metrics) {
        String taskid = status.getTaskId();

        // Sanity check: is the TIP already complete?
        if (tip.isComplete()) {
            LOG.info("Already complete TIP " + tip.getTIPId()
                     + " has completed task " + taskid);

            // Just mark this 'task' as complete
            tip.completedTask(taskid);

            // Let the JobTracker cleanup this taskid if the job isn't running
            if (this.status.getRunState() != JobStatus.RUNNING) {
                jobtracker.markCompletedTaskAttempt(status.getTaskTracker(),
                                                    taskid);
            }
            return;
        }

        LOG.info("Task '" + taskid + "' has completed " + tip.getTIPId()
                 + " successfully.");

        // Update jobhistory
        String taskTrackerName = status.getTaskTracker();
        if (status.getIsMap()) {
            JobHistory.MapAttempt.logStarted(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(), status.getStartTime(),
                taskTrackerName);
            JobHistory.MapAttempt.logFinished(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(), status.getFinishTime(),
                taskTrackerName);
            JobHistory.Task.logFinished(profile.getJobId(), tip.getTIPId(),
                Values.MAP.name(), status.getFinishTime());
        } else {
            JobHistory.ReduceAttempt.logStarted(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(), status.getStartTime(),
                taskTrackerName);
            JobHistory.ReduceAttempt.logFinished(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(),
                status.getShuffleFinishTime(), status.getSortFinishTime(),
                status.getFinishTime(), taskTrackerName);
            JobHistory.Task.logFinished(profile.getJobId(), tip.getTIPId(),
                Values.REDUCE.name(), status.getFinishTime());
        }

        // Mark the TIP as complete
        tip.completed(taskid);

        // Update the running/finished map/reduce counts
        if (tip.isMapTask()) {
            runningMapTasks -= 1;
            finishedMapTasks += 1;
            metrics.completeMap();
        } else {
            runningReduceTasks -= 1;
            finishedReduceTasks += 1;
            metrics.completeReduce();
        }

        //
        // Figure out whether the Job is done
        //
        boolean allDone = true;
        for (int i = 0; i < maps.length; i++) {
            if (! maps[i].isComplete()) {
                allDone = false;
                break;
            }
        }
        if (allDone) {
            // Only bother checking reduces once every map is complete.
            if (tip.isMapTask()) {
                this.status.setMapProgress(1.0f);
            }
            for (int i = 0; i < reduces.length; i++) {
                if (! reduces[i].isComplete()) {
                    allDone = false;
                    break;
                }
            }
        }

        //
        // If all tasks are complete, then the job is done!
        //
        if (this.status.getRunState() == JobStatus.RUNNING && allDone) {
            this.status.setRunState(JobStatus.SUCCEEDED);
            this.status.setReduceProgress(1.0f);
            this.finishTime = System.currentTimeMillis();
            garbageCollect();
            LOG.info("Job " + this.status.getJobId()
                     + " has completed successfully.");
            JobHistory.JobInfo.logFinished(this.status.getJobId(), finishTime,
                this.finishedMapTasks, this.finishedReduceTasks,
                failedMapTasks, failedReduceTasks);
            metrics.completeJob();
        } else if (this.status.getRunState() != JobStatus.RUNNING) {
            // The job has been killed/failed,
            // JobTracker should cleanup this task
            jobtracker.markCompletedTaskAttempt(status.getTaskTracker(),
                                                taskid);
        }
    }

    /**
     * Kill the job and all its component tasks.
     * No-op if the job is already FAILED.
     */
    public synchronized void kill() {
        if (status.getRunState() != JobStatus.FAILED) {
            LOG.info("Killing job '" + this.status.getJobId() + "'");
            this.status = new JobStatus(status.getJobId(), 1.0f, 1.0f,
                                        JobStatus.FAILED);
            this.finishTime = System.currentTimeMillis();
            this.runningMapTasks = 0;
            this.runningReduceTasks = 0;
            //
            // kill all TIPs.
            //
            for (int i = 0; i < maps.length; i++) {
                maps[i].kill();
            }
            for (int i = 0; i < reduces.length; i++) {
                reduces[i].kill();
            }
            JobHistory.JobInfo.logFailed(this.status.getJobId(), finishTime,
                this.finishedMapTasks, this.finishedReduceTasks);
            garbageCollect();
        }
    }

    /**
     * A task assigned to this JobInProgress has reported in as failed.
     * Most of the time, we'll just reschedule execution.  However, after
     * many repeated failures we may instead decide to allow the entire
     * job to fail.
     *
     * Even if a task has reported as completed in the past, it might later
     * be reported as failed.  That's because the TaskTracker that hosts a map
     * task might die before the entire job can complete.  If that happens,
     * we need to schedule reexecution so that downstream reduce tasks can
     * obtain the map task's output.
     *
     * @param tip the task whose attempt failed
     * @param taskid id of the failed attempt
     * @param status status report of the failed attempt
     * @param trackerName tracker the attempt ran on
     * @param wasRunning whether the TIP was running before this report
     * @param wasComplete whether the TIP was complete before this report
     */
    private void failedTask(TaskInProgress tip, String taskid,
                            TaskStatus status, String trackerName,
                            boolean wasRunning, boolean wasComplete) {
        // Mark the taskid as a 'failure'
        tip.failedSubTask(taskid, trackerName);

        boolean isRunning = tip.isRunning();
        boolean isComplete = tip.isComplete();

        // update running count on task failure.
        if (wasRunning && !isRunning) {
            if (tip.isMapTask()) {
                runningMapTasks -= 1;
            } else {
                runningReduceTasks -= 1;
            }
        }

        // the case when the map was complete but the task tracker went down.
        if (wasComplete && !isComplete) {
            if (tip.isMapTask()) {
                finishedMapTasks -= 1;
            }
        }

        // update job history
        String taskTrackerName = status.getTaskTracker();
        if (status.getIsMap()) {
            JobHistory.MapAttempt.logStarted(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(), status.getStartTime(),
                taskTrackerName);
            JobHistory.MapAttempt.logFailed(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(),
                System.currentTimeMillis(), taskTrackerName,
                status.getDiagnosticInfo());
        } else {
            JobHistory.ReduceAttempt.logStarted(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(), status.getStartTime(),
                taskTrackerName);
            JobHistory.ReduceAttempt.logFailed(profile.getJobId(),
                tip.getTIPId(), status.getTaskId(),
                System.currentTimeMillis(), taskTrackerName,
                status.getDiagnosticInfo());
        }

        // After this, try to assign tasks with the one after this, so that
        // the failed task goes to the end of the list.
        if (tip.isMapTask()) {
            failedMapTasks++;
        } else {
            failedReduceTasks++;
        }

        //
        // Let the JobTracker know that this task has failed
        //
        jobtracker.markCompletedTaskAttempt(status.getTaskTracker(), taskid);

        //
        // Check if we need to kill the job because of too many failures
        //
        if (tip.isFailed()) {
            LOG.info("Aborting job " + profile.getJobId());
            JobHistory.Task.logFailed(profile.getJobId(), tip.getTIPId(),
                tip.isMapTask() ? Values.MAP.name():Values.REDUCE.name(),
                System.currentTimeMillis(), status.getDiagnosticInfo());
            JobHistory.JobInfo.logFailed(profile.getJobId(),
                System.currentTimeMillis(), this.finishedMapTasks,
                this.finishedReduceTasks);
            kill();
        }
    }

    /**
     * Fail a task with a given reason, but without a status object.
     * Synthesizes a FAILED TaskStatus and routes it through
     * updateTaskStatus.
     * @author Owen O'Malley
     * @param tip The task's tip
     * @param taskid The task id
     * @param reason The reason that the task failed
     * @param phase the phase the task failed in
     * @param hostname the host the task failed on -- NOTE(review): this
     *        parameter is currently unused by the method body
     * @param trackerName The task tracker the task failed on
     * @param metrics tracker metrics passed through to updateTaskStatus
     */
    public void failedTask(TaskInProgress tip, String taskid, String reason,
                           TaskStatus.Phase phase, String hostname,
                           String trackerName, JobTrackerMetrics metrics) {
        TaskStatus status = new TaskStatus(taskid, tip.isMapTask(), 0.0f,
                                           TaskStatus.State.FAILED, reason,
                                           reason, trackerName, phase);
        updateTaskStatus(tip, status, metrics);
        JobHistory.Task.logFailed(profile.getJobId(), tip.getTIPId(),
            tip.isMapTask() ? Values.MAP.name() : Values.REDUCE.name(),
            System.currentTimeMillis(), reason);
    }

    /**
     * The job is dead.  We're now GC'ing it, getting rid of the job
     * from all tables.  Be sure to remove all of this job's tasks
     * from the various tables.  Deletes the local job file/jar copies,
     * the submitted job directory, and any temp dirs in the system dir.
     */
    synchronized void garbageCollect() {
        // Let the JobTracker know that a job is complete
        jobtracker.finalizeJob(this);

        try {
            // Definitely remove the local-disk copy of the job file
            if (localJobFile != null) {
                localFs.delete(localJobFile);
                localJobFile = null;
            }
            if (localJarFile != null) {
                localFs.delete(localJarFile);
                localJarFile = null;
            }

            // JobClient always creates a new directory with job files
            // so we remove that directory to cleanup
            FileSystem fs = FileSystem.get(conf);
            fs.delete(new Path(profile.getJobFile()).getParent());

            // Delete temp dfs dirs created if any, like in case of
            // speculative exn of reduces.
            String tempDir = conf.get("mapred.system.dir") + "/job_"
                + uniqueString;
            fs.delete(new Path(tempDir));
        } catch (IOException e) {
            // Best-effort cleanup: log and carry on.
            LOG.warn("Error cleaning up "+profile.getJobId()+": "+e);
        }
    }

    /**
     * Return the TaskInProgress that matches the tipid.
     * Scans maps first, then reduces; null if no match.
     *
     * @param tipid id to look up
     * @return the matching TaskInProgress, or null
     */
    public TaskInProgress getTaskInProgress(String tipid) {
        for (int i = 0; i < maps.length; i++) {
            if (tipid.equals(maps[i].getTIPId())) {
                return maps[i];
            }
        }
        for (int i = 0; i < reduces.length; i++) {
            if (tipid.equals(reduces[i].getTIPId())) {
                return reduces[i];
            }
        }
        return null;
    }

    /**
     * Find the details of someplace where a map has finished
     * @param mapId the id of the map
     * @return the task status of the completed task
     *         (null if the map is not yet complete or no attempt succeeded)
     */
    public TaskStatus findFinishedMap(int mapId) {
        TaskInProgress tip = maps[mapId];
        if (tip.isComplete()) {
            TaskStatus[] statuses = tip.getTaskStatuses();
            for (int i=0; i < statuses.length; i++) {
                if (statuses[i].getRunState() == TaskStatus.State.SUCCEEDED) {
                    return statuses[i];
                }
            }
        }
        return null;
    }

    /**
     * Return the task-completion events recorded at or after the given id,
     * or an empty array if there are none that new.
     *
     * @param fromEventId first event id of interest
     * @return the events from fromEventId to the latest recorded
     */
    public TaskCompletionEvent[] getTaskCompletionEvents(int fromEventId) {
        TaskCompletionEvent[] events = TaskCompletionEvent.EMPTY_ARRAY;
        if (taskCompletionEvents.size() > fromEventId) {
            events = (TaskCompletionEvent[])taskCompletionEvents.subList(
                fromEventId, taskCompletionEvents.size()).toArray(events);
        }
        return events;
    }
}
package com.jurajmasar.ib.dossier.data_structures; /** * Vector class - data structure with dynamic allocation of memory. * 'Enhanced array' * * @author Juraj Masar * @version 0.1 */ import com.jurajmasar.ib.dossier.root.Static; /** DOSSIER: HL mastery 12-15 - ADT **/ public class Vector<O> { private VectorItem<O> first; //pointer to first VectorItem in the vector private VectorItem<O> last; //pointer to last VectorItem in the vector private int count; //number of elements in Vector /** * Constructor for objects of class Vector */ public Vector() { // initialise instance variables first = null; last = null; count = 0; } /** * Constructor for objects of class Vector * which populates the Vector with data given. * @param c number of elements to insert * @param o default element */ public Vector(int c, O o) { // initialise instance variables first = null; last = null; count = 0; for (int i=0;i<=c-1;i++) pushBack(o); } /** * Returns the number of elements in the Vector * * @return count of the elements in Vector */ public int size() { return count; } /** * Checks whether the vector is empty. * * @return true if the vector is empty */ public boolean isEmpty() { if (count > 0) return false; else return true; } /** * Removes all elements from the Vector * */ public void clear() { first = null; last = null; count = 0; } /** * Returns a clone of this vector. 
* * @return vector */ public Vector clone() { Vector<O> v = new Vector<O>(); VectorItem<O> vi = first; while (vi != null) { v.pushBack (vi.getObject()); vi = vi.getNext(); } return v; } /** * Returns the first element * * @return object */ public O first() { if (isEmpty()) return null; return first.getObject(); } /** * Returns the first element * * @return VectorItem */ public VectorItem<O> firstVectorItem() { if (isEmpty()) return null; return first; } /** * Returns the last element * * @return object */ public O last() { if (isEmpty()) return null; return last.getObject(); } /** * Adds element to the end of Vector * * @param o object to be inserted */ public void pushBack (O o) { //create new VectorItem VectorItem<O> vi = new VectorItem<O> (o,last, null); if (isEmpty()) { last = vi; first = vi; } else { last.setNext(vi); last = vi; } count++; } /** * Adds element to the beginning of the vector * * @param o object to add */ public void pushFront(O o) { //create new VectorItem VectorItem<O> vi = new VectorItem<O> (o,null, first); first = vi; if (isEmpty()) last = vi; count++; } /** * Removes and returns the last element of the vector. * * @return object */ public O popBack() { if (!isEmpty()) { VectorItem<O> l = last; last = last.getPrevious(); count--; if (isEmpty()) first = null; else last.setNext(null); return l.getObject(); } else return null; } /** * Removes and returns the first element of the vector. * * @return object */ public O popFront() { if (!isEmpty()) { VectorItem<O> l = first; first = first.getNext(); count--; if (isEmpty()) last = null; else first.setPrevious(null); return l.getObject(); } else return null; } /** * Returns object at index given. 
* * @param index index of the element * @return object */ public O get(int index) { if (index >= count || index < 0) return null; if (index > count/2) { //search from the end VectorItem<O> vi = last; for (int i=count-1;i>index;i--) { vi = vi.getPrevious(); } return vi.getObject(); } else { //search from the beginning VectorItem<O> vi = first; for (int i=0;i<index;i++) { vi = vi.getNext(); } return vi.getObject(); } } /** * Returns the VectorItem at index given. * * @param index index of the element * @return Object */ public VectorItem<O> getVectorItem(int index) { if (index >= count || index < 0) return null; if (index > count/2) { //search from the end VectorItem<O> vi = last; for (int i=count-1;i>index;i--) { vi = vi.getPrevious(); } return vi; } else { //search from the beginning VectorItem<O> vi = first; for (int i=0;i<index;i++) { vi = vi.getNext(); } return vi; } } /** * Sets object to the vector at position given. * * @param o object * @param index position * @return true if operation was successful */ public boolean set(O o, int index) { if (index >= count || index < 0) return false; VectorItem<O> vi = first; for (int i=0;i<=index-1;i++) { vi = vi.getNext(); } vi.setObject(o); return true; } /** * Returns the index of first occurance of given object. * * @param o object to search for * @return index of the found object or -1 */ public int indexOf(O o) { VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { if (vi.getObject().equals(o)) return i; vi = vi.getNext(); } return -1; } /** * Inserts object given to the position given in Vector. 
* * @param index index where to insert * @param o object to insert * @return true if successful */ public boolean insertAt(int index, O o) { if (index < 0 || index > size()) return false; //out of range if (index == size()) //last { pushBack (o); return true; } else if (index == 0) { pushFront (o); return true; } else { VectorItem<O> before, after; before = getVectorItem(index-1); after = getVectorItem(index); VectorItem<O> vi = new VectorItem<O> (o,before,after); before.setNext(vi); after.setPrevious(vi); return true; } } /** * Inserts the object given after the other given object. * * @param where object after which new object should be inserted * @param what object to insert * @return true if successful */ public boolean insertAfter(O where, O what) { return insertAt(indexOf(where)+1,what); } /** * Inserts the object given before the other given object. * * @param where object before which new object should be inserted * @param what object to insert * @return true if successful */ public boolean insertBefore(O where, O what) { return insertAt(indexOf(where)-1,what); } /** * Returns object which is situated in the Vector just * before the object given or null * * @param a object to find * @return object before the object given */ public O before(O a) { int index = indexOf(a); if (index != -1 && index != 0) //is defined and not first return get(index-1); else return null; } /** * Returns object which is situated in the Vector just * after the object given or null * * @param a object to find * @return object before the object given */ public O after(O a) { int index = indexOf(a); if (index != -1 && index != size()-1) //is defined and not last return get(index+1); else return null; } /** * Returns the content of the vector in an array * * @return array of objects */ public O[] toArray() { @SuppressWarnings("unchecked") O[] a = (O[]) new Object[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = vi.getObject(); vi = vi.getNext(); } return a; } /** 
* Returns the content of the vector in an array of Integers * * @return array of int */ public int[] toIntArray() { int[] a = new int[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = (Integer) vi.getObject(); vi = vi.getNext(); } return a; } /** * Returns the content of the vector in an array of Integers * * @return array of int */ public long[] toLongArray() { long[] a = new long[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = (Long) vi.getObject(); vi = vi.getNext(); } return a; } /** * Returns the content of the vector in an array of Items * * @return array of Item */ public Item[] toItemArray() { Item[] a = new Item[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = (Item) vi.getObject(); vi = vi.getNext(); } return a; } /** * Returns the content of the vector in an array of Distributors * * @return array of Distributor */ public Distributor[] toDistributorArray() { Distributor[] a = new Distributor[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = (Distributor) vi.getObject(); vi = vi.getNext(); } return a; } /** * Returns the content of the vector in an array of Strings * * @return array of Strings */ public String[] toStringArray() { String[] a = new String[count]; VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { a[i] = (String) vi.getObject(); vi = vi.getNext(); } return a; } /** * Returns the content of the vector in a String * * @return string */ public String toString() { String s = new String(); VectorItem<O> vi = first; for (int i=0;i<=count-1;i++) { s += vi.getObject().toString()+ Static.defaultSeparator; vi = vi.getNext(); } return s; } }
/* Copyright (c) 2010, NullNoname All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of NullNoname nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package mu.nu.nullpo.gui.sdl;

import mu.nu.nullpo.gui.GameKeyDummy;
import mu.nu.nullpo.util.CustomProperties;

import sdljava.event.SDLKey;
import sdljava.joystick.HatState;

/**
 * Key input state manager for SDL.
 * Each instance tracks one player's buttons; per-button press duration is
 * accumulated in {@code inputstate} (0 = not held, N = held for N updates),
 * which the base class presumably uses for pressed/held queries.
 */
public class GameKeySDL extends GameKeyDummy {
	/** Key input state (Used by all game states) */
	public static GameKeySDL gamekey[];

	/**
	 * Default key mappings.
	 * Indexed as [mode][type][button]:
	 * mode 0 = ingame, mode 1 = menu;
	 * type 0 = Blockbox, type 1 = Guideline games, type 2 = NullpoMino classic;
	 * button index order matches the BUTTON_* constants of the base class.
	 */
	public static int[][][] DEFAULTKEYS =
	{
		// Ingame
		{
			// Blockbox type
			{
				SDLKey.SDLK_UP,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_z,SDLKey.SDLK_x,SDLKey.SDLK_a,SDLKey.SDLK_SPACE,SDLKey.SDLK_d,SDLKey.SDLK_s,
				SDLKey.SDLK_F12,SDLKey.SDLK_ESCAPE,SDLKey.SDLK_F11,SDLKey.SDLK_F10,SDLKey.SDLK_n,SDLKey.SDLK_F5
			},
			// Guideline games type
			{
				SDLKey.SDLK_SPACE,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_z,SDLKey.SDLK_UP,SDLKey.SDLK_c,SDLKey.SDLK_LSHIFT,SDLKey.SDLK_x,SDLKey.SDLK_v,SDLKey.SDLK_F12,
				SDLKey.SDLK_ESCAPE,SDLKey.SDLK_F11,SDLKey.SDLK_F10,SDLKey.SDLK_n,SDLKey.SDLK_F5
			},
			// NullpoMino classic type
			{
				SDLKey.SDLK_UP,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_a,SDLKey.SDLK_s,SDLKey.SDLK_d,SDLKey.SDLK_z,SDLKey.SDLK_x,SDLKey.SDLK_c,
				SDLKey.SDLK_ESCAPE,SDLKey.SDLK_F1,SDLKey.SDLK_F12,SDLKey.SDLK_F11,SDLKey.SDLK_n,SDLKey.SDLK_F10
			},
		},
		// Menu
		{
			// Blockbox type
			{
				SDLKey.SDLK_UP,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_RETURN,SDLKey.SDLK_ESCAPE,SDLKey.SDLK_a,SDLKey.SDLK_SPACE,SDLKey.SDLK_d,SDLKey.SDLK_s,
				SDLKey.SDLK_F12,SDLKey.SDLK_F1,SDLKey.SDLK_F11,SDLKey.SDLK_F10,SDLKey.SDLK_n,SDLKey.SDLK_F5
			},
			// Guideline games type
			{
				SDLKey.SDLK_UP,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_RETURN,SDLKey.SDLK_ESCAPE,SDLKey.SDLK_c,SDLKey.SDLK_LSHIFT,SDLKey.SDLK_x,SDLKey.SDLK_v,
				SDLKey.SDLK_F12,SDLKey.SDLK_F1,SDLKey.SDLK_F11,SDLKey.SDLK_F10,SDLKey.SDLK_n,SDLKey.SDLK_F5
			},
			// NullpoMino classic type
			{
				SDLKey.SDLK_UP,SDLKey.SDLK_DOWN,SDLKey.SDLK_LEFT,SDLKey.SDLK_RIGHT,
				SDLKey.SDLK_a,SDLKey.SDLK_s,SDLKey.SDLK_d,SDLKey.SDLK_z,SDLKey.SDLK_x,SDLKey.SDLK_c,
				SDLKey.SDLK_ESCAPE,SDLKey.SDLK_F1,SDLKey.SDLK_F12,SDLKey.SDLK_F11,SDLKey.SDLK_n,SDLKey.SDLK_F10
			},
		},
	};

	/**
	 * Init everything.
	 * Creates the two shared per-player instances (player 0 and player 1).
	 */
	public static void initGlobalGameKeySDL() {
		gamekey = new GameKeySDL[2];
		gamekey[0] = new GameKeySDL(0);
		gamekey[1] = new GameKeySDL(1);
	}

	/**
	 * Default constructor
	 */
	public GameKeySDL() {
		super();
	}

	/**
	 * Constructor with player number param
	 * @param pl Player number
	 */
	public GameKeySDL(int pl) {
		super(pl);
	}

	/**
	 * Update button input status (keyboard only, menu keymap).
	 * @param keyboard Keyboard input array
	 */
	public void update(boolean[] keyboard) {
		update(keyboard, null, 0, 0, null, false);
	}

	/**
	 * Update button input status (keyboard only).
	 * @param keyboard Keyboard input array
	 * @param ingame true if ingame
	 */
	public void update(boolean[] keyboard, boolean ingame) {
		update(keyboard, null, 0, 0, null, ingame);
	}

	/**
	 * Update button input status (keyboard + joystick, menu keymap).
	 * @param keyboard Keyboard input array
	 * @param joyButton Joystick button input array (Can be null)
	 * @param joyX Joystick X
	 * @param joyY Joystick Y
	 * @param hat Joystick HatState (Can be null)
	 */
	public void update(boolean[] keyboard, boolean[] joyButton, int joyX, int joyY, HatState hat) {
		update(keyboard, joyButton, joyX, joyY, hat, false);
	}

	/**
	 * Update button input status. For each logical button, increments its
	 * held-duration counter while any mapped source (keyboard key, joystick
	 * axis past joyBorder, POV hat, or joystick button) is active, and resets
	 * it to 0 otherwise.
	 * @param keyboard Keyboard input array (indexed by SDL keycode)
	 * @param joyButton Joystick button input array (Can be null)
	 * @param joyX Joystick X
	 * @param joyY Joystick Y
	 * @param hat Joystick HatState (Can be null)
	 * @param ingame true if ingame (selects keymap over keymapNav)
	 */
	public void update(boolean[] keyboard, boolean[] joyButton, int joyX, int joyY, HatState hat, boolean ingame) {
		for(int i = 0; i < MAX_BUTTON; i++) {
			// pick the ingame or menu keymap for this button
			int[] kmap = ingame ? keymap : keymapNav;
			boolean flag = keyboard[kmap[i]];

			if(i == BUTTON_UP) {
				// Up: key, Y axis past negative threshold, or hat up
				if( (flag) || (joyY < -joyBorder) || ((hat != null) && (hat.hatUp())) ) {
					inputstate[i]++;
				} else {
					inputstate[i] = 0;
				}
			} else if(i == BUTTON_DOWN) {
				// Down: key, Y axis past positive threshold, or hat down
				if( (flag) || (joyY > joyBorder) || ((hat != null) && (hat.hatDown())) ) {
					inputstate[i]++;
				} else {
					inputstate[i] = 0;
				}
			} else if(i == BUTTON_LEFT) {
				// Left: key, X axis past negative threshold, or hat left
				if((flag) || (joyX < -joyBorder) || ((hat != null) && (hat.hatLeft())) ) {
					inputstate[i]++;
				} else {
					inputstate[i] = 0;
				}
			} else if(i == BUTTON_RIGHT) {
				// Right: key, X axis past positive threshold, or hat right
				if((flag) || (joyX > joyBorder) || ((hat != null) && (hat.hatRight())) ) {
					inputstate[i]++;
				} else {
					inputstate[i] = 0;
				}
			} else {
				// Misc buttons: keyboard or mapped joystick button
				boolean flag2 = false;

				if(joyButton != null) {
					// buttonmap[i] may exceed the device's button count;
					// out-of-range simply means "not pressed"
					try {
						flag2 = joyButton[buttonmap[i]];
					} catch (ArrayIndexOutOfBoundsException e) {}
				}

				if((flag) || (flag2)) {
					inputstate[i]++;
				} else {
					inputstate[i] = 0;
				}
			}
		}
	}

	/**
	 * Load key settings
	 * @param prop Property file to read from
	 */
	public void loadConfig(CustomProperties prop) {
		super.loadConfig(prop);
		// per-player joystick axis dead-zone threshold (default 0)
		joyBorder = prop.getProperty("joyBorder.p" + player, 0);
	}

	/**
	 * Reset keyboard settings to default (Uses Blockbox type settings)
	 */
	public void loadDefaultKeymap() {
		loadDefaultKeymap(0);
	}

	/**
	 * Reset keyboard settings to default
	 * @param type Settings type (0=Blockbox 1=Guideline 2=NullpoMino-Classic)
	 */
	public void loadDefaultKeymap(int type) {
		loadDefaultGameKeymap(type);
		loadDefaultMenuKeymap(type);
	}

	/**
	 * Reset in-game keyboard settings to default. Menu keys are unchanged.
	 * @param type Settings type (0=Blockbox 1=Guideline 2=NullpoMino-Classic)
	 */
	public void loadDefaultGameKeymap(int type) {
		for(int i = 0; i < keymap.length; i++) {
			keymap[i] = DEFAULTKEYS[0][type][i];
		}
	}

	/**
	 * Reset menu keyboard settings to default. In-game keys are unchanged.
	 * @param type Settings type (0=Blockbox 1=Guideline 2=NullpoMino-Classic)
	 */
	public void loadDefaultMenuKeymap(int type) {
		for(int i = 0; i < keymapNav.length; i++) {
			keymapNav[i] = DEFAULTKEYS[1][type][i];
		}
	}
}
/* * Copyright 2014 Uwe Trottmann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.battlelancer.seriesguide.util; import android.content.ContentProviderOperation; import android.content.ContentValues; import android.content.Context; import android.content.OperationApplicationException; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.preference.PreferenceManager; import android.text.TextUtils; import android.widget.Toast; import com.battlelancer.seriesguide.R; import com.battlelancer.seriesguide.backend.HexagonTools; import com.battlelancer.seriesguide.backend.settings.HexagonSettings; import com.battlelancer.seriesguide.enums.EpisodeFlags; import com.battlelancer.seriesguide.provider.SeriesGuideContract; import com.battlelancer.seriesguide.settings.TraktCredentials; import com.google.api.client.util.DateTime; import com.uwetrottmann.androidutils.AndroidUtils; import com.uwetrottmann.seriesguide.backend.episodes.Episodes; import com.uwetrottmann.seriesguide.backend.episodes.model.Episode; import com.uwetrottmann.seriesguide.backend.episodes.model.EpisodeList; import de.greenrobot.event.EventBus; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import timber.log.Timber; public class EpisodeTools { private static final int EPISODE_MAX_BATCH_SIZE = 500; /** * Lower season or if season is equal has to have a lower episode number. 
Must be watched or * skipped, excludes special episodes (because their release times are spread over all * seasons). */ private static final String SELECTION_PREVIOUS_WATCHED = SeriesGuideContract.Episodes.SEASON + ">0" + " AND " + SeriesGuideContract.Episodes.WATCHED + "!=" + EpisodeFlags.UNWATCHED + " AND (" + SeriesGuideContract.Episodes.SEASON + "<? OR " + "(" + SeriesGuideContract.Episodes.SEASON + "=? AND " + SeriesGuideContract.Episodes.NUMBER + "<?)" + ")"; /** * Order by season, then by number, then by release time. */ private static final String ORDER_PREVIOUS_WATCHED = SeriesGuideContract.Episodes.SEASON + " DESC" + "," + SeriesGuideContract.Episodes.NUMBER + " DESC" + "," + SeriesGuideContract.Episodes.FIRSTAIREDMS + " DESC"; private static final String[] PROJECTION_EPISODE = new String[] { SeriesGuideContract.Episodes._ID }; /** * Checks the database whether there is an entry for this episode. */ public static boolean isEpisodeExists(Context context, int episodeTvdbId) { Cursor query = context.getContentResolver().query( SeriesGuideContract.Episodes.buildEpisodeUri(episodeTvdbId), new String[] { SeriesGuideContract.Episodes._ID }, null, null, null ); if (query == null) { return false; } boolean isExists = query.getCount() > 0; query.close(); return isExists; } public static boolean isCollected(int collectedFlag) { return collectedFlag == 1; } public static boolean isSkipped(int episodeFlags) { return episodeFlags == EpisodeFlags.SKIPPED; } public static boolean isUnwatched(int episodeFlags) { return episodeFlags == EpisodeFlags.UNWATCHED; } public static boolean isWatched(int episodeFlags) { return episodeFlags == EpisodeFlags.WATCHED; } public static void validateFlags(int episodeFlags) { boolean hasValidFlag = false; if (isUnwatched(episodeFlags)) { return; } if (isSkipped(episodeFlags)) { return; } if (isWatched(episodeFlags)) { return; } if (!hasValidFlag) { throw new IllegalArgumentException( "Did not pass a valid episode flag. 
See EpisodeFlags class for details."); } } public static void episodeWatched(Context context, int showTvdbId, int episodeTvdbId, int season, int episode, int episodeFlags) { validateFlags(episodeFlags); execute(context, new EpisodeWatchedType(context, showTvdbId, episodeTvdbId, season, episode, episodeFlags) ); } public static void episodeCollected(Context context, int showTvdbId, int episodeTvdbId, int season, int episode, boolean isFlag) { execute(context, new EpisodeCollectedType(context, showTvdbId, episodeTvdbId, season, episode, isFlag ? 1 : 0) ); } /** * Flags all episodes released previous to this one as watched (excluding episodes with no * release date). */ public static void episodeWatchedPrevious(Context context, int showTvdbId, long episodeFirstAired) { execute(context, new EpisodeWatchedPreviousType(context, showTvdbId, episodeFirstAired) ); } public static void seasonWatched(Context context, int showTvdbId, int seasonTvdbId, int season, int episodeFlags) { validateFlags(episodeFlags); execute(context, new SeasonWatchedType(context, showTvdbId, seasonTvdbId, season, episodeFlags) ); } public static void seasonCollected(Context context, int showTvdbId, int seasonTvdbId, int season, boolean isFlag) { execute(context, new SeasonCollectedType(context, showTvdbId, seasonTvdbId, season, isFlag ? 1 : 0) ); } public static void showWatched(Context context, int showTvdbId, boolean isFlag) { execute(context, new ShowWatchedType(context, showTvdbId, isFlag ? 1 : 0) ); } public static void showCollected(Context context, int showTvdbId, boolean isFlag) { execute(context, new ShowCollectedType(context, showTvdbId, isFlag ? 1 : 0) ); } /** * Run the task on the thread pool. 
*/ private static void execute(@Nonnull Context context, @Nonnull FlagType type) { AndroidUtils.executeOnPool( new EpisodeFlagTask(context.getApplicationContext(), type) ); } public enum EpisodeAction { EPISODE_WATCHED, EPISODE_COLLECTED, EPISODE_WATCHED_PREVIOUS, SEASON_WATCHED, SEASON_COLLECTED, SHOW_WATCHED, SHOW_COLLECTED } /** * Sent once the database ops are finished, sending to trakt may still be in progress or queued * due to no available connection. */ public static class EpisodeActionCompletedEvent { public FlagType mType; public EpisodeActionCompletedEvent(FlagType type) { mType = type; } } public static abstract class FlagType { protected Context mContext; protected EpisodeAction mAction; protected int mShowTvdbId; protected int mEpisodeFlag; public FlagType(Context context, int showTvdbId) { mContext = context; mShowTvdbId = showTvdbId; } public abstract Uri getUri(); public abstract String getSelection(); /** * Builds a list of episodes ready to upload to hexagon. However, the show TVDb id is not * set. It should be set in a wrapping {@link com.uwetrottmann.seriesguide.backend.episodes.model.EpisodeList}. 
*/ public List<Episode> getEpisodesForHexagon() { return buildEpisodeList(); } public abstract List<FlagTapeEntry.Flag> getEpisodesForTrakt(); public int getShowTvdbId() { return mShowTvdbId; } private List<Episode> buildEpisodeList() { List<Episode> episodes = new ArrayList<>(); // determine uri Uri uri = getUri(); String selection = getSelection(); // query and add episodes to list final Cursor episodeCursor = mContext.getContentResolver().query( uri, new String[] { SeriesGuideContract.Episodes.SEASON, SeriesGuideContract.Episodes.NUMBER }, selection, null, null ); if (episodeCursor != null) { while (episodeCursor.moveToNext()) { Episode episode = new Episode(); setEpisodeProperties(episode); episode.setSeasonNumber(episodeCursor.getInt(0)); episode.setEpisodeNumber(episodeCursor.getInt(1)); episodes.add(episode); } episodeCursor.close(); } return episodes; } /** * Set any additional properties besides show id, season or episode number. */ protected abstract void setEpisodeProperties(Episode episode); /** * Builds a list of {@link com.battlelancer.seriesguide.util.FlagTapeEntry.Flag} objects to * pass to a {@link com.battlelancer.seriesguide.util.FlagTapedTask} to submit to * trakt. 
*/ protected List<FlagTapeEntry.Flag> createEpisodeFlags() { List<FlagTapeEntry.Flag> episodes = new ArrayList<>(); // determine uri Uri uri = getUri(); String selection = getSelection(); // query and add episodes to list final Cursor episodeCursor = mContext.getContentResolver().query( uri, new String[] { SeriesGuideContract.Episodes.SEASON, SeriesGuideContract.Episodes.NUMBER }, selection, null, null ); if (episodeCursor != null) { while (episodeCursor.moveToNext()) { episodes.add(new FlagTapeEntry.Flag(episodeCursor.getInt(0), episodeCursor.getInt(1))); } episodeCursor.close(); } return episodes; } /** * Return the column which should get updated, either {@link com.battlelancer.seriesguide.provider.SeriesGuideContract.Episodes} * .WATCHED or {@link * com.battlelancer.seriesguide.provider.SeriesGuideContract.Episodes}.COLLECTED. */ protected abstract String getColumn(); protected abstract ContentValues getContentValues(); /** * Builds and executes the database op required to flag episodes in the local database. */ public void updateDatabase() { // determine query uri Uri uri = getUri(); if (uri == null) { return; } // build and execute query ContentValues values = getContentValues(); mContext.getContentResolver().update(uri, values, getSelection(), null); // notify the content provider for udpates mContext.getContentResolver() .notifyChange(SeriesGuideContract.Episodes.CONTENT_URI, null); mContext.getContentResolver() .notifyChange(SeriesGuideContract.ListItems.CONTENT_WITH_DETAILS_URI, null); } /** * Determines the last watched episode and returns its TVDb id or -1 if it can't be * determined. */ protected abstract int getLastWatchedEpisodeTvdbId(); /** * Saves the last watched episode for a show to the database. 
*/ public void storeLastEpisode() { int lastWatchedId = getLastWatchedEpisodeTvdbId(); if (lastWatchedId != -1) { // set latest watched ContentValues values = new ContentValues(); values.put(SeriesGuideContract.Shows.LASTWATCHEDID, lastWatchedId); mContext.getContentResolver().update( SeriesGuideContract.Shows.buildShowUri(String.valueOf(mShowTvdbId)), values, null, null); } } /** * Returns the text which should be prepended to the submission status message. Tells e.g. * which episode was flagged watched. */ public abstract String getNotificationText(); } /** * Flagging single episodes watched or collected. */ public static abstract class EpisodeType extends FlagType { protected int mEpisodeTvdbId; protected int mSeason; protected int mEpisode; public EpisodeType(Context context, int showTvdbId, int episodeTvdbId, int season, int episode, int episodeFlags) { super(context, showTvdbId); mEpisodeTvdbId = episodeTvdbId; mSeason = season; mEpisode = episode; mEpisodeFlag = episodeFlags; } @Override public Uri getUri() { return SeriesGuideContract.Episodes.buildEpisodeUri(String.valueOf(mEpisodeTvdbId)); } @Override public String getSelection() { return null; } @Override protected ContentValues getContentValues() { ContentValues values = new ContentValues(); values.put(getColumn(), mEpisodeFlag); return values; } @Override public List<Episode> getEpisodesForHexagon() { List<Episode> episodes = new ArrayList<>(); Episode episode = new Episode(); setEpisodeProperties(episode); episode.setSeasonNumber(mSeason); episode.setEpisodeNumber(mEpisode); episodes.add(episode); return episodes; } @Override public List<FlagTapeEntry.Flag> getEpisodesForTrakt() { List<FlagTapeEntry.Flag> episodes = new ArrayList<>(); // flag a single episode episodes.add(new FlagTapeEntry.Flag(mSeason, mEpisode)); return episodes; } } public static class EpisodeWatchedType extends EpisodeType { public EpisodeWatchedType(Context context, int showTvdbId, int episodeTvdbId, int season, int episode, int 
episodeFlags) { super(context, showTvdbId, episodeTvdbId, season, episode, episodeFlags); mAction = EpisodeAction.EPISODE_WATCHED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setWatchedFlag(mEpisodeFlag); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.WATCHED; } @Override protected int getLastWatchedEpisodeTvdbId() { if (isUnwatched(mEpisodeFlag)) { // unwatched episode int lastWatchedId = -1; // don't change last watched episode by default // if modified episode is identical to last watched one (e.g. was just watched), // find an appropriate last watched episode final Cursor show = mContext.getContentResolver().query( SeriesGuideContract.Shows.buildShowUri(String.valueOf(mShowTvdbId)), new String[] { SeriesGuideContract.Shows._ID, SeriesGuideContract.Shows.LASTWATCHEDID }, null, null, null ); if (show != null) { // identical to last watched episode? if (show.moveToFirst() && show.getInt(1) == mEpisodeTvdbId) { if (mSeason == 0) { // keep last watched (= this episode) if we got a special show.close(); return -1; } lastWatchedId = 0; // re-set if we don't find one // get latest watched before this one String season = String.valueOf(mSeason); final Cursor latestWatchedEpisode = mContext.getContentResolver() .query(SeriesGuideContract.Episodes.buildEpisodesOfShowUri(String .valueOf(mShowTvdbId)), PROJECTION_EPISODE, SELECTION_PREVIOUS_WATCHED, new String[] { season, season, String.valueOf(mEpisode) }, ORDER_PREVIOUS_WATCHED ); if (latestWatchedEpisode != null) { if (latestWatchedEpisode.moveToFirst()) { lastWatchedId = latestWatchedEpisode.getInt(0); } latestWatchedEpisode.close(); } } show.close(); } return lastWatchedId; } else { // watched or skipped episode return mEpisodeTvdbId; } } @Override public String getNotificationText() { if (isSkipped(mEpisodeFlag)) { // skipping is not sent to trakt, no need for a message return null; } // show episode seen/unseen message String number = 
Utils.getEpisodeNumber(mContext, mSeason, mEpisode); return mContext.getString( isWatched(mEpisodeFlag) ? R.string.trakt_seen : R.string.trakt_notseen, number ); } } public static class EpisodeCollectedType extends EpisodeType { public EpisodeCollectedType(Context context, int showTvdbId, int episodeTvdbId, int season, int episode, int episodeFlags) { super(context, showTvdbId, episodeTvdbId, season, episode, episodeFlags); mAction = EpisodeAction.EPISODE_COLLECTED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setIsInCollection(isCollected(mEpisodeFlag)); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.COLLECTED; } @Override protected int getLastWatchedEpisodeTvdbId() { // we don't care return -1; } @Override public String getNotificationText() { String number = Utils.getEpisodeNumber(mContext, mSeason, mEpisode); return mContext.getString(mEpisodeFlag == 1 ? R.string.trakt_collected : R.string.trakt_notcollected, number); } } /** * Flagging whole seasons watched or collected. 
*/ public static abstract class SeasonType extends FlagType { protected int mSeasonTvdbId; protected int mSeason; public SeasonType(Context context, int showTvdbId, int seasonTvdbId, int season, int episodeFlags) { super(context, showTvdbId); mSeasonTvdbId = seasonTvdbId; mSeason = season; mEpisodeFlag = episodeFlags; } public int getSeasonTvdbId() { return mSeasonTvdbId; } @Override public Uri getUri() { return SeriesGuideContract.Episodes.buildEpisodesOfSeasonUri( String.valueOf(mSeasonTvdbId)); } @Override public String getSelection() { return null; } @Override protected ContentValues getContentValues() { ContentValues values = new ContentValues(); values.put(getColumn(), mEpisodeFlag); return values; } @Override public List<FlagTapeEntry.Flag> getEpisodesForTrakt() { if (mEpisodeFlag != 0) { // watched, skipped or collected season List<FlagTapeEntry.Flag> episodes = new ArrayList<>(); episodes.add(new FlagTapeEntry.Flag(mSeason, -1)); return episodes; } else { // unwatched, not collected season return createEpisodeFlags(); } } } public static class SeasonWatchedType extends SeasonType { public SeasonWatchedType(Context context, int showTvdbId, int seasonTvdbId, int season, int episodeFlags) { super(context, showTvdbId, seasonTvdbId, season, episodeFlags); mAction = EpisodeAction.SEASON_WATCHED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setWatchedFlag(mEpisodeFlag); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.WATCHED; } @Override protected int getLastWatchedEpisodeTvdbId() { if (isUnwatched(mEpisodeFlag)) { // unwatched season // just reset return 0; } else { // watched or skipped season int lastWatchedId = -1; // get the last flagged episode of the season final Cursor seasonEpisodes = mContext.getContentResolver().query( SeriesGuideContract.Episodes.buildEpisodesOfSeasonUri( String.valueOf(mSeasonTvdbId)), PROJECTION_EPISODE, null, null, SeriesGuideContract.Episodes.NUMBER + " DESC" ); if 
(seasonEpisodes != null) { if (seasonEpisodes.moveToFirst()) { lastWatchedId = seasonEpisodes.getInt(0); } seasonEpisodes.close(); } return lastWatchedId; } } @Override public String getNotificationText() { if (isSkipped(mEpisodeFlag)) { // skipping is not sent to trakt, no need for a message return null; } String number = Utils.getEpisodeNumber(mContext, mSeason, -1); return mContext.getString( isWatched(mEpisodeFlag) ? R.string.trakt_seen : R.string.trakt_notseen, number ); } } public static class SeasonCollectedType extends SeasonType { public SeasonCollectedType(Context context, int showTvdbId, int seasonTvdbId, int season, int episodeFlags) { super(context, showTvdbId, seasonTvdbId, season, episodeFlags); mAction = EpisodeAction.SEASON_COLLECTED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setIsInCollection(isCollected(mEpisodeFlag)); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.COLLECTED; } @Override protected int getLastWatchedEpisodeTvdbId() { return -1; } @Override public String getNotificationText() { String number = Utils.getEpisodeNumber(mContext, mSeason, -1); return mContext.getString(mEpisodeFlag == 1 ? 
R.string.trakt_collected : R.string.trakt_notcollected, number); } } public static abstract class ShowType extends FlagType { public ShowType(Context context, int showTvdbId, int episodeFlags) { super(context, showTvdbId); mEpisodeFlag = episodeFlags; } @Override public Uri getUri() { return SeriesGuideContract.Episodes.buildEpisodesOfShowUri(String.valueOf(mShowTvdbId)); } @Override public String getSelection() { return null; } @Override protected ContentValues getContentValues() { ContentValues values = new ContentValues(); values.put(getColumn(), mEpisodeFlag); return values; } @Override public List<FlagTapeEntry.Flag> getEpisodesForTrakt() { // only for removing flags we need single episodes if (mEpisodeFlag == 0) { return createEpisodeFlags(); } else { return null; } } @Override public String getNotificationText() { return null; } } public static class ShowWatchedType extends ShowType { public ShowWatchedType(Context context, int showTvdbId, int episodeFlags) { super(context, showTvdbId, episodeFlags); mAction = EpisodeAction.SHOW_WATCHED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setWatchedFlag(mEpisodeFlag); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.WATCHED; } @Override protected int getLastWatchedEpisodeTvdbId() { if (isUnwatched(mEpisodeFlag)) { // just reset return 0; } else { // we don't care return -1; } } } public static class ShowCollectedType extends ShowType { public ShowCollectedType(Context context, int showTvdbId, int episodeFlags) { super(context, showTvdbId, episodeFlags); mAction = EpisodeAction.SHOW_COLLECTED; } @Override protected void setEpisodeProperties(Episode episode) { episode.setIsInCollection(isCollected(mEpisodeFlag)); } @Override protected String getColumn() { return SeriesGuideContract.Episodes.COLLECTED; } @Override protected int getLastWatchedEpisodeTvdbId() { // we don't care return -1; } } public static class EpisodeWatchedPreviousType extends FlagType { 
private long mEpisodeFirstAired;

public EpisodeWatchedPreviousType(Context context, int showTvdbId, long episodeFirstAired) {
    super(context, showTvdbId);
    mEpisodeFirstAired = episodeFirstAired;
    mAction = EpisodeAction.EPISODE_WATCHED_PREVIOUS;
}

@Override
public Uri getUri() {
    return SeriesGuideContract.Episodes.buildEpisodesOfShowUri(String.valueOf(mShowTvdbId));
}

@Override
public String getSelection() {
    // episodes that aired before the given time, excluding those without a release date (ms > 0)
    return SeriesGuideContract.Episodes.FIRSTAIREDMS + "<" + mEpisodeFirstAired
            + " AND " + SeriesGuideContract.Episodes.FIRSTAIREDMS + ">0";
}

@Override
protected ContentValues getContentValues() {
    ContentValues values = new ContentValues();
    values.put(SeriesGuideContract.Episodes.WATCHED, EpisodeFlags.WATCHED);
    return values;
}

@Override
public List<FlagTapeEntry.Flag> getEpisodesForTrakt() {
    return createEpisodeFlags();
}

@Override
protected void setEpisodeProperties(Episode episode) {
    episode.setWatchedFlag(EpisodeFlags.WATCHED);
}

@Override
protected String getColumn() {
    // not used
    return null;
}

@Override
protected int getLastWatchedEpisodeTvdbId() {
    // we don't care
    return -1;
}

@Override
public String getNotificationText() {
    return null;
}
}

/**
 * Applies a {@link FlagType} in the background: uploads changes to Hexagon,
 * queues a trakt submission and always updates the local database.
 */
private static class EpisodeFlagTask extends AsyncTask<Void, Void, Integer> {

    private final Context mContext;
    private final FlagType mType;

    // set in doInBackground, read back in onPostExecute
    private boolean mIsSendingToTrakt;

    public EpisodeFlagTask(Context context, FlagType type) {
        mContext = context;
        mType = type;
    }

    // Returns -1 when offline (an error toast is shown), 0 on success.
    @Override
    protected Integer doInBackground(Void... params) {
        // upload updated episodes to hexagon
        if (HexagonTools.isSignedIn(mContext)) {
            if (!AndroidUtils.isNetworkConnected(mContext)) {
                return -1;
            }
            uploadToHexagon();
        }

        /*
         * Do not send to trakt if we skipped episodes, this is not supported by trakt.
         * However, if the skipped flag is removed this will be handled identical
         * to flagging as unwatched.
         */
        // check for valid trakt credentials
        mIsSendingToTrakt = !isSkipped(mType.mEpisodeFlag)
                && TraktCredentials.get(mContext).hasCredentials();

        // prepare trakt stuff
        if (mIsSendingToTrakt) {
            if (!AndroidUtils.isNetworkConnected(mContext)) {
                return -1;
            }
            List<FlagTapeEntry.Flag> episodes = mType.getEpisodesForTrakt();
            // convert to boolean flag used by trakt (un/watched, un/collected)
            boolean isFlag = !isUnwatched(mType.mEpisodeFlag);
            // Add a new taped flag task to the tape queue
            FlagTapeEntryQueue.getInstance(mContext).add(
                    new FlagTapeEntry(mType.mAction, mType.mShowTvdbId, episodes, isFlag));
        }

        // always update local database
        mType.updateDatabase();
        mType.storeLastEpisode();

        return 0;
    }

    // Uploads the affected episodes to Hexagon in chunks of EPISODE_MAX_BATCH_SIZE.
    private void uploadToHexagon() {
        List<Episode> batch = mType.getEpisodesForHexagon();

        EpisodeList uploadWrapper = new EpisodeList();
        uploadWrapper.setShowTvdbId(mType.mShowTvdbId);

        // upload in small batches
        List<Episode> smallBatch = new ArrayList<>();
        while (!batch.isEmpty()) {
            // batch small enough? upload right away
            if (batch.size() <= EPISODE_MAX_BATCH_SIZE) {
                uploadWrapper.setEpisodes(batch);
                Upload.flagsToHexagon(mContext, uploadWrapper);
                return;
            }

            // build smaller batch
            for (int count = 0; count < EPISODE_MAX_BATCH_SIZE; count++) {
                if (batch.isEmpty()) {
                    break;
                }
                smallBatch.add(batch.remove(0));
            }

            // upload small batch
            uploadWrapper.setEpisodes(smallBatch);
            Upload.flagsToHexagon(mContext, uploadWrapper);

            // reset
            smallBatch = new ArrayList<>();
        }
    }

    @Override
    protected void onPostExecute(Integer result) {
        if (result == -1) {
            Toast.makeText(mContext, R.string.offline, Toast.LENGTH_LONG).show();
        }
        // display a small toast if submission to trakt was successful
        else if (mIsSendingToTrakt) {
            int status = R.string.trakt_submitqueued;
            if (mType.mAction == EpisodeAction.SHOW_WATCHED
                    || mType.mAction == EpisodeAction.SHOW_COLLECTED
                    || mType.mAction == EpisodeAction.EPISODE_WATCHED_PREVIOUS) {
                // simple ack
                Toast.makeText(mContext,
                        mContext.getString(status),
                        Toast.LENGTH_SHORT).show();
            } else {
                // detailed ack
                String message = mType.getNotificationText();
                Toast.makeText(mContext,
                        message + " " + mContext.getString(status),
                        Toast.LENGTH_SHORT).show();
            }
        }

        EventBus.getDefault().post(new EpisodeActionCompletedEvent(mType));
    }
}

/** Hexagon download helpers. */
public static class Download {

    /**
     * Downloads all episodes changed since the last time this was called and applies changes
     * to the database.
     */
    public static boolean flagsFromHexagon(Context context) {
        List<Episode> episodes;
        boolean hasMoreEpisodes = true;
        String cursor = null;
        // captured before the sync starts so changes made while syncing are picked up next time
        long currentTime = System.currentTimeMillis();
        DateTime lastSyncTime = new DateTime(HexagonSettings.getLastEpisodesSyncTime(context));

        Timber.d("flagsFromHexagon: downloading changed episode flags since " + lastSyncTime);

        while (hasMoreEpisodes) {
            try {
                Episodes.Get request = HexagonTools.getEpisodesService(context).get()
                        .setUpdatedSince(lastSyncTime)
                        .setLimit(EPISODE_MAX_BATCH_SIZE);
                if (!TextUtils.isEmpty(cursor)) {
                    request.setCursor(cursor);
                }

                EpisodeList response = request.execute();
                if (response == null) {
                    // we're done here
                    Timber.d("flagsFromHexagon: response was null, done here");
                    break;
                }

                episodes = response.getEpisodes();

                // check for more items
                if (response.getCursor() != null) {
                    cursor = response.getCursor();
                } else {
                    hasMoreEpisodes = false;
                }
            } catch (IOException e) {
                Timber.e(e, "flagsFromHexagon: failed to download changed episode flags");
                return false;
            }

            if (episodes == null || episodes.size() == 0) {
                // nothing to do here
                break;
            }

            // build batch of episode flag updates
            ArrayList<ContentProviderOperation> batch = new ArrayList<>();
            for (Episode episode : episodes) {
                ContentValues values = new ContentValues();
                if (episode.getWatchedFlag() != null) {
                    values.put(SeriesGuideContract.Episodes.WATCHED, episode.getWatchedFlag());
                }
                if (episode.getIsInCollection() != null) {
                    values.put(SeriesGuideContract.Episodes.COLLECTED,
                            episode.getIsInCollection());
                }

                // episodes are matched by show id + season number + episode number
                ContentProviderOperation op = ContentProviderOperation
                        .newUpdate(SeriesGuideContract.Episodes.CONTENT_URI)
                        .withSelection(SeriesGuideContract.Shows.REF_SHOW_ID + "="
                                + episode.getShowTvdbId() + " AND "
                                + SeriesGuideContract.Episodes.SEASON + "="
                                + episode.getSeasonNumber() + " AND "
                                + SeriesGuideContract.Episodes.NUMBER + "="
                                + episode.getEpisodeNumber(), null)
                        .withValues(values)
                        .build();
                batch.add(op);
            }

            // execute database update
            try {
                DBUtils.applyInSmallBatches(context, batch);
            } catch (OperationApplicationException e) {
                Timber.e(e, "flagsFromHexagon: failed to apply changed episode flag updates");
                return false;
            }
        }

        // store new last sync time
        PreferenceManager.getDefaultSharedPreferences(context).edit()
                .putLong(HexagonSettings.KEY_LAST_SYNC_EPISODES, currentTime)
                .commit();

        return true;
    }

    /**
     * Downloads watched, skipped or collected episodes of this show from Hexagon and applies
     * those flags to episodes in the database.
     *
     * @return Whether the download was successful and all changes were applied to the database.
     */
    public static boolean flagsFromHexagon(Context context, int showTvdbId) {
        Timber.d("flagsFromHexagon: downloading episode flags for show " + showTvdbId);
        List<Episode> episodes;
        boolean hasMoreEpisodes = true;
        String cursor = null;

        Uri episodesOfShowUri = SeriesGuideContract.Episodes.buildEpisodesOfShowUri(showTvdbId);

        while (hasMoreEpisodes) {
            // abort if connection is lost
            if (!AndroidUtils.isNetworkConnected(context)) {
                Timber.e("flagsFromHexagon: no network connection");
                return false;
            }

            try {
                // build request
                Episodes.Get request = HexagonTools.getEpisodesService(context).get()
                        .setShowTvdbId(showTvdbId)
                        .setLimit(EPISODE_MAX_BATCH_SIZE);
                if (!TextUtils.isEmpty(cursor)) {
                    request.setCursor(cursor);
                }

                // execute request
                EpisodeList response = request.execute();
                if (response == null) {
                    break;
                }

                episodes = response.getEpisodes();

                // check for more items
                if (response.getCursor() != null) {
                    cursor = response.getCursor();
                } else {
                    hasMoreEpisodes = false;
                }
            } catch (IOException e) {
                Timber.e(e, "flagsFromHexagon: failed to download episode flags for show "
                        + showTvdbId);
                return false;
            }

            if (episodes == null || episodes.size() == 0) {
                // nothing to do here
                break;
            }

            // build batch of episode flag updates
            ArrayList<ContentProviderOperation> batch = new ArrayList<>();
            for (Episode episode : episodes) {
                ContentValues values = new ContentValues();
                if (episode.getWatchedFlag() != null
                        && episode.getWatchedFlag() != EpisodeFlags.UNWATCHED) {
                    values.put(SeriesGuideContract.Episodes.WATCHED, episode.getWatchedFlag());
                }
                if (episode.getIsInCollection() != null && episode.getIsInCollection()) {
                    values.put(SeriesGuideContract.Episodes.COLLECTED,
                            episode.getIsInCollection());
                }

                if (values.size() == 0) {
                    // skip if episode has neither a watched flag or is in collection
                    continue;
                }

                ContentProviderOperation op = ContentProviderOperation
                        .newUpdate(episodesOfShowUri)
                        .withSelection(SeriesGuideContract.Episodes.SEASON + "="
                                + episode.getSeasonNumber() + " AND "
                                + SeriesGuideContract.Episodes.NUMBER + "="
                                + episode.getEpisodeNumber(), null)
                        .withValues(values)
                        .build();
                batch.add(op);
            }

            // execute database update
            try {
                DBUtils.applyInSmallBatches(context, batch);
            } catch (OperationApplicationException e) {
                Timber.e(e, "flagsFromHexagon: failed to apply episode flag updates for show "
                        + showTvdbId);
                return false;
            }
        }

        return true;
    }
}

/** Hexagon upload helpers. */
public static class Upload {

    // Cursor projection and column indices for episodes carrying any flag worth uploading.
    private interface FlaggedEpisodesQuery {
        String[] PROJECTION = new String[] {
                SeriesGuideContract.Episodes._ID,
                SeriesGuideContract.Episodes.SEASON,
                SeriesGuideContract.Episodes.NUMBER,
                SeriesGuideContract.Episodes.WATCHED,
                SeriesGuideContract.Episodes.COLLECTED
        };

        String SELECTION = SeriesGuideContract.Episodes.WATCHED + "!=" + EpisodeFlags.UNWATCHED
                + " OR " + SeriesGuideContract.Episodes.COLLECTED + "=1";

        int SEASON = 1;
        int NUMBER = 2;
        int WATCHED = 3;
        int IN_COLLECTION = 4;
    }

    /**
     * Uploads all watched, skipped or collected episodes of this show to Hexagon.
     *
     * @return Whether the upload was successful.
     */
    public static boolean flagsToHexagon(Context context, int showTvdbId) {
        Timber.d("flagsToHexagon: uploading episode flags for show " + showTvdbId);

        // query for watched, skipped or collected episodes
        Cursor query = context.getContentResolver()
                .query(SeriesGuideContract.Episodes.buildEpisodesOfShowUri(showTvdbId),
                        FlaggedEpisodesQuery.PROJECTION, FlaggedEpisodesQuery.SELECTION,
                        null, null
                );
        if (query == null) {
            Timber.e("flagsToHexagon: episode flags query was null");
            return false;
        }
        if (query.getCount() == 0) {
            Timber.d("flagsToHexagon: no episode flags to upload");
            query.close();
            return true;
        }

        // build list of episodes to upload
        List<Episode> episodes = new ArrayList<>();
        while (query.moveToNext()) {
            Episode episode = new Episode();
            episode.setSeasonNumber(query.getInt(FlaggedEpisodesQuery.SEASON));
            episode.setEpisodeNumber(query.getInt(FlaggedEpisodesQuery.NUMBER));
            int watchedFlag = query.getInt(FlaggedEpisodesQuery.WATCHED);
            if (!EpisodeTools.isUnwatched(watchedFlag)) {
                episode.setWatchedFlag(watchedFlag);
            }
            boolean isInCollection = EpisodeTools.isCollected(
                    query.getInt(FlaggedEpisodesQuery.IN_COLLECTION));
            if (isInCollection) {
                episode.setIsInCollection(true);
            }

            episodes.add(episode);

            // upload a batch
            if (episodes.size() == EPISODE_MAX_BATCH_SIZE || query.isLast()) {
                EpisodeList episodeList = new EpisodeList();
                episodeList.setEpisodes(episodes);
                episodeList.setShowTvdbId(showTvdbId);

                try {
                    HexagonTools.getEpisodesService(context).save(episodeList).execute();
                } catch (IOException e) {
                    // abort
                    Timber.e(e, "flagsToHexagon: failed to upload episode flags for show "
                            + showTvdbId);
                    query.close();
                    return false;
                }

                // clear array
                episodes = new ArrayList<>();
            }
        }

        query.close();

        return true;
    }

    /**
     * Upload the given episodes to Hexagon. Assumes the given episode wrapper has valid
     * values.
     */
    public static boolean flagsToHexagon(Context context, EpisodeList episodes) {
        try {
            HexagonTools.getEpisodesService(context).save(episodes).execute();
        } catch (IOException e) {
            Timber.e(e, "flagsToHexagon: failed to upload episodes for show "
                    + episodes.getShowTvdbId());
            return false;
        }
        return true;
    }
}
}
/*
 * Copyright 2008 The MITRE Corporation (http://www.mitre.org/). All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mitre.mrald.query;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import java.util.TreeMap;

import org.mitre.mrald.control.AbstractStep;
import org.mitre.mrald.control.MsgObject;
import org.mitre.mrald.parser.ParserElement;
import org.mitre.mrald.util.Config;
import org.mitre.mrald.util.FormTags;
import org.mitre.mrald.util.MiscUtils;
import org.mitre.mrald.util.MraldException;
import org.mitre.mrald.util.MraldOutFile;

/**
 * This QueryBuilder class builds the query string from a list of QueryElement
 * objects. The SELECT, FROM, WHERE, GROUP BY and ORDER BY fragments are
 * collected per element and then concatenated in SQL clause order.
 *
 *@author Brian Blake
 *@created February 5, 2001
 *@version 1.0
 *@see mrald.presentation.QueryElement
 */
public class QueryBuilder extends AbstractStep
{
    // SQLComponents: each list accumulates fragments for one SQL clause.
    private ArrayList<String> fromStrings = new ArrayList<String>();
    private ArrayList<String> groupByStrings = new ArrayList<String>();
    private MsgObject msg;
    private ArrayList<String> orderByStrings = new ArrayList<String>();
    // private int queryLimit = -1;
    private ArrayList<String> selectStrings = new ArrayList<String>();
    private ArrayList<String> whereAndStrings = new ArrayList<String>();
    //private ArrayList whereOrStrings = new ArrayList();

    /**
     * Constructor for the QueryBuilder object
     *
     *@since
     */
    public QueryBuilder() { }

    public QueryBuilder(MsgObject msg)
    {
        this.msg = msg;
    }

    /**
     * This method is part of the AbstractStep interface and is called from the
     * workflow controller. Builds the query components from the working
     * objects and stores the final SQL string on the message object.
     *
     *@param msgObject the workflow message carrying form values and elements
     *@exception org.mitre.mrald.control.WorkflowStepException wraps any
     *      MraldException raised while building the query
     */
    public void execute( MsgObject msgObject )
        throws org.mitre.mrald.control.WorkflowStepException
    {
        try
        {
            msg = msgObject;
            setQueryLimit();
            buildQueryComponents( msgObject.getWorkingObjects() );
            msgObject.setQuery( buildQuery() );
        }
        catch ( MraldException e )
        {
            throw new org.mitre.mrald.control.WorkflowStepException( e );
        }
    }

    /**
     * This method sets the Query output limit for referencing the output formatting.
     * NOTE(review): the limit assignment itself is commented out, so this
     * currently only validates that "outputSize" is present — confirm intent.
     *
     *@exception MraldException if the "outputSize" form value is missing
     */
    private void setQueryLimit()
        throws MraldException
    {
        try
        {
            String[] qSize = msg.getValue( "outputSize" );
            if ( qSize[0].startsWith( "line" ) )
            {
                // String[] limit = msg.getValue( "outputLinesCount" );
                // queryLimit = Integer.parseInt( limit[0] ) + 1;
            }
        }
        catch ( Exception nfe )
        {
            MraldException otherException = new MraldException( nfe.getMessage() );
            throw otherException;
        }
    }

    /**
     * This method builds the appropriate SQL statement for the query, given
     * the pre-generated query components.
     * NOTE(review): built via repeated String '+=' — a StringBuilder would
     * avoid quadratic copying, but the emitted SQL must stay byte-identical.
     *
     *@return the final SQL query string
     *@exception MraldException if any component cannot be assembled
     */
    public String buildQuery()
        throws MraldException
    {
        String finalQueryString = "";
        try
        {
            String start = "SELECT ";
            /*
             * find out if the user wants duplicate entries or not
             * if not, add DISTINCT to the query
             */
            if ( !msg.getValue( "showDuplicates" )[0].equals( "true" ) )
            {
                start += "DISTINCT ";
            }

            finalQueryString = order( selectStrings, start );

            /*
             * need to run this here to see if any new tables need to be added
             * to fromStrings. Any table mentioned in the WHERE clause needs to be
             * in the FROM clause as well. runDijkstra will add tables to fromStrings.
             * The link string will be appended to the query below after the WhereOrStrings
             * are appended.
             */
            MraldDijkstra links = new MraldDijkstra( msg );
            ArrayList<LinkElement> linkArray = msg.getLinks();

            MraldOutFile.logToFile(Config.getProperty("LOGFILE"),
                    "QueryBuilder : execute: get number of Links " + linkArray.size());
            for (LinkElement test:linkArray)
            {
                MraldOutFile.logToFile(Config.getProperty("LOGFILE"),
                        "QueryBuilder : execute: get Links " + test.toString());
            }

            String linkString = links.runDijkstra( fromStrings );

            finalQueryString += " From ";
            for ( int l = 0; l < fromStrings.size(); l++ )
            {
                String this_fstr = fromStrings.get( l );
                finalQueryString += this_fstr;
                if ( l < fromStrings.size() - 1 )
                {
                    finalQueryString += ", ";
                }
            }

            if ( whereAndStrings.size() > 0 )
            {
                finalQueryString += " Where ";
                for ( int l = 0; l < whereAndStrings.size(); l++ )
                {
                    String this_wstr = whereAndStrings.get( l );
                    finalQueryString += this_wstr;
                    if ( l < whereAndStrings.size() - 1 )
                    {
                        finalQueryString += " AND ";
                    }
                }
            }

            /*
             * for ( int l = 0; l < whereOrStrings.size(); l++ )
             * {
             * String this_wstr = ( String ) whereOrStrings.get( l );
             * finalQueryString += this_wstr;
             * }
             */
            // table-link conditions: append with AND if a Where clause already exists
            if ( !linkString.equals( "" ) )
            {
                if ( ( finalQueryString.indexOf( " Where " ) != -1 ) )
                {
                    finalQueryString += " AND " + linkString;
                }
                else
                {
                    finalQueryString += " Where " + linkString;
                }
            }

            finalQueryString += order( groupByStrings, " GROUP BY " );
            finalQueryString += order( orderByStrings, " ORDER BY " );

            //strip off everything after a ';' or '--' to prevent SQL insertion attacks
            finalQueryString = MiscUtils.clearSemiColon( finalQueryString );

            MraldOutFile.logToFile(Config.getProperty("LOGFILE"),
                    "QueryBuilder: query " + finalQueryString);
            return finalQueryString;
        }
        catch ( MraldException cde )
        {
            // for ( int l = 0; l < fromStrings.size(); l++ )
            // {
            // MraldOutFile.logToFile("From Strings : " + fromStrings.get(l));
            // }
            MraldException e = new MraldException( "In QueryBuilder.buildQuery(): "
                    + cde.getMessage() + "\nQuery so far is: " + finalQueryString );
            throw e;
        }
    }

    /**
     * This method builds the Query Components from an Arraylist of SqlElements
     * - each sqlElement is responsible for adding the appropriate string to
     * the different component type (from clause, select clause, group bys,
     * etc.)
     *
     *@param qe list of SqlElements gathered from the submitted form
     *@exception MraldException if the user selected no output data
     */
    public void buildQueryComponents( ArrayList qe )
        throws MraldException
    {
        /*
         * Check to see if the user has entered any output - if not throw exception
         */
        if ( ( qe.size() ) == 0 )
        {
            throw new MraldException( "You must select output data to proceed. Please make selection from Output Data Selection list and resubmit." );
        }

        /*
         * Each element can handle its portion of the query
         * Add all query strings specific to Output data
         */
        SqlElements this_qe = null;
        for ( int k = 0; k < qe.size(); k++ )
        {
            // Object temp = qe.get( k );
            this_qe = ( SqlElements ) qe.get( k );

            /*
             * If this object is null then move to the next element
             */
            if ( this_qe == null )
            {
                continue;
            }

            fromStrings = this_qe.buildFrom( fromStrings );
            whereAndStrings = this_qe.buildWhereAnd( whereAndStrings );
            //whereOrStrings = this_qe.buildWhereOr( whereOrStrings );
            selectStrings = this_qe.buildSelect( selectStrings );
            orderByStrings = this_qe.buildOrderBy( orderByStrings );
            groupByStrings = this_qe.buildGroupBy( groupByStrings );
            // MraldOutFile.appendToFile( this_qe.buildValue() );
        }
    }

    /**
     * Returns the Contents of the vector as comma delimited String, preceded
     * by the leadIn. Entries may carry an explicit position after a tokenizer
     * separator; entries without one are sorted to the end.
     * NOTE(review): uses deprecated Float boxing and raw types — candidates
     * for cleanup, left untouched here to preserve exact ordering behavior.
     *
     *@param orderByStrings - must contain only Strings
     *@param leadIn - The string to append to the beginning of the
     *      list of return String
     *@return the leadIn followed by the comma separated, position-sorted fields
     *@since
     */
    private String order( ArrayList orderByStrings, String leadIn )
    {
        String returnString = "";
        if ( orderByStrings.size() != 0 )
        {
            StringTokenizer thisField;
            TreeMap<Float,String> orderList = new TreeMap<Float,String>();
            Float selectPos = new Float( 0 );
            String fieldName;

            //GH HACK ALERT!!!! +50 because unlikely that there will be
            //this many output selections - jch: until ETMS - now 100
            int lastString = orderByStrings.size() + 300;

            returnString += leadIn;
            for ( int h = 0; h < orderByStrings.size(); h++ )
            {
                thisField = new StringTokenizer( orderByStrings.get( h ).toString(), FormTags.TOKENIZER_STR );
                fieldName = thisField.nextToken();
                selectPos = thisField.hasMoreTokens() ? new Float( thisField.nextToken() ) : new Float( ++lastString );
                orderList.put( selectPos, fieldName );
            }

            Collection sortedValues = orderList.values();
            for ( Iterator it = sortedValues.iterator(); it.hasNext(); )
            {
                returnString += it.next();
                if ( it.hasNext() )
                {
                    returnString += ", ";
                }
            }
        }
        return returnString;
    }
}
package com.psygate.xpylons.commands;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.UUID;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;

import com.psygate.xpylons.XPylonCommand;
import com.psygate.xpylons.XPylons;
import com.psygate.xpylons.entities.XPBlock;
import com.psygate.xpylons.entities.XPylon;

/**
 * Operator-only administration commands for the XPylons plugin: listing all
 * pylons, purging the database, capturing a pylon block template and
 * setting/adjusting the XP stored in a pylon.
 */
public class AdminCommands implements XPylonCommand {
    /** Command names handled here; indices are referenced by the dispatcher below. */
    private static final String[] commands = { "xpylonslist", "xpylonspurge", "xpylonstemplate", "xpylonssetxp" };
    /** Senders that have issued the first (arming) purge request and must confirm. */
    private LinkedList<String> purger = new LinkedList<String>();

    /**
     * Dispatches the supported admin commands; non-operators are rejected.
     *
     * @return always true — usage problems are reported via chat messages.
     */
    @Override
    public boolean onCommand(CommandSender sender, Command command, String alias, String[] args) {
        if (!sender.isOp()) {
            sender.sendMessage(ChatColor.RED + "Only operators can do that. Infraction logged.");
        } else if (command.getName().equals(commands[0])) {
            listPylons(sender);
        } else if (command.getName().equals(commands[1])) {
            purge(sender, args);
        } else if (command.getName().equals(commands[2])) {
            setTemplate(sender);
        } else if (command.getName().equals(commands[3])) {
            setXPforPylon(sender, args);
        }
        return true;
    }

    /**
     * Sets or adjusts the XP of a pylon.
     *
     * Five-argument form: {@code <xp> <x> <y> <z> <world>} — a bare number sets
     * the XP, a signed number ("+n"/"-n") adjusts it. One-argument form (players
     * only): adjusts the pylon at the player's location by the given amount.
     */
    private void setXPforPylon(CommandSender sender, String[] args) {
        if (args.length == 5) {
            int xp, x, y, z;
            String world;
            boolean adjust; // signed amount ("+"/"-") means adjust instead of set
            try {
                xp = Integer.parseInt(args[0]);
                adjust = args[0].startsWith("+") || args[0].startsWith("-");
                x = Integer.parseInt(args[1]);
                y = Integer.parseInt(args[2]);
                z = Integer.parseInt(args[3]);
                world = args[4];
            } catch (Exception e) {
                sender.sendMessage(XPylons.getInstance().getCommand(commands[3]).getUsage());
                return;
            }
            XPylon pylon = XPylons.getDBLayer().getPylonFor(x, y, z, world);
            if (pylon == null) {
                // args[4] may be a world name rather than a world UID; retry with the UID.
                org.bukkit.World namedWorld = Bukkit.getServer().getWorld(world);
                if (namedWorld == null) {
                    // FIX: the original dereferenced the null World here and crashed
                    // with an NPE when the world name was unknown.
                    sender.sendMessage(ChatColor.RED + "Pylon not found.");
                    return;
                }
                pylon = XPylons.getDBLayer().getPylonFor(x, y, z, namedWorld.getUID().toString());
            }
            if (pylon == null) {
                sender.sendMessage(ChatColor.RED + "Pylon not found.");
                return;
            }
            if (adjust) {
                // xp already carries its sign, so "-n" amounts subtract.
                pylon.setXp(pylon.getXp() + xp);
            } else {
                pylon.setXp(xp);
            }
            XPylons.getDBLayer().update(pylon);
            sender.sendMessage(ChatColor.GREEN + "Added " + xp + " xp to the pylon.");
        } else if (args.length == 1) {
            if (!(sender instanceof Player)) {
                sender.sendMessage(ChatColor.RED + "Only player admins can issue this command in this form.");
                return;
            }
            int xp;
            try {
                xp = Integer.parseInt(args[0]);
            } catch (Exception e) {
                sender.sendMessage(XPylons.getInstance().getCommand(commands[3]).getUsage());
                return;
            }
            XPylon pylon = XPylons.getDBLayer().getPylonFor(((Player) sender).getLocation().getBlock());
            if (pylon == null) {
                sender.sendMessage(ChatColor.RED + "Pylon not found. You are not close to a pylon.");
                return;
            }
            pylon.setXp(pylon.getXp() + xp);
            XPylons.getDBLayer().update(pylon);
            sender.sendMessage(ChatColor.GREEN + "Added " + xp + " xp to the pylon.");
        } else {
            sender.sendMessage(XPylons.getInstance().getCommand(commands[3]).getUsage());
        }
    }

    /**
     * Captures the structure the player is looking at as the new pylon
     * template: flood-fills from the targeted block across all non-air
     * neighbours, aborting if the structure exceeds 100 blocks. Explored
     * blocks are highlighted client-side and restored afterwards.
     */
    @SuppressWarnings("deprecation") // getTargetBlock/sendBlockChange/getData are deprecated Bukkit APIs
    private void setTemplate(CommandSender sender) {
        if (!(sender instanceof Player)) {
            sender.sendMessage(ChatColor.RED + "Only players can do that.");
            return;
        }
        Player player = (Player) sender;
        LinkedList<Block> template = new LinkedList<Block>();
        Block base = player.getTargetBlock(null, 10);
        if (base == null) {
            sender.sendMessage("No template in sight. Look at it and try again.");
            return;
        }
        Stack<Block> explore = new Stack<Block>();
        explore.push(base);
        while (!explore.isEmpty()) {
            Block cur = explore.pop();
            template.add(cur);
            for (BlockFace face : BlockFace.values()) {
                Block next = cur.getRelative(face);
                if (!cur.equals(next) && !template.contains(next) && next != null
                        && next.getType() != null && next.getType() != Material.AIR) {
                    explore.push(next);
                    // highlight visited blocks for this player only
                    player.sendBlockChange(next.getLocation(), Material.REDSTONE_BLOCK, (byte) 0);
                }
            }
            if (template.size() > 100) {
                sender.sendMessage("Template to big. Make sure there is air around it everywhere.");
                return;
            }
        }
        LinkedList<XPBlock> blocks = new LinkedList<XPBlock>();
        for (Block b : template) {
            // restore the client-side view and record the offset from the base block
            player.sendBlockChange(b.getLocation(), b.getType(), b.getData());
            blocks.add(new XPBlock(b.getX() - base.getX(), b.getY() - base.getY(),
                    b.getZ() - base.getZ(), b.getType()));
        }
        XPylons.getConfiguration().setTemplate(blocks);
        sender.sendMessage("Template set.");
    }

    /**
     * Two-step purge: the first call warns and arms confirmation for this
     * sender; a second call with the literal argument "ALL" wipes every pylon.
     */
    private void purge(CommandSender sender, String[] args) {
        if (!purger.contains(sender.getName())) {
            sender.sendMessage(ChatColor.RED + "IF YOU REALLY WANT TO PURGE ALL PYLONS, TYPE \"/xpylonspurge ALL\"");
            purger.add(sender.getName());
        } else {
            purger.remove(sender.getName());
            if (args.length != 1 || !args[0].equals("ALL")) {
                // wrong confirmation: warn again and re-arm
                sender.sendMessage(ChatColor.RED + "IF YOU REALLY WANT TO PURGE ALL PYLONS, TYPE \"/xpylonspurge ALL\"");
                purger.add(sender.getName());
            } else {
                XPylons.getDBLayer().purgeAll();
            }
        }
    }

    /** Sends the sender a formatted list of all known pylons. */
    private void listPylons(CommandSender sender) {
        sender.sendMessage(ChatColor.BLUE + "Pylons:");
        List<XPylon> pylons = XPylons.getDBLayer().getPylons();
        if (pylons.isEmpty()) {
            sender.sendMessage(ChatColor.RED + "-- NO PYLONS --");
        } else {
            for (XPylon pylon : pylons) {
                sender.sendMessage(ChatColor.WHITE + pylonString(pylon));
            }
        }
    }

    /** Formats one pylon as "(id)[x,y,z,world] XP: n" for chat output. */
    private String pylonString(XPylon pylon) {
        org.bukkit.World world = Bukkit.getWorld(UUID.fromString(pylon.getWorldUID()));
        // FIX: the original NPE'd when the pylon's world was not loaded; fall back to the UID.
        String worldname = (world == null) ? pylon.getWorldUID() : world.getName();
        return "(" + pylon.getId() + ")" + "[" + pylon.getCx() + "," + pylon.getCy() + ","
                + pylon.getCz() + "," + worldname + "] XP: " + pylon.getXp();
    }

    /** @return the command names this executor handles. */
    @Override
    public String[] getCommands() {
        return commands;
    }
}
package com.parse.starter;

/**
 * Fragment showing per-category budgets with an inline edit view that is
 * attached above the soft keyboard.
 */
import java.util.ArrayList;

import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.view.inputmethod.EditorInfo;

import com.djchen.View.EditTextBackEvent;
import com.djchen.database.DataBaseManipulation;
import com.djchen.model.BudgetAdapter;
import com.djchen.model.BudgetEntry;
import com.parse.starter.R;

public class BudgetFragment extends CustomFragment {

    //widgets on this fragment
    ListView mBudgetList;
    EditTextBackEvent mEditView;//edit view above keyboard
    TextView mEditCategoryName;//category name above keyboard
    BudgetAdapter mbudgetListAdapter;
    LinearLayout mEditLayoutContainer;//layout above keyboard
    EditText mBudgetTotal;
    TextView mBudgetTotalUsed;
    TextView mBudgetTotalLeft;
    DataBaseManipulation db;

    //helper instance to hide/show the attach view of keyboard
    private CheckPostionRunnable mPositionRunnable = new CheckPostionRunnable();
    public Handler mhandler = new Handler();
    // list position of the category currently being edited
    public int selected_Budget_Type_Pos;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        //inflate layout and get viewer handle
        LayoutInflater inflate = (LayoutInflater)getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        View view = inflate.inflate(R.layout.layout_budget, null);
        mBudgetList = (ListView)view.findViewById(R.id.budget_subcatgory_list);
        mEditLayoutContainer = (LinearLayout)view.findViewById(R.id.edit_layout_container);
        mEditView = (EditTextBackEvent)view.findViewById(R.id.budget_edit);
        mEditCategoryName = (TextView)view.findViewById(R.id.budget_edit_category_name);
        mBudgetTotal = (EditText)view.findViewById(R.id.budegt_total);
        mBudgetTotalUsed = (TextView)view.findViewById(R.id.budget_total_used);
        mBudgetTotalLeft = (TextView)view.findViewById(R.id.budget_total_left);

        //set listener to listen to IME_ACTION_DONE
        //when done is pressed on keyboard, we dismiss keyboard,
        //read user's input and write it into database
        mEditView.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                if(actionId == EditorInfo.IME_ACTION_DONE) {
                    InputMethodManager inputManager =
                            (InputMethodManager)getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                    inputManager.hideSoftInputFromWindow(mEditView.getWindowToken(), 0);
                    // persist the new budget amount for the selected category
                    // NOTE(review): Double.parseDouble throws on empty/invalid input — confirm handled upstream.
                    db.updateOrInsertBudget(mbudgetListAdapter.getItem(selected_Budget_Type_Pos).getCategory(),
                            Double.parseDouble(mEditView.getText().toString()));
                    mbudgetListAdapter.getItem(selected_Budget_Type_Pos)
                            .setBudgetAmount(Double.parseDouble(mEditView.getText().toString()));
                    updateBudgetHeadInfo();
                    mbudgetListAdapter.notifyDataSetChanged();
                }
                return false;
            }
        });

        mBudgetList.setAdapter(mbudgetListAdapter);
        this.updateBudgetHeadInfo();

        //populate the budget listView, and listen to click event to pop up keyboard and attach view.
        mBudgetList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                InputMethodManager inputManager =
                        (InputMethodManager)getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                if(inputManager != null) {
                    selected_Budget_Type_Pos = position;
                    mEditCategoryName.setText(mbudgetListAdapter.getItem(position).getCategory());
                    mEditLayoutContainer.setVisibility(View.VISIBLE);
                    // begin watching the container position to hide it when the keyboard closes
                    startCheckPosition();
                    mEditView.setFocusableInTouchMode(true);
                    mEditView.requestFocus();
                    inputManager.showSoftInput(mEditView, InputMethodManager.SHOW_IMPLICIT);
                }
            }
        });
        return view;
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // open the database and load budgets before the view hierarchy is built
        db = new DataBaseManipulation(getActivity());
        this.getBudgetInfo();
    }

    @Override
    public void onDetach() {
        super.onDetach();
        db.closeDB();
    }

    /**
     * This function is used to get budget info of current month,
     * info including:
     * total budget for this month
     * total budget for each category
     * total used for each category
     */
    private void getBudgetInfo() {
        ArrayList<BudgetEntry> listEntry = new ArrayList<BudgetEntry>();
        String[] categories = this.getResources().getStringArray(R.array.record_categories);
        for(int i = 0; i < categories.length; ++i) {
            // NOTE(review): date range is hard-coded to August 2014 — should be derived
            // from the current month; confirm and fix.
            BudgetEntry entry = db.queryBudgetAmountAndCost(categories[i], "2014-08-01", "2014-08-31");
            listEntry.add(entry);
        }
        mbudgetListAdapter = new BudgetAdapter(this.getActivity(), R.layout.budget_category_row, listEntry);
    }

    // Recomputes and displays total budget, total spent and remaining amount.
    private void updateBudgetHeadInfo() {
        double totalBudget = mbudgetListAdapter.getTotalBudget();
        double totalSpent = mbudgetListAdapter.getTotalSpent();
        double totalLeft = totalBudget - totalSpent;
        mBudgetTotal.setText(totalBudget + "");
        mBudgetTotalUsed.setText(totalSpent + "");
        mBudgetTotalLeft.setText(totalLeft + "");
    }

    /**
     * Inner class to check the position of the linearLayout above the keyboard,
     * we need to use this position to decide when to hide the layout.
     */
    public void startCheckPosition() {
        this.mPositionRunnable.stop();
        this.mPositionRunnable.start();
        // NOTE(review): spawns a new Thread on every list click; consider reusing one.
        new Thread(this.mPositionRunnable).start();
    }

    public void stopCheckPosition() {
        this.mPositionRunnable.stop();
    }

    // Watches mEditLayoutContainer's top coordinate and hides it once it starts
    // moving down (i.e. the keyboard is being dismissed).
    public class CheckPostionRunnable implements Runnable {

        private int previousTop = -1;
        private volatile boolean needToCheck = false;

        public void start() {
            needToCheck = true;
        }

        public void stop() {
            needToCheck = false;
        }

        @Override
        public void run() {
            this.previousTop = -1;
            // NOTE(review): busy-wait loop without any sleep — burns CPU while active,
            // and reads View.getTop() from a background thread; a ViewTreeObserver
            // layout listener would be the safer mechanism. Confirm before changing.
            while(needToCheck) {
                if(previousTop == -1 || previousTop == 0) {
                    previousTop = mEditLayoutContainer.getTop();
                } else {
                    int currenttop = mEditLayoutContainer.getTop();
                    if( currenttop > previousTop ) {// editText is going downward, we should hide it
                        Log.i("DJ", "current: " + currenttop + "previous: " + previousTop);
                        previousTop = currenttop;
                        // hide the edit container on the UI thread
                        mhandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mEditLayoutContainer.setVisibility(View.GONE);
                            }
                        });
                        this.needToCheck = false;
                    } else {//editText is going upward
                        previousTop = currenttop;
                    }
                    // NOTE(review): both branches above assign previousTop = currenttop,
                    // so this condition is always false (dead code) — confirm intent.
                    if(currenttop != previousTop) {
                        Log.i("DJ", "current: " + currenttop + "previous: " + previousTop);
                        previousTop = currenttop;
                    }
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.CellComparator.MetaCellComparator; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import 
org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.BloomFilterWriter;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.io.Writable;

/**
 * Common functionality needed by all versions of {@link HFile} writers.
 */
@InterfaceAudience.Private
public class HFileWriterImpl implements HFile.Writer {
  private static final Log LOG = LogFactory.getLog(HFileWriterImpl.class);

  /** Sentinel for "offset not yet recorded". */
  private static final long UNSET = -1;

  /** if this feature is enabled, preCalculate encoded data size before real encoding happens*/
  public static final String UNIFIED_ENCODED_BLOCKSIZE_RATIO =
      "hbase.writer.unified.encoded.blocksize.ratio";

  /** Block size limit after encoding, used to unify encoded block Cache entry size*/
  private final int encodedBlockSizeLimit;

  /** The Cell previously appended. Becomes the last cell in the file.*/
  protected Cell lastCell = null;

  /** FileSystem stream to write into. */
  protected FSDataOutputStream outputStream;

  /** True if we opened the <code>outputStream</code> (and so will close it). */
  protected final boolean closeOutputStream;

  /** A "file info" block: a key-value map of file-wide metadata. */
  protected FileInfo fileInfo = new HFile.FileInfo();

  /** Total # of key/value entries, i.e. how many times add() was called. */
  protected long entryCount = 0;

  /** Used for calculating the average key length. */
  protected long totalKeyLength = 0;

  /** Used for calculating the average value length. */
  protected long totalValueLength = 0;

  /** Total uncompressed bytes, maybe calculate a compression ratio later. */
  protected long totalUncompressedBytes = 0;

  /** Key comparator. Used to ensure we write in order. */
  protected final CellComparator comparator;

  /** Meta block names. */
  protected List<byte[]> metaNames = new ArrayList<>();

  /** {@link Writable}s representing meta block data. */
  protected List<Writable> metaData = new ArrayList<>();

  /**
   * First cell in a block.
   * This reference should be short-lived since we write hfiles in a burst.
   */
  protected Cell firstCellInBlock = null;

  /** May be null if we were passed a stream. */
  protected final Path path;

  /** Cache configuration for caching data on write. */
  protected final CacheConfig cacheConf;

  /**
   * Name for this object used when logging or in toString. Is either
   * the result of a toString on stream or else name of passed file Path.
   */
  protected final String name;

  /**
   * The data block encoding which will be used.
   * {@link NoOpDataBlockEncoder#INSTANCE} if there is no encoding.
   */
  protected final HFileDataBlockEncoder blockEncoder;

  protected final HFileContext hFileContext;

  /** Longest tag list seen so far; written to file info in finishFileInfo(). */
  private int maxTagsLength = 0;

  /** KeyValue version in FileInfo */
  public static final byte[] KEY_VALUE_VERSION = Bytes.toBytes("KEY_VALUE_VERSION");

  /** Version for KeyValue which includes memstore timestamp */
  public static final int KEY_VALUE_VER_WITH_MEMSTORE = 1;

  /** Inline block writers for multi-level block index and compound Blooms. */
  private List<InlineBlockWriter> inlineBlockWriters = new ArrayList<>();

  /** block writer */
  protected HFileBlock.Writer blockWriter;

  private HFileBlockIndex.BlockIndexWriter dataBlockIndexWriter;
  private HFileBlockIndex.BlockIndexWriter metaBlockIndexWriter;

  /** The offset of the first data block or -1 if the file is empty. */
  private long firstDataBlockOffset = UNSET;

  /** The offset of the last data block or 0 if the file is empty. */
  protected long lastDataBlockOffset = UNSET;

  /**
   * The last(stop) Cell of the previous data block.
   * This reference should be short-lived since we write hfiles in a burst.
   */
  private Cell lastCellOfPreviousBlock = null;

  /** Additional data items to be written to the "load-on-open" section. */
  private List<BlockWritable> additionalLoadOnOpenData = new ArrayList<>();

  /** Highest memstore sequence id seen among appended cells. */
  protected long maxMemstoreTS = 0;

  public HFileWriterImpl(final Configuration conf, CacheConfig cacheConf, Path path,
      FSDataOutputStream outputStream, CellComparator comparator, HFileContext fileContext) {
    this.outputStream = outputStream;
    this.path = path;
    this.name = path != null ? path.getName() : outputStream.toString();
    this.hFileContext = fileContext;
    DataBlockEncoding encoding = hFileContext.getDataBlockEncoding();
    if (encoding != DataBlockEncoding.NONE) {
      this.blockEncoder = new HFileDataBlockEncoderImpl(encoding);
    } else {
      this.blockEncoder = NoOpDataBlockEncoder.INSTANCE;
    }
    this.comparator = comparator != null? comparator: CellComparator.COMPARATOR;

    closeOutputStream = path != null;
    this.cacheConf = cacheConf;
    // Derive the encoded-size limit from the configured ratio (default 1.0).
    float encodeBlockSizeRatio = conf.getFloat(UNIFIED_ENCODED_BLOCKSIZE_RATIO, 1f);
    this.encodedBlockSizeLimit = (int)(hFileContext.getBlocksize() * encodeBlockSizeRatio);
    finishInit(conf);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Writer" + (path != null ? " for " + path : "") +
        " initialized with cacheConf: " + cacheConf +
        " comparator: " + comparator.getClass().getSimpleName() +
        " fileContext: " + fileContext);
    }
  }

  /**
   * Add to the file info. All added key/value pairs can be obtained using
   * {@link HFile.Reader#loadFileInfo()}.
   *
   * @param k Key
   * @param v Value
   * @throws IOException in case the key or the value are invalid
   */
  @Override
  public void appendFileInfo(final byte[] k, final byte[] v)
      throws IOException {
    fileInfo.append(k, v, true);
  }

  /**
   * Sets the file info offset in the trailer, finishes up populating fields in
   * the file info, and writes the file info into the given data output. The
   * reason the data output is not always {@link #outputStream} is that we store
   * file info as a block in version 2.
   *
   * @param trailer fixed file trailer
   * @param out the data output to write the file info to
   * @throws IOException
   */
  protected final void writeFileInfo(FixedFileTrailer trailer, DataOutputStream out)
      throws IOException {
    trailer.setFileInfoOffset(outputStream.getPos());
    finishFileInfo();
    long startTime = System.currentTimeMillis();
    fileInfo.write(out);
    HFile.updateWriteLatency(System.currentTimeMillis() - startTime);
  }

  /**
   * Checks that the given Cell's key does not violate the key order.
   *
   * @param cell Cell whose key to check.
   * @return true if the key is duplicate
   * @throws IOException if the key or the key order is wrong
   */
  protected boolean checkKey(final Cell cell) throws IOException {
    boolean isDuplicateKey = false;

    if (cell == null) {
      throw new IOException("Key cannot be null or empty");
    }
    if (lastCell != null) {
      // Compare ignoring mvcc so two versions of the same key count as duplicates.
      int keyComp = comparator.compareKeyIgnoresMvcc(lastCell, cell);

      if (keyComp > 0) {
        throw new IOException("Added a key not lexically larger than"
            + " previous. Current cell = " + cell + ", lastCell = " + lastCell);
      } else if (keyComp == 0) {
        isDuplicateKey = true;
      }
    }
    return isDuplicateKey;
  }

  /** Checks the given value for validity. */
  protected void checkValue(final byte[] value, final int offset,
      final int length) throws IOException {
    if (value == null) {
      throw new IOException("Value cannot be null");
    }
  }

  /**
   * @return Path or null if we were passed a stream rather than a Path.
   */
  @Override
  public Path getPath() {
    return path;
  }

  @Override
  public String toString() {
    return "writer=" + (path != null ? path.toString() : null) + ", name="
        + name + ", compression=" + hFileContext.getCompression().getName();
  }

  /** Resolves an algorithm name to a codec; null means the default codec. */
  public static Compression.Algorithm compressionByName(String algoName) {
    if (algoName == null) return HFile.DEFAULT_COMPRESSION_ALGORITHM;
    return Compression.getCompressionAlgorithmByName(algoName);
  }

  /** A helper method to create HFile output streams in constructors */
  protected static FSDataOutputStream createOutputStream(Configuration conf,
      FileSystem fs, Path path, InetSocketAddress[] favoredNodes) throws IOException {
    FsPermission perms = FSUtils.getFilePermissions(fs, conf,
        HConstants.DATA_FILE_UMASK_KEY);
    return FSUtils.create(conf, fs, path, perms, favoredNodes);
  }

  /** Additional initialization steps */
  protected void finishInit(final Configuration conf) {
    if (blockWriter != null) {
      throw new IllegalStateException("finishInit called twice");
    }

    blockWriter = new HFileBlock.Writer(blockEncoder, hFileContext);

    // Data block index writer
    boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite();
    dataBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(blockWriter,
        cacheIndexesOnWrite ? cacheConf : null,
        cacheIndexesOnWrite ? name : null);
    dataBlockIndexWriter.setMaxChunkSize(
        HFileBlockIndex.getMaxChunkSize(conf));
    dataBlockIndexWriter.setMinIndexNumEntries(
        HFileBlockIndex.getMinIndexNumEntries(conf));
    inlineBlockWriters.add(dataBlockIndexWriter);

    // Meta data block index writer
    metaBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter();
    if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf);
  }

  /**
   * At a block boundary, write all the inline blocks and opens new block.
   *
   * @throws IOException
   */
  protected void checkBlockBoundary() throws IOException {
    //for encoder like prefixTree, encoded size is not available, so we have to compare both encoded size
    //and unencoded size to blocksize limit.
    if (blockWriter.encodedBlockSizeWritten() >= encodedBlockSizeLimit
        || blockWriter.blockSizeWritten() >= hFileContext.getBlocksize()) {
      finishBlock();
      writeInlineBlocks(false);
      newBlock();
    }
  }

  /** Clean up the data block that is currently being written.*/
  private void finishBlock() throws IOException {
    if (!blockWriter.isWriting() || blockWriter.blockSizeWritten() == 0) return;

    // Update the first data block offset if UNSET; used scanning.
    if (firstDataBlockOffset == UNSET) {
      firstDataBlockOffset = outputStream.getPos();
    }
    // Update the last data block offset each time through here.
    lastDataBlockOffset = outputStream.getPos();
    blockWriter.writeHeaderAndData(outputStream);
    int onDiskSize = blockWriter.getOnDiskSizeWithHeader();
    // Index on a "midpoint" key between the previous block and this one:
    // shorter than the real first key, which keeps the index small.
    Cell indexEntry =
      getMidpoint(this.comparator, lastCellOfPreviousBlock, firstCellInBlock);
    dataBlockIndexWriter.addEntry(CellUtil.getCellKeySerializedAsKeyValueKey(indexEntry),
      lastDataBlockOffset, onDiskSize);
    totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
    if (cacheConf.shouldCacheDataOnWrite()) {
      doCacheOnWrite(lastDataBlockOffset);
    }
  }

  /**
   * Try to return a Cell that falls between <code>left</code> and
   * <code>right</code> but that is shorter; i.e. takes up less space. This
   * trick is used building HFile block index. Its an optimization. It does not
   * always work. In this case we'll just return the <code>right</code> cell.
   *
   * @param comparator
   *          Comparator to use.
   * @param left
   * @param right
   * @return A cell that sorts between <code>left</code> and <code>right</code>.
   */
  public static Cell getMidpoint(final CellComparator comparator, final Cell left,
      final Cell right) {
    // TODO: Redo so only a single pass over the arrays rather than one to
    // compare and then a second composing midpoint.
    if (right == null) {
      throw new IllegalArgumentException("right cell can not be null");
    }
    if (left == null) {
      return right;
    }
    // If Cells from meta table, don't mess around. meta table Cells have schema
    // (table,startrow,hash) so can't be treated as plain byte arrays. Just skip
    // out without trying to do this optimization.
    if (comparator instanceof MetaCellComparator) {
      return right;
    }
    int diff = comparator.compareRows(left, right);
    if (diff > 0) {
      throw new IllegalArgumentException("Left row sorts after right row; left="
          + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
    }
    byte[] midRow;
    boolean bufferBacked = left instanceof ByteBufferCell && right instanceof ByteBufferCell;
    if (diff < 0) {
      // Left row is < right row.
      if (bufferBacked) {
        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getRowByteBuffer(),
            ((ByteBufferCell) left).getRowPosition(), left.getRowLength(),
            ((ByteBufferCell) right).getRowByteBuffer(),
            ((ByteBufferCell) right).getRowPosition(), right.getRowLength());
      } else {
        midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
            left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
      }
      // If midRow is null, just return 'right'. Can't do optimization.
      if (midRow == null) return right;
      return CellUtil.createFirstOnRow(midRow);
    }
    // Rows are same. Compare on families.
    diff = CellComparator.compareFamilies(left, right);
    if (diff > 0) {
      throw new IllegalArgumentException("Left family sorts after right family; left="
          + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
    }
    if (diff < 0) {
      if (bufferBacked) {
        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getFamilyByteBuffer(),
            ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(),
            ((ByteBufferCell) right).getFamilyByteBuffer(),
            ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength());
      } else {
        midRow = getMinimumMidpointArray(left.getFamilyArray(), left.getFamilyOffset(),
            left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
            right.getFamilyLength());
      }
      // If midRow is null, just return 'right'. Can't do optimization.
      if (midRow == null) return right;
      // Return new Cell where we use right row and then a mid sort family.
      return CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length);
    }
    // Families are same. Compare on qualifiers.
    diff = CellComparator.compareQualifiers(left, right);
    if (diff > 0) {
      throw new IllegalArgumentException("Left qualifier sorts after right qualifier; left="
          + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
    }
    if (diff < 0) {
      if (bufferBacked) {
        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getQualifierByteBuffer(),
            ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(),
            ((ByteBufferCell) right).getQualifierByteBuffer(),
            ((ByteBufferCell) right).getQualifierPosition(), right.getQualifierLength());
      } else {
        midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
            left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
            right.getQualifierLength());
      }
      // If midRow is null, just return 'right'. Can't do optimization.
      if (midRow == null) return right;
      // Return new Cell where we use right row and family and then a mid sort qualifier.
      return CellUtil.createFirstOnRowCol(right, midRow, 0, midRow.length);
    }
    // No opportunity for optimization. Just return right key.
    return right;
  }

  /**
   * @param leftArray
   * @param leftOffset
   * @param leftLength
   * @param rightArray
   * @param rightOffset
   * @param rightLength
   * @return Return a new array that is between left and right and minimally
   *         sized else just return null as indicator that we could not create a
   *         mid point.
   */
  private static byte[] getMinimumMidpointArray(final byte[] leftArray, final int leftOffset,
      final int leftLength, final byte[] rightArray, final int rightOffset,
      final int rightLength) {
    // rows are different
    int minLength = leftLength < rightLength ? leftLength : rightLength;
    int diffIdx = 0;
    // Scan for the first index at which left and right differ.
    while (diffIdx < minLength
        && leftArray[leftOffset + diffIdx] == rightArray[rightOffset + diffIdx]) {
      diffIdx++;
    }
    byte[] minimumMidpointArray = null;
    if (diffIdx >= minLength) {
      // leftKey's row is prefix of rightKey's.
      minimumMidpointArray = new byte[diffIdx + 1];
      System.arraycopy(rightArray, rightOffset, minimumMidpointArray, 0, diffIdx + 1);
    } else {
      int diffByte = leftArray[leftOffset + diffIdx];
      if ((0xff & diffByte) < 0xff
          && (diffByte + 1) < (rightArray[rightOffset + diffIdx] & 0xff)) {
        // Room between the differing bytes: use left's prefix plus one.
        minimumMidpointArray = new byte[diffIdx + 1];
        System.arraycopy(leftArray, leftOffset, minimumMidpointArray, 0, diffIdx);
        minimumMidpointArray[diffIdx] = (byte) (diffByte + 1);
      } else {
        minimumMidpointArray = new byte[diffIdx + 1];
        System.arraycopy(rightArray, rightOffset, minimumMidpointArray, 0, diffIdx + 1);
      }
    }
    return minimumMidpointArray;
  }

  /** ByteBuffer-backed variant of {@link #getMinimumMidpointArray(byte[], int, int, byte[], int, int)}. */
  private static byte[] getMinimumMidpointArray(ByteBuffer left, int leftOffset, int leftLength,
      ByteBuffer right, int rightOffset, int rightLength) {
    // rows are different
    int minLength = leftLength < rightLength ? leftLength : rightLength;
    int diffIdx = 0;
    while (diffIdx < minLength && ByteBufferUtils.toByte(left, leftOffset + diffIdx)
        == ByteBufferUtils.toByte(right, rightOffset + diffIdx)) {
      diffIdx++;
    }
    byte[] minMidpoint = null;
    if (diffIdx >= minLength) {
      // leftKey's row is prefix of rightKey's.
      minMidpoint = new byte[diffIdx + 1];
      ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
    } else {
      int diffByte = ByteBufferUtils.toByte(left, leftOffset + diffIdx);
      if ((0xff & diffByte) < 0xff
          && (diffByte + 1) < (ByteBufferUtils.toByte(right, rightOffset + diffIdx) & 0xff)) {
        minMidpoint = new byte[diffIdx + 1];
        ByteBufferUtils.copyFromBufferToArray(minMidpoint, left, leftOffset, 0, diffIdx);
        minMidpoint[diffIdx] = (byte) (diffByte + 1);
      } else {
        minMidpoint = new byte[diffIdx + 1];
        ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
      }
    }
    return minMidpoint;
  }

  /** Gives inline block writers an opportunity to contribute blocks. */
  private void writeInlineBlocks(boolean closing) throws IOException {
    for (InlineBlockWriter ibw : inlineBlockWriters) {
      while (ibw.shouldWriteBlock(closing)) {
        long offset = outputStream.getPos();
        boolean cacheThisBlock = ibw.getCacheOnWrite();
        ibw.writeInlineBlock(blockWriter.startWriting(
            ibw.getInlineBlockType()));
        blockWriter.writeHeaderAndData(outputStream);
        ibw.blockWritten(offset, blockWriter.getOnDiskSizeWithHeader(),
            blockWriter.getUncompressedSizeWithoutHeader());
        totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();

        if (cacheThisBlock) {
          doCacheOnWrite(offset);
        }
      }
    }
  }

  /**
   * Caches the last written HFile block.
   * @param offset the offset of the block we want to cache. Used to determine
   *          the cache key.
   */
  private void doCacheOnWrite(long offset) {
    HFileBlock cacheFormatBlock = blockWriter.getBlockForCaching(cacheConf);
    cacheConf.getBlockCache().cacheBlock(
        new BlockCacheKey(name, offset, true, cacheFormatBlock.getBlockType()),
        cacheFormatBlock);
  }

  /**
   * Ready a new block for writing.
   *
   * @throws IOException
   */
  protected void newBlock() throws IOException {
    // This is where the next block begins.
    blockWriter.startWriting(BlockType.DATA);
    firstCellInBlock = null;
    if (lastCell != null) {
      lastCellOfPreviousBlock = lastCell;
    }
  }

  /**
   * Add a meta block to the end of the file. Call before close(). Metadata
   * blocks are expensive. Fill one with a bunch of serialized data rather than
   * do a metadata block per metadata instance. If metadata is small, consider
   * adding to file info using {@link #appendFileInfo(byte[], byte[])}
   *
   * @param metaBlockName
   *          name of the block
   * @param content
   *          will call readFields to get data later (DO NOT REUSE)
   */
  @Override
  public void appendMetaBlock(String metaBlockName, Writable content) {
    byte[] key = Bytes.toBytes(metaBlockName);
    int i;
    // Keep metaNames sorted: insert before the first key greater than ours.
    for (i = 0; i < metaNames.size(); ++i) {
      // stop when the current key is greater than our own
      byte[] cur = metaNames.get(i);
      if (Bytes.BYTES_RAWCOMPARATOR.compare(cur, 0, cur.length, key, 0,
          key.length) > 0) {
        break;
      }
    }
    metaNames.add(i, key);
    metaData.add(i, content);
  }

  /**
   * Flushes the last data block, writes meta blocks, the load-on-open section
   * (indexes, file info, Blooms) and the fixed trailer, then releases the
   * block writer. Idempotent: a second call returns immediately because
   * <code>outputStream</code> is nulled in finishClose().
   */
  @Override
  public void close() throws IOException {
    if (outputStream == null) {
      return;
    }
    // Save data block encoder metadata in the file info.
    blockEncoder.saveMetadata(this);
    // Write out the end of the data blocks, then write meta data blocks.
    // followed by fileinfo, data block index and meta block index.

    finishBlock();
    writeInlineBlocks(true);

    FixedFileTrailer trailer = new FixedFileTrailer(getMajorVersion(), getMinorVersion());

    // Write out the metadata blocks if any.
    if (!metaNames.isEmpty()) {
      for (int i = 0; i < metaNames.size(); ++i) {
        // store the beginning offset
        long offset = outputStream.getPos();
        // write the metadata content
        DataOutputStream dos = blockWriter.startWriting(BlockType.META);
        metaData.get(i).write(dos);

        blockWriter.writeHeaderAndData(outputStream);
        totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();

        // Add the new meta block to the meta index.
        metaBlockIndexWriter.addEntry(metaNames.get(i), offset,
            blockWriter.getOnDiskSizeWithHeader());
      }
    }

    // Load-on-open section.
    //
    // Data block index.
    //
    // In version 2, this section of the file starts with the root level data
    // block index. We call a function that writes intermediate-level blocks
    // first, then root level, and returns the offset of the root level block
    // index.

    long rootIndexOffset = dataBlockIndexWriter.writeIndexBlocks(outputStream);
    trailer.setLoadOnOpenOffset(rootIndexOffset);

    // Meta block index.
    metaBlockIndexWriter.writeSingleLevelIndex(blockWriter.startWriting(
        BlockType.ROOT_INDEX), "meta");
    blockWriter.writeHeaderAndData(outputStream);
    totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();

    if (this.hFileContext.isIncludesMvcc()) {
      appendFileInfo(MAX_MEMSTORE_TS_KEY, Bytes.toBytes(maxMemstoreTS));
      appendFileInfo(KEY_VALUE_VERSION, Bytes.toBytes(KEY_VALUE_VER_WITH_MEMSTORE));
    }

    // File info
    writeFileInfo(trailer, blockWriter.startWriting(BlockType.FILE_INFO));
    blockWriter.writeHeaderAndData(outputStream);
    totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();

    // Load-on-open data supplied by higher levels, e.g. Bloom filters.
    for (BlockWritable w : additionalLoadOnOpenData){
      blockWriter.writeBlock(w, outputStream);
      totalUncompressedBytes += blockWriter.getUncompressedSizeWithHeader();
    }

    // Now finish off the trailer.
    trailer.setNumDataIndexLevels(dataBlockIndexWriter.getNumLevels());
    trailer.setUncompressedDataIndexSize(
        dataBlockIndexWriter.getTotalUncompressedSize());
    trailer.setFirstDataBlockOffset(firstDataBlockOffset);
    trailer.setLastDataBlockOffset(lastDataBlockOffset);
    trailer.setComparatorClass(comparator.getClass());
    trailer.setDataIndexCount(dataBlockIndexWriter.getNumRootEntries());

    finishClose(trailer);

    blockWriter.release();
  }

  @Override
  public void addInlineBlockWriter(InlineBlockWriter ibw) {
    inlineBlockWriters.add(ibw);
  }

  @Override
  public void addGeneralBloomFilter(final BloomFilterWriter bfw) {
    this.addBloomFilter(bfw, BlockType.GENERAL_BLOOM_META);
  }

  @Override
  public void addDeleteFamilyBloomFilter(final BloomFilterWriter bfw) {
    this.addBloomFilter(bfw, BlockType.DELETE_FAMILY_BLOOM_META);
  }

  /** Queues a Bloom filter to be written into the load-on-open section at close time. */
  private void addBloomFilter(final BloomFilterWriter bfw,
      final BlockType blockType) {
    if (bfw.getKeyCount() <= 0) return;

    if (blockType != BlockType.GENERAL_BLOOM_META &&
        blockType != BlockType.DELETE_FAMILY_BLOOM_META) {
      throw new RuntimeException("Block Type: " + blockType.toString() + "is not supported");
    }
    additionalLoadOnOpenData.add(new BlockWritable() {
      @Override
      public BlockType getBlockType() {
        return blockType;
      }

      @Override
      public void writeToBlock(DataOutput out) throws IOException {
        bfw.getMetaWriter().write(out);
        Writable dataWriter = bfw.getDataWriter();
        if (dataWriter != null) dataWriter.write(out);
      }
    });
  }

  @Override
  public HFileContext getFileContext() {
    return hFileContext;
  }

  /**
   * Add key/value to file. Keys must be added in an order that agrees with the
   * Comparator passed on construction.
   *
   * @param cell
   *          Cell to add. Cannot be empty nor null.
   * @throws IOException
   */
  @Override
  public void append(final Cell cell) throws IOException {
    // checkKey uses comparator to check we are writing in order.
    boolean dupKey = checkKey(cell);
    if (!dupKey) {
      checkBlockBoundary();
    }

    if (!blockWriter.isWriting()) {
      newBlock();
    }

    blockWriter.write(cell);

    totalKeyLength += CellUtil.estimatedSerializedSizeOfKey(cell);
    totalValueLength += cell.getValueLength();

    // Are we the first key in this block?
    if (firstCellInBlock == null) {
      // If cell is big, block will be closed and this firstCellInBlock reference will only last
      // a short while.
      firstCellInBlock = cell;
    }

    // TODO: What if cell is 10MB and we write infrequently? We hold on to cell here indefinitely?
    lastCell = cell;
    entryCount++;
    this.maxMemstoreTS = Math.max(this.maxMemstoreTS, cell.getSequenceId());
    int tagsLength = cell.getTagsLength();
    if (tagsLength > this.maxTagsLength) {
      this.maxTagsLength = tagsLength;
    }
  }

  @Override
  public void beforeShipped() throws IOException {
    // Add clone methods for every cell
    if (this.lastCell != null) {
      this.lastCell = KeyValueUtil.toNewKeyCell(this.lastCell);
    }
    if (this.firstCellInBlock != null) {
      this.firstCellInBlock = KeyValueUtil.toNewKeyCell(this.firstCellInBlock);
    }
    if (this.lastCellOfPreviousBlock != null) {
      this.lastCellOfPreviousBlock = KeyValueUtil.toNewKeyCell(this.lastCellOfPreviousBlock);
    }
  }

  /** Populates the file-info map (last key, averages, tag metadata) before it is written. */
  protected void finishFileInfo() throws IOException {
    if (lastCell != null) {
      // Make a copy. The copy is stuffed into our fileinfo map. Needs a clean
      // byte buffer. Won't take a tuple.
      byte[] lastKey = CellUtil.getCellKeySerializedAsKeyValueKey(this.lastCell);
      fileInfo.append(FileInfo.LASTKEY, lastKey, false);
    }

    // Average key length.
    int avgKeyLen =
        entryCount == 0 ? 0 : (int) (totalKeyLength / entryCount);
    fileInfo.append(FileInfo.AVG_KEY_LEN, Bytes.toBytes(avgKeyLen), false);
    fileInfo.append(FileInfo.CREATE_TIME_TS, Bytes.toBytes(hFileContext.getFileCreateTime()),
        false);

    // Average value length.
    int avgValueLen =
        entryCount == 0 ? 0 : (int) (totalValueLength / entryCount);
    fileInfo.append(FileInfo.AVG_VALUE_LEN, Bytes.toBytes(avgValueLen), false);
    if (hFileContext.getDataBlockEncoding() == DataBlockEncoding.PREFIX_TREE) {
      // In case of Prefix Tree encoding, we always write tags information into HFiles even if all
      // KVs are having no tags.
      fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
    } else if (hFileContext.isIncludesTags()) {
      // When tags are not being written in this file, MAX_TAGS_LEN is excluded
      // from the FileInfo
      fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false);
      boolean tagsCompressed = (hFileContext.getDataBlockEncoding() != DataBlockEncoding.NONE)
        && hFileContext.isCompressTags();
      fileInfo.append(FileInfo.TAGS_COMPRESSED, Bytes.toBytes(tagsCompressed), false);
    }
  }

  protected int getMajorVersion() {
    return 3;
  }

  protected int getMinorVersion() {
    return HFileReaderImpl.MAX_MINOR_VERSION;
  }

  /** Finalizes and serializes the trailer, then closes the stream if we own it. */
  protected void finishClose(FixedFileTrailer trailer) throws IOException {
    // Write out encryption metadata before finalizing if we have a valid crypto context
    Encryption.Context cryptoContext = hFileContext.getEncryptionContext();
    if (cryptoContext != Encryption.Context.NONE) {
      // Wrap the context's key and write it as the encryption metadata, the wrapper includes
      // all information needed for decryption
      trailer.setEncryptionKey(EncryptionUtil.wrapKey(cryptoContext.getConf(),
        cryptoContext.getConf().get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
          User.getCurrent().getShortName()),
        cryptoContext.getKey()));
    }
    // Now we can finish the close
    trailer.setMetaIndexCount(metaNames.size());
    trailer.setTotalUncompressedBytes(totalUncompressedBytes+ trailer.getTrailerSize());
    trailer.setEntryCount(entryCount);
    trailer.setCompressionCodec(hFileContext.getCompression());

    long startTime = System.currentTimeMillis();
    trailer.serialize(outputStream);
    HFile.updateWriteLatency(System.currentTimeMillis() - startTime);

    if (closeOutputStream) {
      outputStream.close();
      outputStream = null;
    }
  }
}
/** * Academic Use Licence * * These licence terms apply to all licences granted by * OXFORD UNIVERSITY INNOVATION LIMITED whose administrative offices are at * University Offices, Wellington Square, Oxford OX1 2JD, United Kingdom ("OUI") * for use of BuRST, a generic tool for collating error and debug information from * a number of distributed tools, and provides a subscription service so that * end-users can be informed of messages ("the Software") through this website * https://github.com/OxBRCInformatics/BuRST (the "Website"). * * PLEASE READ THESE LICENCE TERMS CAREFULLY BEFORE DOWNLOADING THE SOFTWARE * THROUGH THIS WEBSITE. IF YOU DO NOT AGREE TO THESE LICENCE TERMS YOU SHOULD NOT * [REQUEST A USER NAME AND PASSWORD OR] DOWNLOAD THE SOFTWARE. * * THE SOFTWARE IS INTENDED FOR USE BY ACADEMICS CARRYING OUT RESEARCH AND NOT FOR * USE BY CONSUMERS OR COMMERCIAL BUSINESSES. * * 1. Academic Use Licence * * 1.1 The Licensee is granted a limited non-exclusive and non-transferable * royalty free licence to download and use the Software provided that the * Licensee will: * * (a) limit their use of the Software to their own internal academic * non-commercial research which is undertaken for the purposes of * education or other scholarly use; * * (b) not use the Software for or on behalf of any third party or to * provide a service or integrate all or part of the Software into a * product for sale or license to third parties; * * (c) use the Software in accordance with the prevailing instructions and * guidance for use given on the Website and comply with procedures on * the Website for user identification, authentication and access; * * (d) comply with all applicable laws and regulations with respect to their * use of the Software; and * * (e) ensure that the Copyright Notice (c) 2016, Oxford University * Innovation Ltd." 
appears prominently wherever the Software is * reproduced and is referenced or cited with the Copyright Notice when * the Software is described in any research publication or on any * documents or other material created using the Software. * * 1.2 The Licensee may only reproduce, modify, transmit or transfer the * Software where: * * (a) such reproduction, modification, transmission or transfer is for * academic, research or other scholarly use; * * (b) the conditions of this Licence are imposed upon the receiver of the * Software or any modified Software; * * (c) all original and modified Source Code is included in any transmitted * software program; and * * (d) the Licensee grants OUI an irrevocable, indefinite, royalty free, * non-exclusive unlimited licence to use and sub-licence any modified * Source Code as part of the Software. * * 1.3 OUI reserves the right at any time and without liability or prior * notice to the Licensee to revise, modify and replace the functionality * and performance of the access to and operation of the Software. * * 1.4 The Licensee acknowledges and agrees that OUI owns all intellectual * property rights in the Software. The Licensee shall not have any right, * title or interest in the Software. * * 1.5 This Licence will terminate immediately and the Licensee will no longer * have any right to use the Software or exercise any of the rights * granted to the Licensee upon any breach of the conditions in Section 1 * of this Licence. * * 2. Indemnity and Liability * * 2.1 The Licensee shall defend, indemnify and hold harmless OUI against any * claims, actions, proceedings, losses, damages, expenses and costs * (including without limitation court costs and reasonable legal fees) * arising out of or in connection with the Licensee's possession or use of * the Software, or any breach of these terms by the Licensee. * * 2.2 The Software is provided on an "as is" basis and the Licensee uses the * Software at their own risk. 
No representations, conditions, warranties or * other terms of any kind are given in respect of the the Software and all * statutory warranties and conditions are excluded to the fullest extent * permitted by law. Without affecting the generality of the previous * sentences, OUI gives no implied or express warranty and makes no * representation that the Software or any part of the Software: * * (a) will enable specific results to be obtained; or * * (b) meets a particular specification or is comprehensive within its field * or that it is error free or will operate without interruption; or * * (c) is suitable for any particular, or the Licensee's specific purposes. * * 2.3 Except in relation to fraud, death or personal injury, OUI"s liability to * the Licensee for any use of the Software, in negligence or arising in any * other way out of the subject matter of these licence terms, will not * extend to any incidental or consequential damages or losses, or any loss * of profits, loss of revenue, loss of data, loss of contracts or * opportunity, whether direct or indirect. * * 2.4 The Licensee hereby irrevocably undertakes to OUI not to make any claim * against any employee, student, researcher or other individual engaged by * OUI, being a claim which seeks to enforce against any of them any * liability whatsoever in connection with these licence terms or their * subject-matter. * * 3. General * * 3.1 Severability - If any provision (or part of a provision) of these licence * terms is found by any court or administrative body of competent * jurisdiction to be invalid, unenforceable or illegal, the other * provisions shall remain in force. * * 3.2 Entire Agreement - These licence terms constitute the whole agreement * between the parties and supersede any previous arrangement, understanding * or agreement between them relating to the Software. 
* * 3.3 Law and Jurisdiction - These licence terms and any disputes or claims * arising out of or in connection with them shall be governed by, and * construed in accordance with, the law of England. The Licensee * irrevocably submits to the exclusive jurisdiction of the English courts * for any dispute or claim that arises out of or in connection with these * licence terms. * * If you are interested in using the Software commercially, please contact * Oxford University Innovation Limited to negotiate a licence. * Contact details are enquiries@innovation.ox.ac.uk quoting reference 14422. */ package ox.softeng.burst.domain.report; import ox.softeng.burst.domain.subscription.Severity; import ox.softeng.burst.domain.util.DomainClass; import ox.softeng.burst.util.SeverityEnum; import ox.softeng.burst.xml.MessageDTO; import ox.softeng.burst.xml.MetadataDTO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.*; import java.io.Serializable; import java.time.OffsetDateTime; import java.time.ZoneId; import java.util.HashSet; import java.util.List; import java.util.Set; @Entity @Table(name = "message", schema = "report", indexes = { @Index(columnList = "datetime_received", name = "index_datetime_received"), @Index(columnList = "severity_number", name = "index_severity_number"), @Index(columnList = "datetime_received,severity_number", name = "index_dr_s") }) @NamedQueries({ @NamedQuery(name = "message.with_severity_between_time", query = "select distinct m from Message m" + // " join fetch m.metadata metadata " + " join fetch m.topics topics " + " where m.dateTimeReceived < :endTime" + " and m.dateTimeReceived >= :startTime" + " and m.severityNumber >= :severity") }) @SequenceGenerator(name = "messagesIdSeq", sequenceName = "report.messages_id_seq", allocationSize = 1) public class Message extends DomainClass implements Serializable { private static final Logger logger = LoggerFactory.getLogger(Message.class); private static final long 
serialVersionUID = 1L; @Column(name = "datetime_created") protected OffsetDateTime dateTimeCreated; @Column(name = "datetime_received") protected OffsetDateTime dateTimeReceived; @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "messagesIdSeq") protected Long id = null; @Column(name = "message", columnDefinition = "TEXT") protected String message; @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "message") protected Set<Metadata> metadata; @Enumerated(EnumType.STRING) protected SeverityEnum severity; @Column(name = "severity_number") protected int severityNumber; protected String source; protected String title; @ElementCollection @CollectionTable(name = "message_topics", schema = "report", joinColumns = @JoinColumn(name = "message_id", referencedColumnName = "id" ), foreignKey = @ForeignKey(name = "fk_topics_messages"), indexes = @Index(columnList = "topic", name = "index_topic") ) @Column(name = "topic") protected Set<String> topics; public Message() { topics = new HashSet<>(); metadata = new HashSet<>(); } public Message(String source, String message, SeverityEnum severity, OffsetDateTime dateTimeCreated, String title) { this(); dateTimeReceived = OffsetDateTime.now(ZoneId.of("UTC")); this.source = source; this.message = message; this.severity = severity; this.dateTimeCreated = dateTimeCreated; this.title = title; } public void addMetadata(String key, String value) { metadata.add(new Metadata(key, value, this)); } public void addTopic(String topic) { topics.add(topic); } public OffsetDateTime getDateTimeCreated() { return dateTimeCreated; } public void setDateTimeCreated(OffsetDateTime dateCreated) { this.dateTimeCreated = dateCreated; } public Long getId() { return id; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public Set<Metadata> getMetadata() { return metadata; } public void setMetadata(Set<Metadata> metadata) { this.metadata = metadata; } 
public SeverityEnum getSeverity() { return severity; } public void setSeverity(SeverityEnum severity) { this.severity = severity; } public String getSource() { return source; } public void setSource(String source) { this.source = source; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public Set<String> getTopics() { return topics; } public void setTopics(Set<String> topics) { this.topics = topics; } public boolean hasTitle() { return title != null; } @PrePersist public void updateSeverityNumber() { if (severity != null) { this.severityNumber = severity.ordinal(); } } public static List<Message> findAllMessagesBySeverityBetweenTime(EntityManagerFactory entityManagerFactory, Severity severity, OffsetDateTime startTimestamp, OffsetDateTime endTimestamp) { logger.trace("Searching for all messages by severity between time"); EntityManager entityManager = entityManagerFactory.createEntityManager(); TypedQuery<Message> msgQuery = entityManager.createNamedQuery("message.with_severity_between_time", Message.class); msgQuery.setParameter("endTime", endTimestamp); msgQuery.setParameter("startTime", startTimestamp); msgQuery.setParameter("severity", severity.getSeverity().ordinal()); List<Message> matchedMessages = msgQuery.getResultList(); entityManager.close(); logger.trace("Found {} messages with severity {} between {} and {} ", matchedMessages.size(), severity, startTimestamp.toString(), endTimestamp.toString()); return matchedMessages; } public static Message generateMessage(MessageDTO messageDTO) { Message msg = new Message(messageDTO.getSource(), messageDTO.getDetails(), messageDTO.getSeverity(), messageDTO.getDateTimeCreated(), messageDTO.getTitle()); messageDTO.getTopics().forEach(msg::addTopic); if (messageDTO.getMetadata() != null) { for (MetadataDTO md : messageDTO.getMetadata()) { msg.addMetadata(md.getKey(), md.getValue()); } } return msg; } }
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.archive;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;

import docking.ActionContext;
import docking.action.DockingAction;
import docking.action.MenuData;
import docking.tool.ToolConstants;
import ghidra.app.CorePluginPackage;
import ghidra.framework.main.*;
import ghidra.framework.model.*;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.util.PluginStatus;
import ghidra.framework.preferences.Preferences;
import ghidra.util.HelpLocation;
import ghidra.util.Msg;
import ghidra.util.task.*;

/**
 * The archive plugin provides menu action from the front end allowing the
 * user to archive a project or restore an archived project.
 */
//@formatter:off
@PluginInfo(
    status = PluginStatus.RELEASED,
    packageName = CorePluginPackage.NAME,
    category = ArchivePlugin.GROUP_NAME,
    shortDescription = "Archives/restores projects",
    description = "The archive plugin provides a menu action from the project window " +
        "allowing the user to archive a project or restore an archived project."
)
//@formatter:on
public class ArchivePlugin extends Plugin implements FrontEndOnly, ProjectListener {

    private static final String PROJECT_GROUP_C_2 = "CProject2";
    static final String LAST_ARCHIVE_DIR = "last.project.archive.dir";
    static final String TOOL_RUNNING_TITLE = "Cannot Archive while Tools are Running";
    static final String GROUP_NAME = "Archiving";
    static final String ARCHIVE_EXTENSION = ".gar";
    static final String DOT_DOT_DOT = ". . .";
    static final String TOOLS_FOLDER_NAME = "tools";
    static final String GROUPS_FOLDER_NAME = "groups";
    static final String SAVE_FOLDER_NAME = "save";
    static final String ARCHIVE_ERROR_TITLE = "Error Archiving Project";
    static final String RESTORE_ERROR_TITLE = "Error Restoring Project";
    static final String DB_LOCK_EXT = ".ulock";
    static final String JAR_VERSION_TAG = "JAR_FORMAT";
    static final SimpleDateFormat formatter = new SimpleDateFormat("yyyy_MM_dd");

    // project properties file should not be restored
    static final String PROJECT_PROPERTY_FILE = "project.prp";

    // current project state file should not be restored
    static final String PROJECT_STATE_FILE = "projectState";

    // old project state folder should not be restored
    static final String OLD_PROJECT_SAVE_DIR = "save";

    // old project group folder should not be restored
    static final String OLD_PROJECT_GROUPS_DIR = "groups";

    // Old folder properties file should not be restored
    static final String OLD_FOLDER_PROPERTIES_FILE = ".properties";

    private ArchiveDialog archiveDialog;
    private RestoreDialog restoreDialog;
    private String lastRestoreArchivePathName; // The path name of the archive file.
    private ProjectLocator lastRestoreLocator;
    private DockingAction archiveAction;
    private DockingAction restoreAction;
    private volatile boolean isArchiving;
    private volatile boolean isRestoring;
    private TaskListener archivingListener;
    private TaskListener restoringListener;

    //////////////////////////////////////////////////////////////////

    /**
     * The archive plugin provides menu action from the front end allowing the
     * user to archive a project or restore an archived project.
     * @param tool the tool that contains this plugin. The actions will only
     * appear if the tool is the Ghidra front end tool.
     */
    public ArchivePlugin(PluginTool tool) {
        super(tool);
        setupActions();
    }

    @Override
    public void dispose() {
        super.dispose();
    }

    /////////////////////////////////////////////////////////////////////

    /**
     * @see ghidra.framework.model.ProjectListener#projectClosed(Project)
     */
    @Override
    public void projectClosed(Project project) {
        // No open project: archiving is impossible, restoring becomes possible.
        archiveAction.setEnabled(false);
        restoreAction.setEnabled(true);
    }

    /**
     * @see ghidra.framework.model.ProjectListener#projectOpened(Project)
     */
    @Override
    public void projectOpened(Project project) {
        // Open project: archiving is possible, restoring is not.
        archiveAction.setEnabled(true);
        restoreAction.setEnabled(false);
    }

    /**
     * for JUnits...
     */
    boolean isArchiving() {
        return isArchiving;
    }

    /**
     * for JUnits...
     */
    boolean isRestoring() {
        return isRestoring;
    }

    /**
     * Sets up the menu actions that this plugin wants to listen for
     */
    private void setupActions() {
        archiveAction = new DockingAction("Archive Project", getName()) {
            @Override
            public void actionPerformed(ActionContext context) {
                archiveProject();
            }
        };
        ProjectManager projectManager = tool.getProjectManager();
        String[] archiveMenuPath = { ToolConstants.MENU_FILE, "Archive Current Project..." };
        archiveAction.setMenuBarData(new MenuData(archiveMenuPath, PROJECT_GROUP_C_2));
        archiveAction.setEnabled(projectManager.getActiveProject() != null);
        archiveAction.setHelpLocation(new HelpLocation("FrontEndPlugin", "Archive_Project"));

        restoreAction = new DockingAction("Restore Archived Project", getName()) {
            @Override
            public void actionPerformed(ActionContext context) {
                restoreProject();
            }
        };
        String[] restoreMenuPath = { ToolConstants.MENU_FILE, "Restore Project..." };
        restoreAction.setMenuBarData(new MenuData(restoreMenuPath, PROJECT_GROUP_C_2));
        restoreAction.setEnabled(projectManager.getActiveProject() == null);
        restoreAction.setHelpLocation(new HelpLocation("FrontEndPlugin", "Restore_Project"));

        // Actions are only installed in the front-end tool.
        if (tool instanceof FrontEndTool) {
            tool.addAction(archiveAction);
            tool.addAction(restoreAction);
            ((FrontEndTool) tool).addProjectListener(this);
        }
        if (tool.getProject() == null) {
            archiveAction.setEnabled(false);
        }
        else {
            restoreAction.setEnabled(false);
        }
    }

    /**
     * menu listener for File | Archive ...
     */
    private void archiveProject() {
        FrontEndTool feTool = (FrontEndTool) tool;
        Project activeProject = AppInfo.getActiveProject();
        if (activeProject.getToolManager().getRunningTools().length > 0) {
            Msg.showInfo(getClass(), tool.getToolFrame(), TOOL_RUNNING_TITLE,
                "You must close running tools before starting the archive process.");
            return;
        }

        // Persist current front-end tool state and project before archiving.
        activeProject.saveToolTemplate("FRONTEND", feTool.saveToolToToolTemplate());
        activeProject.save();

        if (archiveDialog == null) {
            archiveDialog = new ArchiveDialog(this);
        }
        ProjectLocator projectLocator = activeProject.getProjectLocator();
        String archivePathName = getArchivePathName(projectLocator);
        if (!archiveDialog.showDialog(projectLocator, archivePathName, tool)) {
            return; // user cancelled
        }
        archivePathName = archiveDialog.getArchivePathName();
        File archiveJar = new File(archivePathName);
        File parentFile = archiveJar.getParentFile();
        Preferences.setProperty(LAST_ARCHIVE_DIR, parentFile.getAbsolutePath());

        isArchiving = true;
        archivingListener = new TaskListener() {
            @Override
            public void taskCompleted(Task task) {
                isArchiving = false;
            }

            @Override
            public void taskCancelled(Task task) {
                isArchiving = false;
            }
        };

        Task task = new ArchiveTask(activeProject, archiveJar);
        task.addTaskListener(archivingListener);
        new TaskLauncher(task, tool.getToolFrame());
    }

    /**
     * Builds the default archive path: &lt;lastDir&gt;/&lt;projectName&gt;_&lt;yyyy_MM_dd&gt;.gar
     */
    private String getArchivePathName(ProjectLocator projectLocator) {
        String defaultDirString = projectLocator.getLocation();
        String dirString = Preferences.getProperty(LAST_ARCHIVE_DIR, defaultDirString);
        String dateString = formatter.format(new Date());
        String projectName = projectLocator.getName();
        if (!dirString.endsWith(File.separator)) {
            dirString = dirString + File.separator;
        }
        return dirString + projectName + "_" + dateString + ArchivePlugin.ARCHIVE_EXTENSION;
    }

    /**
     * menu listener for File | Restore Archive ...
     */
    private void restoreProject() {
        if (restoreDialog == null) {
            restoreDialog = new RestoreDialog(this);
        }
        String archiveName = lastRestoreArchivePathName;
        ProjectLocator locator = lastRestoreLocator;
        if (!restoreDialog.showDialog(archiveName, locator)) {
            return; // user cancelled
        }
        lastRestoreArchivePathName = restoreDialog.getArchivePathName();
        lastRestoreLocator = restoreDialog.getRestoreURL();

        File archiveJar = new File(lastRestoreArchivePathName);
        try {
            if (!isJarFormat(archiveJar)) {
                Msg.showError(this, null, "File Format Error",
                    "Can't read the file: " + lastRestoreArchivePathName);
                return;
            }
        }
        catch (IOException e) {
            Msg.showError(this, null, "File Format Error",
                "Can't read the file: " + lastRestoreArchivePathName, e);
            return;
        }

        isRestoring = true;
        restoringListener = new TaskListener() {
            @Override
            public void taskCompleted(Task task) {
                isRestoring = false;
            }

            @Override
            public void taskCancelled(Task task) {
                isRestoring = false;
            }
        };

        Task task = new RestoreTask(lastRestoreLocator, archiveJar, this);
        task.addTaskListener(restoringListener);
        new TaskLauncher(task, tool.getToolFrame());
    }

    /**
     * Return true if the jar file contains the JAR_FORMAT tag to indicate
     * the new jar file format.
     *
     * <p>Fixed: the stream is now closed via try-with-resources even when reading throws,
     * and an archive with fewer than two entries now reports "not jar format" instead of
     * throwing a NullPointerException.
     *
     * @param jarFile the candidate project archive
     * @throws IOException if the file cannot be read as a jar
     */
    private boolean isJarFormat(File jarFile) throws IOException {
        try (JarInputStream jarIn = new JarInputStream(new FileInputStream(jarFile))) {
            jarIn.getNextJarEntry(); // skip the first entry; the next one should be JAR_FORMAT
            JarEntry entry = jarIn.getNextJarEntry();
            if (entry == null) {
                return false; // too few entries - not a valid project archive
            }
            return entry.getName().equalsIgnoreCase(ArchivePlugin.JAR_VERSION_TAG);
        }
    }

    /**
     * Gets the project name for the indicated project archive file.
     * @param archivePathName the archive project file.
     * @return the project name or null if the file is not a valid
     * project archive file.
     */
    static String getProjectName(String archivePathName) {
        if (archivePathName == null) {
            return null;
        }
        File archiveFile = new File(archivePathName);
        // try-with-resources replaces the old manual close chains; behavior is unchanged:
        // any IOException still results in a null return.
        try (FileInputStream fileIn = new FileInputStream(archiveFile);
                JarInputStream jarIn = new JarInputStream(fileIn)) {
            while (true) {
                ZipEntry zipEntry = jarIn.getNextEntry();
                if (zipEntry == null) {
                    break;
                }
                String name = zipEntry.getName();
                jarIn.closeEntry();
                if (name.endsWith(ProjectLocator.getProjectExtension())) {
                    int endIndex = name.length() - ProjectLocator.getProjectExtension().length();
                    return name.substring(0, endIndex);
                }
            }
        }
        catch (IOException e) {
            // just return null below
        }
        return null;
    }

    /**
     * Deletes the partially-restored project at the given location, reporting any
     * files that could not be removed.
     */
    void cleanupRestoredProject(ProjectLocator projectLocator) {
        ProjectManager projectManager = tool.getProjectManager();

        // delete the project at the given project location
        if (!projectManager.deleteProject(projectLocator)) {
            Msg.showError(this, null, "All Files in Project not Removed",
                "Not all files have been deleted from project " + projectLocator.getName());
        }
    }
}
package com.oracle.st.pm.json.movieTicketing.docStore;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.oracle.st.pm.json.movieTicketing.qbe.GetDocumentById;
import com.oracle.st.pm.json.movieTicketing.utilitiy.CollectionManager;
import com.oracle.st.pm.json.movieTicketing.utilitiy.DBConnection;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import oracle.soda.OracleBatchException;
import oracle.soda.OracleCollection;
import oracle.soda.OracleCursor;
import oracle.soda.OracleDatabase;
import oracle.soda.OracleDocument;
import oracle.soda.OracleException;
import oracle.soda.OracleOperationBuilder;

/**
 * Base class for the movie-ticketing document-store wrappers. Provides shared, timed CRUD
 * helpers over Oracle SODA collections (count, fetch, QBE search, insert, bulk insert, update,
 * index and collection management) plus Gson-based JSON serialization.
 */
public class SodaCollection {

    public static final String ISO_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssXXX";

    protected static final Gson gson = new GsonBuilder().setDateFormat(ISO_DATE_FORMAT).create();

    // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance is only used for
    // log timestamps here, but concurrent callers could garble output - consider DateTimeFormatter.
    private static final SimpleDateFormat sdf = new SimpleDateFormat(SodaCollection.ISO_DATE_FORMAT);

    public SodaCollection() {
        super();
    }

    /**
     * Counts the documents in the named collection, logging the elapsed time.
     * Returns 0 when the collection does not exist.
     */
    protected static long getDocumentCount(OracleDatabase db, String collectionName) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        if (collection != null) {
            OracleOperationBuilder documents = collection.find();
            long result = documents.count();
            long elapsedTime = System.currentTimeMillis() - startTime;
            System.out.println("MovieTicketing.getDocumentCount(\"" + collectionName + "\"): Found " + result +
                               " documents. Elapsed time " + elapsedTime + " ms.");
            return result;
        }
        else {
            long elapsedTime = System.currentTimeMillis() - startTime;
            System.out.println("MovieTicketing.getDocumentCount(\"" + collectionName +
                               "\"): Collection not found. Elapsed time " + elapsedTime + " ms.");
            return 0;
        }
    }

    /**
     * Fetches up to {@code limit} documents from the collection ({@code limit < 0} means all).
     * Returns an empty array when the collection does not exist.
     */
    protected static OracleDocument[] getDocuments(OracleDatabase db, String collectionName,
                                                   int limit) throws OracleException, IOException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        if (collection != null) {
            OracleOperationBuilder operation = collection.find();
            if (limit > -1) {
                operation.limit(limit);
            }
            OracleCursor cursor = operation.getCursor();
            ArrayList<OracleDocument> documentList = new ArrayList<OracleDocument>();
            while (cursor.hasNext()) {
                documentList.add(cursor.next());
            }
            cursor.close();
            OracleDocument[] documents = documentList.toArray(new OracleDocument[0]);
            long elapsedTime = System.currentTimeMillis() - startTime;
            System.out.println("MovieTicketing.getDocuments(\"" + collectionName + "\"): Returned " +
                               documents.length + " documents in " + elapsedTime + " ms.");
            return documents;
        }
        else {
            long elapsedTime = System.currentTimeMillis() - startTime;
            System.out.println("MovieTicketing.getDocuments(\"" + collectionName + "\"): Elapsed time " +
                               elapsedTime + " ms.");
            return new OracleDocument[0];
        }
    }

    /** Fetches a single document by its SODA key. */
    protected static OracleDocument getDocument(OracleDatabase db, String collectionName,
                                                String key) throws OracleException, IOException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        OracleDocument document = collection.findOne(key);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.getDocument(\"" + collectionName + "\"): Elapsed time " +
                           elapsedTime + " ms. Key = \"" + key + "\",");
        return document;
    }

    /** Fetches a single document by application-level id using a QBE filter. */
    protected static OracleDocument getDocumentById(OracleDatabase db, String collectionName,
                                                    int id) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        GetDocumentById qbeDefinition = new GetDocumentById(id);
        OracleDocument qbe = db.createDocumentFromString(gson.toJson(qbeDefinition));
        OracleOperationBuilder operation = collection.find().filter(qbe);
        OracleDocument doc = operation.getOne();
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.getMovieById(\"" + collectionName + "\"): Returned 1 document in " +
                           elapsedTime + " ms. QBE Expression: \"" + gson.toJson(qbeDefinition) + "\".");
        return doc;
    }

    /** Runs a QBE search and returns all matching documents. */
    protected static OracleDocument[] searchCollection(OracleDatabase db, String collectionName,
                                                       String qbeDefinition) throws OracleException, IOException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        OracleDocument qbe = db.createDocumentFromString(qbeDefinition);
        OracleOperationBuilder operation = collection.find().filter(qbe);
        OracleCursor cursor = operation.getCursor();
        ArrayList<OracleDocument> documentList = new ArrayList<OracleDocument>();
        while (cursor.hasNext()) {
            documentList.add(cursor.next());
        }
        cursor.close();
        OracleDocument[] documents = documentList.toArray(new OracleDocument[0]);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.searchCollection(\"" + collectionName + "\"): Returned " +
                           documents.length + " documents in " + elapsedTime + " ms. QBE Expression: \"" +
                           qbeDefinition + "\".");
        return documents;
    }

    /** Inserts one pre-built document and returns the stored version (with key/version set). */
    protected static OracleDocument insertDocument(OracleDatabase db, String collectionName,
                                                   OracleDocument doc) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        doc = collection.insertAndGet(doc);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.insertDocument(\"" + collectionName + "\"): Inserted 1 document in " +
                           elapsedTime + " ms. ");
        return doc;
    }

    /** Serializes {@code object} with Gson and inserts it as a new document. */
    protected static OracleDocument insertDocument(OracleDatabase db, String collectionName,
                                                   Object object) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        OracleDocument doc = collection.insertAndGet(db.createDocumentFromString(gson.toJson(object)));
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.insertDocument(\"" + collectionName + "\"): Inserted 1 document in " +
                           elapsedTime + " ms. ");
        return doc;
    }

    /** Bulk-inserts the documents into an already-open collection. */
    protected static List<OracleDocument> bulkInsert2(OracleCollection collection,
                                                      List<OracleDocument> documents) throws OracleException {
        long startTime = System.currentTimeMillis();
        System.out.println("Invoking insertAndGet");
        List<OracleDocument> results = collection.insertAndGet(documents.iterator());
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.bulkInsert(\"" + collection.admin().getName() + "\"): Inserted " +
                           documents.size() + " documents in " + elapsedTime + " ms.");
        return results;
    }

    /** Opens the collection by name and delegates to {@link #bulkInsert2}. */
    protected static List<OracleDocument> bulkInsert1(OracleDatabase db, String collectionName,
                                                      List<OracleDocument> documents) throws OracleException {
        OracleCollection collection = db.openCollection(collectionName);
        System.out.println("Obtained Soda Collection");
        return bulkInsert2(collection, documents);
    }

    /**
     * Serializes and bulk-inserts the given objects. On a batch failure, logs the index of the
     * failing document and returns an empty list (deliberate best-effort behavior).
     *
     * <p>NOTE(review): the single insert of {@code documents[0]} before the bulk insert appears to
     * be a connectivity probe, but it stores that document twice - confirm with callers.
     */
    protected static List<OracleDocument> bulkInsert(OracleDatabase db, String collectionName,
                                                     SodaCollection[] documents) throws OracleException {
        if (documents.length > 0) {
            List<OracleDocument> documentList = new ArrayList<OracleDocument>();
            for (int i = 0; i < documents.length; i++) {
                OracleDocument doc = db.createDocumentFromString(gson.toJson(documents[i]));
                documentList.add(doc);
            }
            System.out.println("Converted Array to ArrayList");
            try {
                insertDocument(db, collectionName, documents[0]);
                System.out.println("Insert Succeeded");
                return bulkInsert1(db, collectionName, documentList);
            }
            catch (OracleBatchException obe) {
                System.out.println(obe.getProcessedCount());
                System.out.println(gson.toJson(documents[obe.getProcessedCount()]));
                obe.printStackTrace();
            }
        }
        return new ArrayList<OracleDocument>();
    }

    /**
     * Replaces the document identified by key+version (optimistic locking).
     * @return true if a document was replaced, false if the key/version did not match
     */
    protected static boolean updateDocument(OracleDatabase db, String collectionName, String key,
                                            String version, OracleDocument newDocument) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        OracleOperationBuilder operation = collection.find().key(key).version(version);
        boolean status = operation.replaceOne(newDocument);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.updateDocument(\"" + collectionName + "\"): Updated 1 document in " +
                           elapsedTime + " ms. ");
        return status;
    }

    /** Creates an index on the collection from the supplied JSON index specification. */
    protected static OracleCollection createIndex(OracleCollection collection,
                                                  OracleDocument indexDefinition) throws OracleException {
        System.out.println(indexDefinition.getContentAsString());
        long startTime = System.currentTimeMillis();
        collection.admin().createIndex(indexDefinition);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.createIndex(\"" + collection.admin().getName() +
                           "\"): Created Index in " + elapsedTime + " ms.");
        return collection;
    }

    /**
     * Creates the collection using its configured metadata (with any "indexes" entry stripped,
     * since index definitions are not valid collection properties).
     */
    protected static OracleCollection createCollection(OracleDatabase db,
                                                       String collectionName) throws OracleException {
        JsonObject collectionDefinition = getCollectionProperties(collectionName);
        OracleDocument collectionProperties = null;
        if (collectionDefinition != null) {
            if (collectionDefinition.has("indexes")) {
                collectionDefinition.remove("indexes");
            }
            collectionProperties = db.createDocumentFromString(gson.toJson(collectionDefinition));
        }
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.admin().createCollection(collectionName, collectionProperties);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.createCollection(\"" + collectionName +
                           "\"): Created collection in " + elapsedTime + " ms.");
        return collection;
    }

    /** Creates the collection with explicit properties ({@code null} means defaults). */
    protected static OracleCollection createCollection(OracleDatabase db, String collectionName,
                                                       OracleDocument collectionProperties) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.admin().createCollection(collectionName, collectionProperties);
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.createCollection(\"" + collectionName +
                           "\"): Created collection in " + elapsedTime + " ms.");
        return collection;
    }

    /** Drops the collection if it exists; a missing collection is not an error. */
    protected static void dropCollection(OracleDatabase db, String collectionName) throws OracleException {
        long startTime = System.currentTimeMillis();
        OracleCollection collection = db.openCollection(collectionName);
        if (collection != null) {
            collection.admin().drop();
        }
        long elapsedTime = System.currentTimeMillis() - startTime;
        System.out.println("MovieTicketing.dropCollection(\"" + collectionName +
                           "\"): Dropped collection in " + elapsedTime + " ms.");
    }

    /** Drops and recreates the collection with no initial documents. */
    protected static OracleCollection recreateCollection(OracleDatabase db,
                                                         String collectionName) throws OracleException {
        return recreateCollection(db, collectionName, null);
    }

    /**
     * Drops and recreates the collection, reloads the supplied documents (if any), and rebuilds
     * the configured indexes, skipping spatial indexes when the database does not support them.
     */
    protected static OracleCollection recreateCollection(OracleDatabase db, String collectionName,
                                                         List<OracleDocument> documents) throws OracleException {
        JsonObject collectionDefinition = getCollectionProperties(collectionName);
        JsonArray indexMetadata = null;
        if ((collectionDefinition != null) && (collectionDefinition.has("indexes"))) {
            indexMetadata = collectionDefinition.getAsJsonArray("indexes");
            collectionDefinition.remove("indexes");
        }
        OracleDocument collectionProperties = null;
        if (collectionDefinition != null) {
            collectionProperties = db.createDocumentFromString(gson.toJson(collectionDefinition));
        }
        dropCollection(db, collectionName);
        OracleCollection collection = createCollection(db, collectionName, collectionProperties);
        if ((documents != null) && (documents.size() > 0)) {
            bulkInsert2(collection, documents);
        }
        if (indexMetadata != null) {
            for (int i = 0; i < indexMetadata.size(); i++) {
                JsonObject indexDefinition = indexMetadata.get(i).getAsJsonObject();
                // System.out.println(indexDefinition.toString());
                if ((indexDefinition.has("spatial")) && (!DBConnection.isNearSupported())) {
                    System.out.println(sdf.format(new Date()) +
                                       ": Skipped creation of unsupported spatial index");
                }
                else {
                    createIndex(collection, db.createDocumentFromString(gson.toJson(indexDefinition)));
                }
            }
        }
        return collection;
    }

    /** Creates each index in {@code indexMetadata}, skipping unsupported spatial indexes. */
    protected static void createIndexes(OracleDatabase db, String collectionName,
                                        JsonArray indexMetadata) throws OracleException {
        OracleCollection collection = db.openCollection(collectionName);
        for (int i = 0; i < indexMetadata.size(); i++) {
            JsonObject indexDefinition = indexMetadata.get(i).getAsJsonObject();
            // System.out.println(indexDefinition.toString());
            if ((indexDefinition.has("spatial")) && (!DBConnection.isNearSupported())) {
                System.out.println(sdf.format(new Date()) +
                                   ": Skipped creation of unsupported spatial index");
            }
            else {
                createIndex(collection, db.createDocumentFromString(gson.toJson(indexDefinition)));
            }
        }
    }

    /**
     * Creates the configured indexes for the collection; a no-op when no "indexes" entry exists.
     * Fixed: the original contained the double assignment
     * {@code JsonArray indexMetadata = indexMetadata = ...}, which only compiled by accident.
     */
    protected static void createIndexes(OracleDatabase db, String collectionName) throws OracleException {
        JsonObject collectionDefinition = CollectionManager.collectionMetadata.getAsJsonObject(collectionName);
        if ((collectionDefinition == null) || (!collectionDefinition.has("indexes"))) {
            return;
        }
        JsonArray indexMetadata = collectionDefinition.getAsJsonArray("indexes");
        createIndexes(db, collectionName, indexMetadata);
    }

    /**
     * Returns a deep copy of the configured collection metadata (cloned via a JSON round-trip so
     * callers may mutate it, e.g. remove "indexes", without corrupting the shared configuration).
     */
    protected static JsonObject getCollectionProperties(String collectionName) {
        // Clone the collection Metadata
        JsonParser p = new JsonParser();
        JsonObject collectionDefinition = CollectionManager.collectionMetadata.getAsJsonObject(collectionName);
        if (collectionDefinition != null) {
            collectionDefinition = p.parse(gson.toJson(collectionDefinition)).getAsJsonObject();
        }
        return collectionDefinition;
    }

    /** Serializes this instance to JSON using the shared Gson configuration. */
    public String toJSON() {
        return gson.toJson(this);
    }

    /** Deserializes a {@link SodaCollection} from its JSON representation. */
    public static SodaCollection fromJSON(String json) {
        return gson.fromJson(json, SodaCollection.class);
    }
}
/*
 * Copyright 2002-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans.factory.support;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.beans.BeanInstantiationException;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.cglib.core.ClassLoaderAwareGeneratorStrategy;
import org.springframework.cglib.core.SpringNamingPolicy;
import org.springframework.cglib.proxy.Callback;
import org.springframework.cglib.proxy.CallbackFilter;
import org.springframework.cglib.proxy.Enhancer;
import org.springframework.cglib.proxy.Factory;
import org.springframework.cglib.proxy.MethodInterceptor;
import org.springframework.cglib.proxy.MethodProxy;
import org.springframework.cglib.proxy.NoOp;
import org.springframework.core.ResolvableType;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * Default object instantiation strategy for use in BeanFactories.
 *
 * <p>Uses CGLIB to generate subclasses dynamically if methods need to be
 * overridden by the container to implement <em>Method Injection</em>.
 *
 * @author Rod Johnson
 * @author Juergen Hoeller
 * @author Sam Brannen
 * @since 1.1
 */
public class CglibSubclassingInstantiationStrategy extends SimpleInstantiationStrategy {

	// NOTE: the three index constants below must line up with both
	// CglibSubclassCreator.CALLBACK_TYPES and the Callback array installed
	// in CglibSubclassCreator.instantiate().

	/**
	 * Index in the CGLIB callback array for passthrough behavior,
	 * in which case the subclass won't override the original class.
	 */
	private static final int PASSTHROUGH = 0;

	/**
	 * Index in the CGLIB callback array for a method that should
	 * be overridden to provide <em>method lookup</em>.
	 */
	private static final int LOOKUP_OVERRIDE = 1;

	/**
	 * Index in the CGLIB callback array for a method that should
	 * be overridden using generic <em>method replacer</em> functionality.
	 */
	private static final int METHOD_REPLACER = 2;


	@Override
	protected Object instantiateWithMethodInjection(RootBeanDefinition bd, @Nullable String beanName, BeanFactory owner) {
		return instantiateWithMethodInjection(bd, beanName, owner, null);
	}

	@Override
	protected Object instantiateWithMethodInjection(RootBeanDefinition bd, @Nullable String beanName, BeanFactory owner,
			@Nullable Constructor<?> ctor, Object... args) {

		// Must generate CGLIB subclass...
		return new CglibSubclassCreator(bd, owner).instantiate(ctor, args);
	}


	/**
	 * An inner class created for historical reasons to avoid external CGLIB dependency
	 * in Spring versions earlier than 3.2.
	 */
	private static class CglibSubclassCreator {

		// Order corresponds to PASSTHROUGH / LOOKUP_OVERRIDE / METHOD_REPLACER.
		private static final Class<?>[] CALLBACK_TYPES = new Class<?>[]
				{NoOp.class, LookupOverrideMethodInterceptor.class, ReplaceOverrideMethodInterceptor.class};

		private final RootBeanDefinition beanDefinition;

		private final BeanFactory owner;

		CglibSubclassCreator(RootBeanDefinition beanDefinition, BeanFactory owner) {
			this.beanDefinition = beanDefinition;
			this.owner = owner;
		}

		/**
		 * Create a new instance of a dynamically generated subclass implementing the
		 * required lookups.
		 * @param ctor constructor to use. If this is {@code null}, use the
		 * no-arg constructor (no parameterization, or Setter Injection)
		 * @param args arguments to use for the constructor.
		 * Ignored if the {@code ctor} parameter is {@code null}.
		 * @return new instance of the dynamically generated subclass
		 */
		public Object instantiate(@Nullable Constructor<?> ctor, Object... args) {
			Class<?> subclass = createEnhancedSubclass(this.beanDefinition);
			Object instance;
			if (ctor == null) {
				instance = BeanUtils.instantiateClass(subclass);
			}
			else {
				try {
					// Look up the constructor on the enhanced subclass matching the
					// original constructor's parameter types.
					Constructor<?> enhancedSubclassConstructor = subclass.getConstructor(ctor.getParameterTypes());
					instance = enhancedSubclassConstructor.newInstance(args);
				}
				catch (Exception ex) {
					throw new BeanInstantiationException(this.beanDefinition.getBeanClass(),
							"Failed to invoke constructor for CGLIB enhanced subclass [" + subclass.getName() + "]", ex);
				}
			}
			// SPR-10785: set callbacks directly on the instance instead of in the
			// enhanced class (via the Enhancer) in order to avoid memory leaks.
			Factory factory = (Factory) instance;
			factory.setCallbacks(new Callback[] {NoOp.INSTANCE,
					new LookupOverrideMethodInterceptor(this.beanDefinition, this.owner),
					new ReplaceOverrideMethodInterceptor(this.beanDefinition, this.owner)});
			return instance;
		}

		/**
		 * Create an enhanced subclass of the bean class for the provided bean
		 * definition, using CGLIB.
		 */
		private Class<?> createEnhancedSubclass(RootBeanDefinition beanDefinition) {
			Enhancer enhancer = new Enhancer();
			enhancer.setSuperclass(beanDefinition.getBeanClass());
			enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE);
			if (this.owner instanceof ConfigurableBeanFactory) {
				// Generate the subclass with the bean factory's class loader so the
				// generated class can see the bean class and its dependencies.
				ClassLoader cl = ((ConfigurableBeanFactory) this.owner).getBeanClassLoader();
				enhancer.setStrategy(new ClassLoaderAwareGeneratorStrategy(cl));
			}
			enhancer.setCallbackFilter(new MethodOverrideCallbackFilter(beanDefinition));
			enhancer.setCallbackTypes(CALLBACK_TYPES);
			// Only the class is created here; callbacks are installed per instance
			// in instantiate() (see SPR-10785 note above).
			return enhancer.createClass();
		}
	}


	/**
	 * Class providing hashCode and equals methods required by CGLIB to
	 * ensure that CGLIB doesn't generate a distinct class per bean.
	 * Identity is based on class and bean definition.
	 */
	private static class CglibIdentitySupport {

		private final RootBeanDefinition beanDefinition;

		public CglibIdentitySupport(RootBeanDefinition beanDefinition) {
			this.beanDefinition = beanDefinition;
		}

		public RootBeanDefinition getBeanDefinition() {
			return this.beanDefinition;
		}

		@Override
		public boolean equals(@Nullable Object other) {
			return (other != null && getClass() == other.getClass() &&
					this.beanDefinition.equals(((CglibIdentitySupport) other).beanDefinition));
		}

		@Override
		public int hashCode() {
			return this.beanDefinition.hashCode();
		}
	}


	/**
	 * CGLIB callback for filtering method interception behavior.
	 */
	private static class MethodOverrideCallbackFilter extends CglibIdentitySupport implements CallbackFilter {

		private static final Log logger = LogFactory.getLog(MethodOverrideCallbackFilter.class);

		public MethodOverrideCallbackFilter(RootBeanDefinition beanDefinition) {
			super(beanDefinition);
		}

		@Override
		public int accept(Method method) {
			// Map each method to the callback index it should use.
			MethodOverride methodOverride = getBeanDefinition().getMethodOverrides().getOverride(method);
			if (logger.isTraceEnabled()) {
				logger.trace("MethodOverride for " + method + ": " + methodOverride);
			}
			if (methodOverride == null) {
				return PASSTHROUGH;
			}
			else if (methodOverride instanceof LookupOverride) {
				return LOOKUP_OVERRIDE;
			}
			else if (methodOverride instanceof ReplaceOverride) {
				return METHOD_REPLACER;
			}
			throw new UnsupportedOperationException("Unexpected MethodOverride subclass: " +
					methodOverride.getClass().getName());
		}
	}


	/**
	 * CGLIB MethodInterceptor to override methods, replacing them with an
	 * implementation that returns a bean looked up in the container.
	 */
	private static class LookupOverrideMethodInterceptor extends CglibIdentitySupport implements MethodInterceptor {

		private final BeanFactory owner;

		public LookupOverrideMethodInterceptor(RootBeanDefinition beanDefinition, BeanFactory owner) {
			super(beanDefinition);
			this.owner = owner;
		}

		@Override
		public Object intercept(Object obj, Method method, Object[] args, MethodProxy mp) throws Throwable {
			// Cast is safe, as CallbackFilter filters are used selectively.
			LookupOverride lo = (LookupOverride) getBeanDefinition().getMethodOverrides().getOverride(method);
			Assert.state(lo != null, "LookupOverride not found");
			Object[] argsToUse = (args.length > 0 ? args : null);  // if no-arg, don't insist on args at all
			if (StringUtils.hasText(lo.getBeanName())) {
				// Explicit bean name specified on the lookup override.
				Object bean = (argsToUse != null ? this.owner.getBean(lo.getBeanName(), argsToUse) :
						this.owner.getBean(lo.getBeanName()));
				// Detect package-protected NullBean instance through equals(null) check
				return (bean.equals(null) ? null : bean);
			}
			else {
				// Find target bean matching the (potentially generic) method return type
				ResolvableType genericReturnType = ResolvableType.forMethodReturnType(method);
				return (argsToUse != null ? this.owner.getBeanProvider(genericReturnType).getObject(argsToUse) :
						this.owner.getBeanProvider(genericReturnType).getObject());
			}
		}
	}


	/**
	 * CGLIB MethodInterceptor to override methods, replacing them with a call
	 * to a generic MethodReplacer.
	 */
	private static class ReplaceOverrideMethodInterceptor extends CglibIdentitySupport implements MethodInterceptor {

		private final BeanFactory owner;

		public ReplaceOverrideMethodInterceptor(RootBeanDefinition beanDefinition, BeanFactory owner) {
			super(beanDefinition);
			this.owner = owner;
		}

		@Override
		public Object intercept(Object obj, Method method, Object[] args, MethodProxy mp) throws Throwable {
			ReplaceOverride ro = (ReplaceOverride) getBeanDefinition().getMethodOverrides().getOverride(method);
			Assert.state(ro != null, "ReplaceOverride not found");
			// TODO could cache if a singleton for minor performance optimization
			MethodReplacer mr = this.owner.getBean(ro.getMethodReplacerBeanName(), MethodReplacer.class);
			return mr.reimplement(obj, method, args);
		}
	}

}
/* Copyright 2008 Matt Radkie Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * * $Revision$ * $Date$ * $Author$ * $HeadURL$ * */ package org.rlcommunity.rlglue.codec.taskspec; import org.rlcommunity.rlglue.codec.taskspec.ranges.DoubleRange; import org.rlcommunity.rlglue.codec.taskspec.ranges.IntRange; /** * This class is used to store and parse the information given to an Agent in * the RL-Glue framework. The Task Spec stores information the Agent needs * regarding the environment, such as the number of actions, observations, and * their ranges. For more information please read the * <a href="http://rlai.cs.ualberta.ca/RLBB/TaskSpecification.html"> * RL-Glue Task Spec info</a> * <p> * This class was written to handle future revisions to the Task Spec while * providing backwards compatibility with old Task Spec versions. This is * accomplished through the use of the factory design pattern. The * TaskSpecDelegate object is the medium through which the TaskSpec communicates * with different implementations of the Task Spec versions. So far, each * Task Spec version has incrementally added functionality, but this might not * always be the case, so this framework was designed to robustly accomodate * future versions. * * <h2>This most recent Implementation of TaskSpecDelegate is TaskSpecVRLGLUE3</h2> * * @author Matt Radkie */ public class TaskSpec { /** * Generic object extended by all versions of TaskSpec parsers */ private TaskSpecDelegate theTSO = null; /** * The version of the Task Spec. 
*/ private String TSVersion = "0"; /** * Gets the Task Spec version. * @deprecated Moving to a string version * * @return Integer value of the Task Spec version. */ public int getVersion() { try { return (Integer.parseInt(TSVersion)); } catch (NumberFormatException ex) { System.err.println("Asked for version of task spec through deprecated int method and could not make it work. Version is: " + getVersionString()); return 0; } } /** * Quick sanity check. It parses the task spec into an Task Spec TSA. Then, it * uses the string representation of TSA to make TSAB. Finally, it makes sure that the * string representation of TSA is the same as TSB. * @param theTaskSpecString * @return Whether this task spec appears to be valid */ public static boolean checkTaskSpec(String theTaskSpecString) { TaskSpec TS = new TaskSpec(theTaskSpecString); try { boolean match = TS.getStringRepresentation().equals(new TaskSpec(TS.getStringRepresentation()).getStringRepresentation()); if (!match) { System.err.println("Task spec check failed: if the task spec means what we think it means, these two should be equal:"); System.err.println("First Construction:\t" + TS.getStringRepresentation()); System.err.println("Second Construction:\t" + new TaskSpec(TS.getStringRepresentation()).getStringRepresentation()); } return match; } catch (Throwable T) { System.err.println("There is a problem parsing the task spec you were checking: " + T); return false; } } /** * @since RL-Glue3.0 * @return a string representing the version of the task spec that has been parsed */ public String getVersionString() { return theTSO.getVersionString(); } /** * Constructor that takes a string adhereing to the Task Spec language * protocol. This string is parsed out by the appropriate version of the * Task Spec. 
* * @param taskSpec String following the Task Spec language */ public TaskSpec(String taskSpec) { String errorAccumulator = "Task Spec Parse Results:"; try { theTSO = new TaskSpecVRLGLUE3(taskSpec); TSVersion = theTSO.getVersionString(); } catch (Exception e) { errorAccumulator += "\nParsing as TaskSpecVRLGLUE3: " + e.toString(); } if (theTSO == null) { try { TaskSpecV3 theV3TSO = new TaskSpecV3(taskSpec); //Later in here, make a taskSpecVRLGlue3 constructor that takes a taskspecv3 theTSO = new TaskSpecVRLGLUE3(theV3TSO); TSVersion = "3"; } catch (Exception e) { errorAccumulator += "\nParsing as V3: " + e.toString(); } } if (theTSO == null) { try { TaskSpecV2 oldV2Spec = new TaskSpecV2(taskSpec); TaskSpecV3 newerV3Spec = new TaskSpecV3(oldV2Spec); theTSO = new TaskSpecVRLGLUE3(newerV3Spec); TSVersion = "2"; } catch (Exception e) { errorAccumulator += "\nParsing as V2: " + e.toString(); } } if (theTSO == null) { try { TaskSpecVersionOnly versionOnlySpec = new TaskSpecVersionOnly(taskSpec); theTSO = new TaskSpecVRLGLUE3(versionOnlySpec); TSVersion = theTSO.getVersionString(); } catch (Exception e) { errorAccumulator += "\nParsing as TaskSpecVersionOnly: " + e.toString(); } } if (theTSO == null) { System.err.println("Task spec couldn't be parsed"); throw new IllegalArgumentException(errorAccumulator); } } public TaskSpec(TaskSpecDelegate theTaskSpecDelegate) { this.theTSO = theTaskSpecDelegate; this.TSVersion = theTaskSpecDelegate.getVersionString(); } /** * Returns the string representation of the Task Spec object. This string * representation follows the Task Spec language as outlined * <a href="http://rlai.cs.ualberta.ca/RLBB/TaskSpecification.html"> * here</a> * * * @deprecated We never should have overloaded toString in this way. * @return String representation of the Task Spec */ public String toString() { return getStringRepresentation(); } /** * Returns the string representation of the Task Spec object. 
This string * representation follows the Task Spec language as outlined * <a href="http://glue.rl-community.org/Home/rl-glue/task-spec-language"> * http://glue.rl-community.org/Home/rl-glue/task-spec-language</a> * * @return String representation of the Task Spec */ public String getStringRepresentation() { return theTSO.getStringRepresentation(); } /** * Returns a string containing debug information about the Task Spec. This * debug information is usually printed to the screen, but returning it as * a string allows the caller to print it out to log files etc as well. * @deprecated This is dumb. * * @return String containing debug information for the Task Spec. */ public String dump() { return theTSO.dump(); } /** * Checks if the observation min at index is negative infinity. * * @param index Integer index of the obs_min array. * @return True if obs_min[index] is negative infinity, false otherwise. */ public boolean isObsMinNegInfinity(int index) { return theTSO.isObsMinNegInfinity(index); } /** * Checks if the action min at index is negative infinity. * * @param index - integer index of the action_mins array. * @return True if action_min[index] is negative infinity, false otherwise. */ public boolean isActionMinNegInfinity(int index) { return theTSO.isActionMinNegInfinity(index); } /** * Checks if the observation max at index is positive infinity. * * @param index Interger index of the obs_maxs array. * @return True if obs_max[index] is positive infinity, false otherwise. */ public boolean isObsMaxPosInfinity(int index) { return theTSO.isObsMaxPosInfinity(index); } /** * Checks if the action max at index is positive infinity. * * @param index Integer index of the action_maxs array. * @return True if action_max[index] is positive infinity, false otherwise. */ public boolean isActionMaxPosInfinity(int index) { return theTSO.isActionMaxPosInfinity(index); } /** * Checks if the observation min at index is unknown. * * @param index Integer index of the obs_mins array. 
* @return True if the min value for observation[index] is unknown, false * otherwise. */ public boolean isObsMinUnknown(int index) { return theTSO.isObsMinUnknown(index); } /** * Checks if the observation max at index is unknown. * * @param index Integer index of the obs_max array. * @return True if the max value for observation[index] is unknown, false * otherwise. */ public boolean isObsMaxUnknown(int index) { return theTSO.isObsMaxUnknown(index); } /** * Checks if the min action at index is unknown. * * @param index Integer index of the action_mins array. * @return True if the min value for action[index] is unknown, false * otherwise. */ public boolean isActionMinUnknown(int index) { return theTSO.isActionMinUnknown(index); } /** * Checks if the action max at index is unknown. * * @param index Integer index of the action_maxs array. * @return True if the max value for action[index] is unknown, false * otherwise. */ public boolean isActionMaxUnknown(int index) { return theTSO.isActionMaxUnknown(index); } /** * Checks if the min reward is negative infinity. * * * @return True if the min reward is negative infinity, false * otherwise. */ public boolean isMinRewardNegInf() { return theTSO.isMinRewardNegInf(); } /** * Checks if the max reward is positive infinity. * * * @return True if the max reward is positive infinity, false * otherwise. */ public boolean isMaxRewardInf() { return theTSO.isMaxRewardInf(); } /** * Checks if the min reward is unknown. * * * @return True if the min reward is unknown, false * otherwise. */ public boolean isMinRewardUnknown() { return theTSO.isMinRewardUnknown(); } /** * Checks if the max reward is unknown. * * * @return True if the max reward is unknown, false * otherwise. */ public boolean isMaxRewardUnknown() { return theTSO.isMaxRewardUnknown(); } /** * Gets the version of the Task spec. * * @deprecated Use getVersionString * @return the version of the Task Spec used. 
*/ public double getTaskSpecVersion() { return theTSO.getVersion(); } /** * Gets the episodic characteristic of the Task Spec. * * * @return Char value representing if an environment is episodic * @deprecated use getProblemType() */ public char getEpisodic() { return theTSO.getEpisodic(); } /** * Gets the size of the observation array (Number of observations) * @deprecated This is useless. * * @return The size of the observation array (Number of observations) */ public int getObsDim() { return theTSO.getObsDim(); } /** * Gets the number of descrete observations. * * * @return Integer value for the number of descrete observations */ public int getNumDiscreteObsDims() { return theTSO.getNumDiscreteObsDims(); } /** * Gets the number of continuous observations. * * * @return Integer value for the number of continuous observations. */ public int getNumContinuousObsDims() { return theTSO.getNumContinuousObsDims(); } /** * Gets the types for the observations. * @deprecated I don't like this anymore. * * @return Character array representing the types of the observations. */ public char[] getObsTypes() { return theTSO.getObsTypes(); } /** * Gets the array of mins for the observations. * @deprecated I don't like this anymore. * * @return double[] Array of the min values for the observations. */ public double[] getObsMins() { return theTSO.getObsMins(); } /** * Gets the array of maxs for the observations. * @deprecated I don't like this anymore. * * @return double[] Array of the maxs values for the observations. */ public double[] getObsMaxs() { return theTSO.getObsMaxs(); } /** * Gets the size of the action array (Number of actions) * * * @return The size of the action array (Number of actions) */ public int getActionDim() { return theTSO.getActionDim(); } /** * Gets the number of descrete actions * * * @return Integer number of descrete actions. 
*/ public int getNumDiscreteActionDims() { return theTSO.getNumDiscreteActionDims(); } /** * Gets the number of continous actions * * * @return Integer number of continous actions. */ public int getNumContinuousActionDims() { return theTSO.getNumContinuousActionDims(); } /** * Gets the types for the actions. * * @deprecated I don't like this anymore. * * @return Character array representing the types of the actions. */ public char[] getActionTypes() { return theTSO.getActionTypes(); } /** * Gets the array of mins for the actions. * * @deprecated I don't like this anymore. * * @return double[] Array of the min values for the actions. */ public double[] getActionMins() { return theTSO.getActionMins(); } /** * Gets the array of maxs for the actions. * * @deprecated I don't like this anymore. * * @return double[] Array of the max values for the actions. */ public double[] getActionMaxs() { return theTSO.getActionMaxs(); } /** * Gets the max reward. * * * @return Double value of the max reward. */ public double getRewardMax() { return theTSO.getRewardMax(); } /** * Gets the min reward. * * * @return Double value of the min reward. */ public double getRewardMin() { return theTSO.getRewardMin(); } /** * Gets the string value for the ExtraString. * * 'ExtraString' is new for Task Spec version 3. It allows additional * information to be appended to the end of the Task Spec. When environments * use this feature, agents will require special code to handle this. * * * @return String of additional information appended onto the end of the * Task Spec. */ public String getExtraString() { return theTSO.getExtraString(); } /** * Gets the version of the parser used on the Task Spec. * * * @return Integer version of the parser used on the Task Spec. */ public int getParserVersion() { return theTSO.getParserVersion(); } /** * Main has no purpose in this class other than for debugging. 
This should * have been deleted prior to release, but as it makes on going development * easier, it has been left for now. Ideally in the future, this code will * be removed and moved into test cases. * * @param args */ public static void main(String[] args) { /* String sampleTS = "2:e:2_[f,f]_[-1.2,0.6]_[-0.07,0.07]:1_[i]_[0,2]"; TaskSpec theTSO = new TaskSpec(sampleTS); System.out.println(sampleTS+" is version: "+theTSO.getVersion()); sampleTS="2:e:2_[f,f]_[-1.2,0.6]_[-0.07,0.07]:1_[i]_[0,2]:[]"; theTSO=new TaskSpec(sampleTS); System.out.println(sampleTS+" is version: "+theTSO.getVersion()); sampleTS="2:e:2_[f,f]_[-1.2,0.6]_[-0.07,0.07]:1_[i]_[0,2]:[0,3]"; theTSO=new TaskSpec(sampleTS); System.out.println(sampleTS+" is version: "+theTSO.getVersion()); sampleTS = "2:e:2_[f,f]_[-1.2,0.6]_[-0.07,0.07]:1_[i]_[0,2]:[0,3]:Extra strings and stuff here"; theTSO = new TaskSpec(sampleTS); System.out.println(sampleTS + " is version: " + theTSO.getVersion() + "\n" + theTSO.toString()); System.out.println(theTSO.dump()); sampleTS="2:e:2_[f,f]_[-1.2,0.6]_[-0.07,0.07]:1_[i]_[0,2]:[0,3]:"; theTSO=new TaskSpec(sampleTS); System.out.println(sampleTS+" is version: "+theTSO.getVersion()); sampleTS="2:e:[0,3]"; theTSO=new TaskSpec(sampleTS); System.out.println(sampleTS+" is version: "+theTSO.getVersion()); */ } /** * Get the discount factor. * @since RL-Glue-3.0 */ public double getDiscountFactor() { return theTSO.getDiscountFactor(); } /** * Get the min, max, and special information for the i'th integer observation. * @since RL-Glue-3.0 * @param i */ public IntRange getDiscreteObservationRange(int i) { return theTSO.getDiscreteObservationRange(i); } /** * Get the min, max, and special information for the i'th integer action. * @since RL-Glue-3.0 * @param i */ public IntRange getDiscreteActionRange(int i) { return theTSO.getDiscreteActionRange(i); } /** * Get the min, max, and special information for the i'th double observation. 
* @since RL-Glue-3.0 * @param i */ public DoubleRange getContinuousObservationRange(int i) { return theTSO.getContinuousObservationRange(i); } /** * Get the min, max, and special information for the i'th double action. * @since RL-Glue-3.0 * @param i */ public DoubleRange getContinuousActionRange(int i) { return theTSO.getContinuousActionRange(i); } /** * Get the range of rewards * @since RL-Glue-3.0 */ public DoubleRange getRewardRange() { return theTSO.getRewardRange(); } /** * Replacement for getEpisodic * @return episodic | continuous | something else * @since RL-Glue-3.0 */ String getProblemType() { return theTSO.getProblemType(); } }
package linenux.command;

import java.time.Clock;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;

import linenux.command.parser.FreeTimeArgumentParser;
import linenux.command.result.CommandResult;
import linenux.control.TimeParserManager;
import linenux.model.Schedule;
import linenux.model.Task;
import linenux.time.parser.ISODateWithTimeParser;
import linenux.time.parser.StandardDateWithTimeParser;
import linenux.time.parser.TodayWithTimeParser;
import linenux.time.parser.TomorrowWithTimeParser;
import linenux.util.ArrayListUtil;
import linenux.util.Either;
import linenux.util.LocalDateTimeUtil;
import linenux.util.TimeInterval;

//@@author A0144915A
public class FreeTimeCommand extends AbstractCommand {
    private static final String TRIGGER_WORD = "freetime";
    private static final String DESCRIPTION = "Find a free time slot.";
    private static final String COMMAND_FORMAT = "freetime [st/START_TIME] et/END_TIME";

    // DateTimeFormatter is immutable and thread-safe; build it once instead of
    // per invocation of makeResult.
    private static final DateTimeFormatter OUTPUT_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd h.mma");

    private Schedule schedule;
    private TimeParserManager timeParserManager;
    private FreeTimeArgumentParser argumentParser;

    /**
     * Constructs an {@code FreeTimeCommand}.
     * @param schedule The {@code Schedule} to look for free time.
     */
    public FreeTimeCommand(Schedule schedule) {
        this(schedule, Clock.systemDefaultZone());
    }

    /**
     * Constructs an {@code FreeTimeCommand}.
     * @param schedule The {@code Schedule} to look for free time.
     * @param clock The {@code Clock} used to determine the current time.
     */
    public FreeTimeCommand(Schedule schedule, Clock clock) {
        this.schedule = schedule;
        this.timeParserManager = new TimeParserManager(new ISODateWithTimeParser(), new StandardDateWithTimeParser(), new TodayWithTimeParser(), new TomorrowWithTimeParser());
        this.argumentParser = new FreeTimeArgumentParser(this.timeParserManager, clock);
        this.TRIGGER_WORDS.add(TRIGGER_WORD);
    }

    /**
     * Executes the command based on {@code userInput}. This method operates under the assumption that
     * {@code respondTo(userInput)} is {@code true}.
     * @param userInput A {@code String} representing the user input.
     * @return A {@code CommandResult} representing the result of the command.
     */
    @Override
    public CommandResult execute(String userInput) {
        assert userInput.matches(getPattern());
        assert this.schedule != null;

        String argument = extractArgument(userInput);
        Either<TimeInterval, CommandResult> queryInterval = this.argumentParser.parse(argument);

        if (queryInterval.isRight()) {
            return queryInterval.getRight();
        }

        ArrayList<TimeInterval> freetime = getFreeTime(queryInterval.getLeft());
        if (freetime.isEmpty()) {
            return this.makeNoFreeTimeResult();
        } else {
            return makeResult(freetime);
        }
    }

    /**
     * @return A {@code String} representing the default command word.
     */
    @Override
    public String getTriggerWord() {
        return TRIGGER_WORD;
    }

    /**
     * @return A {@code String} describing what this {@code Command} does.
     */
    @Override
    public String getDescription() {
        return DESCRIPTION;
    }

    /**
     * @return A {@code String} describing the format that this {@code Command} expects.
     */
    @Override
    public String getCommandFormat() {
        return COMMAND_FORMAT;
    }

    /**
     * Compute available free time in the {@code queryInterval}.
     * @param queryInterval The {@code TimeInterval} to look for free time.
     * @return An {@code ArrayList} of free time represented by {@code TimeInterval}.
     */
    private ArrayList<TimeInterval> getFreeTime(TimeInterval queryInterval) {
        ArrayList<TimeInterval> eventIntervals = eventIntervals(queryInterval, this.schedule.getTaskList());
        ArrayList<TimeInterval> busyIntervals = flattenIntervals(eventIntervals);
        return timeIntervalSubtraction(queryInterval, busyIntervals);
    }

    /**
     * Return the time intervals of all events happening within {@code queryInterval}. It is guaranteed that all
     * intervals are bounded by queryInterval, that is, for all x in output, x intersect queryInterval == x.
     * @param queryInterval The {@code TimeInterval} to bound the query.
     * @param tasks The {@code ArrayList} of tasks.
     * @return An {@code ArrayList} of {@code TimeInterval} for the events.
     */
    private ArrayList<TimeInterval> eventIntervals(TimeInterval queryInterval, ArrayList<Task> tasks) {
        return new ArrayListUtil.ChainableArrayListUtil<>(tasks)
                .filter(Task::isEvent)
                .filter(task -> {
                    // Keep only events that overlap the query window.
                    boolean endsBefore = task.getEndTime().compareTo(queryInterval.getFrom()) <= 0;
                    boolean startsAfter = task.getStartTime().compareTo(queryInterval.getTo()) >= 0;
                    return !(endsBefore || startsAfter);
                })
                .map(task -> {
                    // Clamp each event to the query window.
                    LocalDateTime startTime = LocalDateTimeUtil.max(queryInterval.getFrom(), task.getStartTime());
                    LocalDateTime endTime = LocalDateTimeUtil.min(queryInterval.getTo(), task.getEndTime());
                    return new TimeInterval(startTime, endTime);
                })
                .value();
    }

    /**
     * Merge time intervals that intersect. The output is ordered.
     * @param input The input time intervals.
     * @return The output time intervals.
     */
    private ArrayList<TimeInterval> flattenIntervals(ArrayList<TimeInterval> input) {
        ArrayList<TimeInterval> sortedIntervals = new ArrayListUtil.ChainableArrayListUtil<>(input)
                .sortBy(TimeInterval::getFrom)
                .value();
        ArrayList<TimeInterval> output = new ArrayList<>();

        if (sortedIntervals.size() == 0) {
            return output;
        }

        TimeInterval interval = new TimeInterval(sortedIntervals.get(0).getFrom(), sortedIntervals.get(0).getTo());
        for (TimeInterval currentInterval: sortedIntervals) {
            if (interval.inInterval(currentInterval.getFrom())) {
                // Overlapping: extend the running interval.
                interval = new TimeInterval(interval.getFrom(),
                        LocalDateTimeUtil.max(interval.getTo(), currentInterval.getTo()));
            } else {
                // Disjoint: flush the running interval and start a new one.
                output.add(interval);
                interval = new TimeInterval(currentInterval.getFrom(), currentInterval.getTo());
            }
        }
        output.add(interval);

        return output;
    }

    /**
     * Mathematically, returns {@code query} - {@code intervals}.
     * @param query The superset.
     * @param intervals The smaller subsets; assumed sorted and disjoint (output of flattenIntervals).
     * @return Return an {@code ArrayList} of time intervals that are not in {@code intervals} but in {@code query}.
     */
    private ArrayList<TimeInterval> timeIntervalSubtraction(TimeInterval query, ArrayList<TimeInterval> intervals) {
        if (intervals.size() == 0) {
            return ArrayListUtil.fromSingleton(query);
        }

        ArrayList<TimeInterval> output = new ArrayList<>();
        TimeInterval firstInterval = new TimeInterval(query.getFrom(), intervals.get(0).getFrom());
        if (!firstInterval.isTrivial()) {
            output.add(firstInterval);
        }

        for (int i = 1; i < intervals.size(); i++) {
            // Fixed: middle gaps are now filtered like the first and last ones,
            // so adjacent busy intervals no longer produce zero-length "free" slots.
            TimeInterval gap = new TimeInterval(intervals.get(i - 1).getTo(), intervals.get(i).getFrom());
            if (!gap.isTrivial()) {
                output.add(gap);
            }
        }

        TimeInterval lastInterval = new TimeInterval(intervals.get(intervals.size() - 1).getTo(), query.getTo());
        if (!lastInterval.isTrivial()) {
            output.add(lastInterval);
        }

        return output;
    }

    /**
     * @param freetimes The {@code ArrayList} of free time.
     * @return A {@code CommandResult} displaying {@code freetimes}.
     */
    private CommandResult makeResult(ArrayList<TimeInterval> freetimes) {
        return () -> {
            StringBuilder builder = new StringBuilder();
            builder.append("You are free at the following time slots:\n");
            for (TimeInterval freetime: freetimes) {
                builder.append(" - ");
                builder.append(freetime.getFrom().format(OUTPUT_FORMATTER));
                builder.append(" - ");
                builder.append(freetime.getTo().format(OUTPUT_FORMATTER));
                builder.append("\n");
            }
            return builder.toString();
        };
    }

    /**
     * @return A {@code CommandResult} indicating that the user has no free time.
     */
    private CommandResult makeNoFreeTimeResult() {
        return () -> "You don't have any free time in that period.";
    }
}
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.s3.model; import java.util.Date; import java.util.HashMap; import java.util.Map; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.HttpMethod; import com.amazonaws.services.s3.AmazonS3; /** * <p> * Contains options to generate a pre-signed URL for an Amazon S3 resource. * </p> * <p> * Pre-signed URLs allow clients to form a URL for an Amazon S3 resource and * sign it with the current AWS security credentials. * A pre-signed URL may be passed around for other users to access * the resource without providing them * access to an account's AWS security credentials. 
* </p> * * @see AmazonS3#generatePresignedUrl(GeneratePresignedUrlRequest) */ public class GeneratePresignedUrlRequest extends AmazonWebServiceRequest implements SSECustomerKeyProvider { /** The HTTP method (GET, PUT, DELETE, HEAD) to be used in this request and when the pre-signed URL is used */ private HttpMethod method; /** The name of the bucket involved in this request */ private String bucketName; /** The key of the object involved in this request */ private String key; /** The optional Content-Type header that will be sent when the presigned URL is accessed */ private String contentType; /** The optional Content-MD5 header that will be sent when the presigned URL is accessed */ private String contentMd5; /** * An optional expiration date at which point the generated pre-signed URL * will no longer be accepted by Amazon S3. If not specified, a default * value will be supplied. */ private Date expiration; /** * True if the request content is set to zero byte instead of null. This is * necessary to make pre-signed URL generation work for multi-part upload * initiation using SigV4. Ref: TT0050059365 */ private boolean zeroByteContent; /** * An optional map of additional parameters to include in the pre-signed * URL. Adding additional request parameters enables more advanced * pre-signed URLs, such as accessing Amazon S3's torrent resource for an * object, or for specifying a version ID when accessing an object. */ private Map<String, String> requestParameters = new HashMap<String, String>(); /** * Optional field that overrides headers on the response. */ private ResponseHeaderOverrides responseHeaders; /** * Optional customer-provided server-side encryption key to use as part of * the generated pre-signed URL. */ private SSECustomerKey sseCustomerKey; /** * Used to specify the server side encryption algorithm. Null means * no server side encryption is in use. 
*/ private String sseAlgorithm; /** * Used to specify the KMS CMS Key ID when KMS server side encryption is in * use. */ private String kmsCmkId; /** * Returns the KMS customer key id used for server side encryption; or null * if there is none. */ public String getKmsCmkId() { return kmsCmkId; } /** * Sets the KMS customer key id used for server side encryption. * <p> * Note S3 does not require HTTP header * ???x-amz-server-side-encryption-aws-kms-key-id??? to be always present (a * default key ID will be used if this header is not present). * <p> * It is also possible to set the header to ???alias/aws/s3??? to refer to the * default KMS CMK ID. */ public void setKmsCmkId(String kmsCmkId) { this.kmsCmkId = kmsCmkId; } /** * Fluent API for {@link #setKmsCmkId(String)} */ public GeneratePresignedUrlRequest withKmsCmkId(String kmsCmkId) { setKmsCmkId(kmsCmkId); return this; } /** * Returns the SSE algorithm used for SSE (with server side key); or null if * SSE (with server side key) is not in use. */ public String getSSEAlgorithm() { return sseAlgorithm; } /** * Sets the SSE algorithm for server side encryption. * * @param currently supported values: "AES256" or "aws:kms". */ public void setSSEAlgorithm(String sseAlgorithm) { this.sseAlgorithm = sseAlgorithm; } /** * Fluent API for {@link #setSSEAlgorithm(String)} */ public GeneratePresignedUrlRequest withSSEAlgorithm(String sseAlgorithm) { setSSEAlgorithm(sseAlgorithm); return this; } /** * Sets the SSE algorithm for server side encryption. * * @param currently supported values: "AES256" or "aws:kms". 
*/ public void setSSEAlgorithm(SSEAlgorithm sseAlgorithm) { this.sseAlgorithm = sseAlgorithm.getAlgorithm(); } /** * Fluent API for {@link #setSSEAlgorithm(SSEAlgorithm)} */ public GeneratePresignedUrlRequest withSSEAlgorithm(SSEAlgorithm sseAlgorithm) { setSSEAlgorithm(sseAlgorithm); return this; } /** * Creates a new request for generating a pre-signed URL that can be used as * part of an HTTP GET request to access the Amazon S3 object stored under * the specified key in the specified bucket. * * @param bucketName * The name of the bucket containing the desired Amazon S3 * object. * @param key * The key under which the desired Amazon S3 object is stored. */ public GeneratePresignedUrlRequest(String bucketName, String key) { this(bucketName, key, HttpMethod.GET); } /** * <p> * Creates a new request for generating a pre-signed URL that can be used as * part of an HTTP request to access the specified Amazon S3 resource. * </p> * <p> * When specifying an HTTP method, you <b>must</b> send the pre-signed URL * with the same HTTP method in order to successfully use the pre-signed * URL. * </p> * * @param bucketName * The name of the Amazon S3 bucket involved in the operation. * @param key * The key of the Amazon S3 object involved in the operation. * @param method * The HTTP method (GET, PUT, DELETE, HEAD) to be used in the * request when the pre-signed URL is used. */ public GeneratePresignedUrlRequest(String bucketName, String key, HttpMethod method) { this.bucketName = bucketName; this.key = key; this.method = method; } /** * The HTTP method (GET, PUT, DELETE, HEAD) to be used in this request. The * same HTTP method <b>must</b> be used in the request when the pre-signed * URL is used. * * @return The HTTP method (GET, PUT, DELETE, HEAD) to be used in this * request and when the pre-signed URL is used. */ public HttpMethod getMethod() { return method; } /** * Sets the HTTP method (GET, PUT, DELETE, HEAD) to be used in this request. 
* The same HTTP method <b>must</b> be used in the request when the * pre-signed URL is used. * * @param method * The HTTP method (GET, PUT, DELETE, HEAD) to be used in this * request. */ public void setMethod(HttpMethod method) { this.method = method; } /** * Sets the HTTP method (GET, PUT, DELETE, HEAD) to be used in this request, * and returns this request object to enable additional method calls to be * chained together. * <p> * The same HTTP method <b>must</b> be used in the request when the * pre-signed URL is used. * * @param method * The HTTP method (GET, PUT, DELETE, HEAD) to be used in this * request. * * @return The updated request object, so that additional method calls can * be chained together. */ public GeneratePresignedUrlRequest withMethod(HttpMethod method) { setMethod(method); return this; } /** * Returns the name of the bucket involved in this request. * * @return the name of the bucket involved in this request. */ public String getBucketName() { return bucketName; } /** * Sets the name of the bucket involved in this request. * * @param bucketName * the name of the bucket involved in this request. */ public void setBucketName(String bucketName) { this.bucketName = bucketName; } /** * Sets the name of the bucket involved in this request, and returns this * request object to enable additional method calls to be chained together. * * @param bucketName * the name of the bucket involved in this request. * * @return The updated request object, so that additional method calls can * be chained together. */ public GeneratePresignedUrlRequest withBucketName(String bucketName) { setBucketName(bucketName); return this; } /** * Returns the key of the object involved in this request. * * @return The key of the object involved in this request. */ public String getKey() { return key; } /** * Sets the key of the object involved in this request. * * @param key * the key of the object involved in this request. 
*/ public void setKey(String key) { this.key = key; } /** * Sets the key of the object involved in this request, and returns this * request object to enable additional method calls to be chained together. * * @param key * the key of the object involved in this request. * * @return The updated request object, so that additional method calls can * be chained together. */ public GeneratePresignedUrlRequest withKey(String key) { setKey(key); return this; } /** * The expiration date at which point the new pre-signed URL will no longer * be accepted by Amazon S3. If not specified, a default value will be * supplied. * * @return The expiration date at which point the new pre-signed URL will no * longer be accepted by Amazon S3. */ public Date getExpiration() { return expiration; } /** * Sets the expiration date at which point the new pre-signed URL will no * longer be accepted by Amazon S3. If not specified, a default value will * be supplied. * * @param expiration * The expiration date at which point the new pre-signed URL will * no longer be accepted by Amazon S3. */ public void setExpiration(Date expiration) { this.expiration = expiration; } /** * Sets the expiration date at which point the new pre-signed URL will no * longer be accepted by Amazon S3, and returns this request object to * enable additional method calls to be chained together. * <p> * If not specified, a default value will be supplied. * * @param expiration * The expiration date at which point the new pre-signed URL will * no longer be accepted by Amazon S3. * * @return The updated request object, so that additional method calls can * be chained together. */ public GeneratePresignedUrlRequest withExpiration(Date expiration) { setExpiration(expiration); return this; } /** * Adds an additional request parameter to be included in the pre-signed * URL. 
Adding additional request parameters enables more advanced * pre-signed URLs, such as accessing Amazon S3's torrent resource for an * object, or for specifying a version ID when accessing an object. * * @param key * The name of the request parameter, as it appears in the URL's * query string (e.g. versionId). * @param value * The (optional) value of the request parameter being added. */ public void addRequestParameter(String key, String value) { requestParameters.put(key, value); } /** * Returns the complete map of additional request parameters to be included * in the pre-signed URL. * * @return The complete map of additional request parameters to be included * in the pre-signed URL. */ public Map<String, String> getRequestParameters() { return requestParameters; } /** * Returns the headers to be overridden in the service response. * * @return the headers to be overridden in the service response. */ public ResponseHeaderOverrides getResponseHeaders() { return responseHeaders; } /** * Sets the headers to be overridden in the service response. * * @param responseHeaders * The headers to be overridden in the service response. */ public void setResponseHeaders(ResponseHeaderOverrides responseHeaders) { this.responseHeaders = responseHeaders; } /** * Sets the headers to be overridden in the service response and returns * this object, for method chaining. * * @param responseHeaders * The headers to be overridden in the service response. * * * @return This {@link GeneratePresignedUrlRequest} for method chaining. */ public GeneratePresignedUrlRequest withResponseHeaders(ResponseHeaderOverrides responseHeaders) { setResponseHeaders(responseHeaders); return this; } /** * Gets the expected content-type of the request. The content-type is included in * the signature. * * @return The expected content-type */ public String getContentType() { return contentType; } /** * Sets the expected content-type of the request. The content-type is included in * the signature. 
* @param contentType * The expected content-type */ public void setContentType(String contentType) { this.contentType = contentType; } /** * Sets the expected content-type of the request and returns * this object, for method chaining. * * @param contentType * The expected content-type * * * @return This {@link GeneratePresignedUrlRequest} for method chaining. */ public GeneratePresignedUrlRequest withContentType(String contentType) { setContentType(contentType); return this; } /** * Gets the expected content-md5 header of the request. This header value * will be included when calculating the signature, and future requests must * include the same content-md5 header value to access the presigned URL. * * @return The expected content-md5 header value. */ public String getContentMd5() { return contentMd5; } /** * Sets the expected content-md5 header of the request. This header value * will be included when calculating the signature, and future requests must * include the same content-md5 header value to access the presigned URL. * @param contentMd5 * The expected content-md5 header value. */ public void setContentMd5(String contentMd5) { this.contentMd5 = contentMd5; } /** * Sets the expected content-md5 header of the request and returns this * object, for method chaining. * * @param contentMd5 * The expected content-md5 header value. * * @return This {@link GeneratePresignedUrlRequest} for method chaining. */ public GeneratePresignedUrlRequest withContentMd5(String contentMd5) { this.contentMd5 = contentMd5; return this; } @Override public SSECustomerKey getSSECustomerKey() { return sseCustomerKey; } /** * Sets the customer-provided server-side encryption key to use as part of * the generated pre-signed URL. * * @param sseCustomerKey * The customer-provided server-side encryption key to use as * part of the generated pre-signed URL. 
*/ public void setSSECustomerKey(SSECustomerKey sseCustomerKey) { this.sseCustomerKey = sseCustomerKey; } /** * Sets the customer-provided server-side encryption key to use as part of * the generated pre-signed URL, and returns the updated request object so * that additional method calls can be chained together. * * @param sseKey * The customer-provided server-side encryption key to use as * part of the generated pre-signed URL. * * @return This updated request object so that additional method calls can * be chained together. */ public GeneratePresignedUrlRequest withSSECustomerKey(SSECustomerKey sseKey) { setSSECustomerKey(sseKey); return this; } /** * Sets the use of SSE-C (Server Side Encryption with Customer Key) using * the given encryption algorithm. * * @param sseAlgorithm * The server-side encryption algorithm to use with this * customer-provided server-side encryption key; or null if SSE-C * is disabled. "AES256" is currently the only * supported SSE-C encryption algorithm. */ public void setSSECustomerKeyAlgorithm(SSEAlgorithm sseAlgorithm) { if (sseAlgorithm == null) this.sseCustomerKey = null; else if (sseAlgorithm.getAlgorithm().equals(SSEAlgorithm.AES256.getAlgorithm())) { this.sseCustomerKey = SSECustomerKey.generateSSECustomerKeyForPresignUrl(sseAlgorithm.getAlgorithm()); } else { throw new IllegalArgumentException( "Currently the only supported Server Side Encryption algorithm is " + SSEAlgorithm.AES256); } } /** * Fluent method for {@link #setSSECustomerKeyAlgorithm(SSEAlgorithm)}. */ public GeneratePresignedUrlRequest withSSECustomerKeyAlgorithm(SSEAlgorithm algorithm) { setSSECustomerKeyAlgorithm(algorithm); return this; } /** * Returns true if zero byte content is to be used for generating pre-signed * URL; false otherwise. */ public boolean isZeroByteContent() { return zeroByteContent; } /** * Sets if zero byte content is to be used for generating pre-signed URL. 
*/ public void setZeroByteContent(boolean zeroByteContent) { this.zeroByteContent = zeroByteContent; } /** * Fluent method for {@link #setZeroByteContent(boolean)}. */ public GeneratePresignedUrlRequest withZeroByteContent(boolean zeroByteContent) { setZeroByteContent(zeroByteContent); return this; } /** * Rejects any illegal input (as attributes of this request) by the user. * * @throws IllegalArgumentException if there is illegal input from the user. */ public void rejectIllegalArguments() { if (bucketName == null) { throw new IllegalArgumentException( "The bucket name parameter must be specified when generating a pre-signed URL"); } if (this.method == null) { throw new IllegalArgumentException( "The HTTP method request parameter must be specified when generating a pre-signed URL"); } if (this.sseCustomerKey != null) { if (this.sseAlgorithm != null) { throw new IllegalArgumentException("Either SSE or SSE-C can be specified but not both"); } if (this.kmsCmkId != null) { throw new IllegalArgumentException("KMS CMK is not applicable for SSE-C"); } } else if (this.kmsCmkId != null) { if (!SSEAlgorithm.KMS.getAlgorithm().equals(sseAlgorithm)) { throw new IllegalArgumentException( "For KMS server side encryption, the SSE algorithm must be set to " + SSEAlgorithm.KMS); } } /* * S3 does not require HTTP header * ???x-amz-server-side-encryption-aws-kms-key-id??? to be always present (a * default key ID will be used if this header is not present). * * It is also possible to set the header to ???alias/aws/s3??? to refer * to the default KMS CMK ID. */ } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.near; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.GridCacheModuloAffinityFunction; import 
org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheEntry;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.MvccFeatureChecker;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.NONE;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Checks that readers are properly handled.
 */
public class GridCacheNearReadersSelfTest extends GridCommonAbstractTest {
    /** Number of grids. */
    private int grids = 2;

    /** Grid counter. */
    private AtomicInteger cntr = new AtomicInteger(0);

    /** Test cache affinity. */
    private GridCacheModuloAffinityFunction aff = new GridCacheModuloAffinityFunction();

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.NEAR_CACHE);
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        CacheConfiguration cacheCfg = defaultCacheConfiguration();

        cacheCfg.setCacheMode(PARTITIONED);
        cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
        cacheCfg.setRebalanceMode(NONE);
        cacheCfg.setAffinity(aff);
        cacheCfg.setAtomicityMode(atomicityMode());
        cacheCfg.setBackups(aff.backups());

        NearCacheConfiguration nearCfg = new NearCacheConfiguration();

        cacheCfg.setNearConfiguration(nearCfg);

        cfg.setCacheConfiguration(cacheCfg);

        // IDX_ATTR drives the modulo affinity function: each node gets a
        // monotonically increasing index.
        cfg.setUserAttributes(F.asMap(GridCacheModuloAffinityFunction.IDX_ATTR, cntr.getAndIncrement()));

        return cfg;
    }

    /**
     * @return Atomicity mode.
     */
    protected CacheAtomicityMode atomicityMode() {
        return TRANSACTIONAL;
    }

    /** @throws Exception If failed. */
    private void startGrids() throws Exception {
        assert grids > 0;
        assert aff.backups() >= 0;

        startGrids(grids);

        awaitPartitionMapExchange();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        grids = -1;

        aff.reset();

        cntr.set(0);
    }

    /**
     * @param nodeId Node ID.
     * @return Grid.
     */
    private Ignite grid(UUID nodeId) {
        return G.ignite(nodeId);
    }

    /** @throws Exception If failed. */
    @Test
    public void testTwoNodesTwoKeysNoBackups() throws Exception {
        aff.backups(0);
        grids = 2;
        aff.partitions(grids);

        startGrids();

        ClusterNode n1 = F.first(aff.nodes(aff.partition(1), grid(0).cluster().nodes()));
        final ClusterNode n2 = F.first(aff.nodes(aff.partition(2), grid(0).cluster().nodes()));

        assertNotNull(n1);
        assertNotNull(n2);
        assertNotSame(n1, n2);
        assertFalse("Nodes cannot be equal: " + n1, n1.equals(n2));

        Ignite g1 = grid(n1.id());
        Ignite g2 = grid(n2.id());

        IgniteCache<Integer, String> cache1 = g1.cache(DEFAULT_CACHE_NAME);
        IgniteCache<Integer, String> cache2 = g2.cache(DEFAULT_CACHE_NAME);

        // Store some values in cache.
        assertNull(cache1.getAndPut(1, "v1"));
        assertNull(cache1.getAndPut(2, "v2"));

        GridDhtCacheEntry e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);
        GridDhtCacheEntry e2 = (GridDhtCacheEntry)dht(cache2).entryEx(2);

        assertNotNull(e1.readers());

        assertTrue(cache1.containsKey(1));
        assertTrue(cache1.containsKey(2));

        assertNotNull(nearPeek(cache1, 1));
        assertNotNull(nearPeek(cache1, 2));
        assertNotNull(dhtPeek(cache1, 1));
        assertNull(dhtPeek(cache1, 2));

        assertNull(nearPeek(cache2, 1));
        assertNotNull(dhtPeek(cache2, 2));

        // Node2 should have node1 in reader's map, since request to
        // put key 2 came from node1.
        assertTrue(e2.readers().contains(n1.id()));

        e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);

        // Node1 should not have node2 in readers map yet.
        assertFalse(e1.readers().contains(n2.id()));

        // Get key1 on node2.
        assertEquals("v1", cache2.get(1));

        // Check that key1 is in near cache of cache2.
        assertNotNull(nearPeek(cache2, 1));

        e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);

        // Now node1 should have node2 in readers map.
        assertTrue(e1.readers().contains(n2.id()));

        // Evict locally from cache2.
        cache2.localEvict(Collections.singleton(1));

        assertNull(nearPeek(cache2, 1));
        assertNull(dhtPeek(cache2, 1));

        // Node 1 still has node2 in readers map.
        assertTrue(e1.readers().contains(n2.id()));

        assertNotNull(cache1.getAndPut(1, "z1"));

        final GridDhtCacheEntry e1f = e1;

        // The put above should eventually clear node2 from the readers map.
        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                try {
                    return !e1f.readers().contains(n2.id());
                }
                catch (GridCacheEntryRemovedException ignored) {
                    return true;
                }
                catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }, 5000);

        // Node 1 should no longer have node2 in readers map.
        assertFalse(((GridDhtCacheEntry)dht(cache1).entryEx(1)).readers().contains(n2.id()));
    }

    /** @throws Exception If failed. */
    @Test
    public void testTwoNodesTwoKeysOneBackup() throws Exception {
        aff.backups(1);
        grids = 2;
        aff.partitions(grids);

        startGrids();

        ClusterNode n1 = F.first(aff.nodes(aff.partition(1), grid(0).cluster().nodes()));
        ClusterNode n2 = F.first(aff.nodes(aff.partition(2), grid(0).cluster().nodes()));

        assertNotNull(n1);
        assertNotNull(n2);
        assertNotSame(n1, n2);
        assertFalse("Nodes cannot be equal: " + n1, n1.equals(n2));

        Ignite g1 = grid(n1.id());
        Ignite g2 = grid(n2.id());

        awaitPartitionMapExchange();

        GridCacheContext ctx = ((IgniteKernal)g1).internalCache(DEFAULT_CACHE_NAME).context();

        List<KeyCacheObject> cacheKeys = F.asList(ctx.toCacheKeyObject(1), ctx.toCacheKeyObject(2));

        IgniteInternalFuture<Object> f1 = ((IgniteKernal)g1).internalCache(DEFAULT_CACHE_NAME).preloader().request(
            ctx,
            cacheKeys,
            new AffinityTopologyVersion(2));

        if (f1 != null)
            f1.get();

        IgniteInternalFuture<Object> f2 = ((IgniteKernal)g2).internalCache(DEFAULT_CACHE_NAME).preloader().request(
            ((IgniteKernal)g2).internalCache(DEFAULT_CACHE_NAME).context(),
            cacheKeys,
            new AffinityTopologyVersion(2));

        if (f2 != null)
            f2.get();

        IgniteCache<Integer, String> cache1 = g1.cache(DEFAULT_CACHE_NAME);
        IgniteCache<Integer, String> cache2 = g2.cache(DEFAULT_CACHE_NAME);

        assertEquals(g1.affinity(DEFAULT_CACHE_NAME).mapKeyToNode(1), g1.cluster().localNode());
        assertFalse(g1.affinity(DEFAULT_CACHE_NAME).mapKeyToNode(2).equals(g1.cluster().localNode()));

        assertEquals(g1.affinity(DEFAULT_CACHE_NAME).mapKeyToNode(2), g2.cluster().localNode());
        assertFalse(g2.affinity(DEFAULT_CACHE_NAME).mapKeyToNode(1).equals(g2.cluster().localNode()));

        // Store first value in cache.
        assertNull(cache1.getAndPut(1, "v1"));

        assertTrue(cache1.containsKey(1));
        assertTrue(cache2.containsKey(1));

        assertEquals("v1", nearPeek(cache1, 1));
        assertEquals("v1", nearPeek(cache2, 1));
        assertEquals("v1", dhtPeek(cache1, 1));
        assertEquals("v1", dhtPeek(cache2, 1));

        assertNull(near(cache1).peekEx(1));
        assertNull(near(cache2).peekEx(1));

        GridDhtCacheEntry e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);

        // Store second value in cache.
        assertNull(cache1.getAndPut(2, "v2"));

        assertTrue(cache1.containsKey(2));
        assertTrue(cache2.containsKey(2));

        assertEquals("v2", nearPeek(cache1, 2));
        assertEquals("v2", nearPeek(cache2, 2));
        assertEquals("v2", dhtPeek(cache1, 2));
        assertEquals("v2", dhtPeek(cache2, 2));

        assertNull(near(cache1).peekEx(2));
        assertNull(near(cache2).peekEx(2));

        GridDhtCacheEntry c2e2 = (GridDhtCacheEntry)dht(cache2).entryEx(2);

        // Nodes are backups of each other, so no readers should be added.
        assertFalse(c2e2.readers().contains(n1.id()));
        assertFalse(e1.readers().contains(n2.id()));

        // Get key1 on node2 (value should come from local DHT cache, as it has a backup).
        assertEquals("v1", cache2.get(1));

        // Since DHT cache2 has the value, Near cache2 should not have it.
        assertNull(near(cache2).peekEx(1));

        e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);

        // Since v1 was retrieved locally from cache2, cache1 should not know about it.
        assertFalse(e1.readers().contains(n2.id()));

        // Evict locally from cache2.
        // It should not be successful since it's not allowed to evict entry on backup node.
        cache2.localEvict(Collections.singleton(1));

        assertNull(near(cache2).peekEx(1));
        assertEquals("v1", dhtPeek(cache2, 1));

        assertEquals("v1", cache1.getAndPut(1, "z1"));

        e1 = (GridDhtCacheEntry)dht(cache1).entryEx(1);

        // Node 1 should not have node2 in readers map.
        assertFalse(e1.readers().contains(n2.id()));

        assertNull(near(cache2).peekEx(1));
        assertEquals("z1", dhtPeek(cache2, 1));
    }

    /** @throws Exception If failed. */
    @Test
    public void testPutAllManyKeysOneReader() throws Exception {
        aff.backups(1);
        grids = 4;
        aff.partitions(grids);

        startGrids();

        try {
            IgniteCache<Object, Object> prj0 = grid(0).cache(DEFAULT_CACHE_NAME);
            IgniteCache<Object, Object> prj1 = grid(1).cache(DEFAULT_CACHE_NAME);

            Map<Integer, Integer> putMap = new HashMap<>();

            int size = 100;

            for (int i = 0; i < size; i++)
                putMap.put(i, i);

            prj0.putAll(putMap);

            for (int i = 0; i < size; i++)
                putMap.put(i, i * i);

            prj1.putAll(putMap);

            for (int i = 0; i < size; i++) {
                assertEquals(i * i, prj0.get(i));
                assertEquals(i * i, prj1.get(i));
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /** @throws Exception If failed. */
    @Test
    public void testPutAllManyKeysTwoReaders() throws Exception {
        aff.backups(1);
        grids = 5;
        aff.partitions(grids);

        startGrids();

        try {
            IgniteCache<Object, Object> prj0 = grid(0).cache(DEFAULT_CACHE_NAME);
            IgniteCache<Object, Object> prj1 = grid(1).cache(DEFAULT_CACHE_NAME);
            IgniteCache<Object, Object> prj2 = grid(2).cache(DEFAULT_CACHE_NAME);

            Map<Integer, Integer> putMap = new HashMap<>();

            int size = 100;

            for (int i = 0; i < size; i++)
                putMap.put(i, i);

            prj0.putAll(putMap);

            for (int i = 0; i < size; i++)
                putMap.put(i, i * i);

            prj1.putAll(putMap);

            for (int i = 0; i < size; i++)
                putMap.put(i, i * i * i);

            prj2.putAll(putMap);

            for (int i = 0; i < size; i++) {
                assertEquals(i * i * i, prj0.get(i));
                assertEquals(i * i * i, prj1.get(i));
                assertEquals(i * i * i, prj2.get(i));
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /** @throws Exception If failed. */
    @Test
    public void testBackupEntryReaders() throws Exception {
        aff.backups(1);
        grids = 2;
        aff.partitions(grids);

        startGrids();

        Collection<ClusterNode> nodes = new ArrayList<>(aff.nodes(aff.partition(1), grid(0).cluster().nodes()));

        ClusterNode primary = F.first(nodes);

        assert primary != null;

        nodes.remove(primary);

        ClusterNode backup = F.first(nodes);

        assert backup != null;

        assertNotSame(primary, backup);

        assertFalse("Nodes cannot be equal: " + primary, primary.equals(backup));

        IgniteCache<Integer, String> cache1 = grid(primary.id()).cache(DEFAULT_CACHE_NAME);
        IgniteCache<Integer, String> cache2 = grid(backup.id()).cache(DEFAULT_CACHE_NAME);

        // Store a value in cache.
        assertNull(cache1.getAndPut(1, "v1"));

        GridDhtCacheEntry e1 = (GridDhtCacheEntry)dht(cache1).peekEx(1);
        GridDhtCacheEntry e2 = (GridDhtCacheEntry)dht(cache2).peekEx(1);

        assertNull(e1);
        assertNull(e2);
    }

    /** @throws Exception If failed. */
    @Test
    public void testImplicitLockReaders() throws Exception {
        grids = 3;

        aff.reset(grids, 1);

        startGrids();

        int key1 = 3;
        String val1 = Integer.toString(key1);

        assertEquals(grid(0).localNode(), F.first(aff.nodes(aff.partition(key1), grid(0).cluster().nodes())));

        int key2 = 1;
        String val2 = Integer.toString(key2);

        assertEquals(grid(1).localNode(), F.first(aff.nodes(aff.partition(key2), grid(1).cluster().nodes())));

        IgniteCache<Integer, String> cache = jcache(0);

        assertNull(cache.getAndPut(key1, val1));

        assertEquals(val1, dhtPeek(0, key1));
        assertEquals(val1, dhtPeek(1, key1));
        assertNull(dhtPeek(2, key1));

        assertNull(near(0).peekEx(key1));
        assertNull(near(1).peekEx(key1));
        assertNull(near(2).peekEx(key1));

        cache.put(key2, val2);

        assertNull(dhtPeek(0, key2));
        assertEquals(val2, dhtPeek(1, key2));
        assertEquals(val2, dhtPeek(2, key2));

        assertEquals(val2, near(0).peekEx(key2).wrap().getValue());
        assertNull(near(1).peekEx(key2));
        assertNull(near(2).peekEx(key2));

        String val22 = val2 + "2";

        cache.put(key2, val22);

        assertNull(dhtPeek(0, key2));
        assertEquals(val22, dhtPeek(1, key2));
        assertEquals(val22, dhtPeek(2, key2));

        assertEquals(val22, near(0).peekEx(key2).wrap().getValue());
        assertNull(near(1).peekEx(key2));
        assertNull(near(2).peekEx(key2));

        cache.remove(key2);

        assertNull(dhtPeek(0, key2));
        assertNull(dhtPeek(1, key2));
        assertNull(dhtPeek(2, key2));

        assertTrue(near(0).peekEx(key2) == null || near(0).peekEx(key2).deleted());
        assertNull(near(1).peekEx(key2));
        assertNull(near(2).peekEx(key2));

        cache.remove(key1);

        assertNull(dhtPeek(0, key1));
        assertNull(dhtPeek(1, key1));
        assertNull(dhtPeek(2, key1));

        assertNull(near(0).peekEx(key1));
        assertNull(near(1).peekEx(key1));
        assertNull(near(2).peekEx(key1));

        // After the removals no locks should be held and all caches must be empty.
        for (int i = 0; i < grids; i++) {
            assert !jcache(i).isLocalLocked(key1, false);
            assert !jcache(i).isLocalLocked(key2, false);

            assert jcache(i).localSize() == 0;
        }
    }

    /** @throws Exception If failed. */
    @Test
    public void testExplicitLockReaders() throws Exception {
        // Explicit locks are only meaningful for transactional caches.
        if (atomicityMode() == ATOMIC)
            return;

        grids = 3;

        aff.reset(grids, 1);

        startGrids();

        int key1 = 3;
        String val1 = Integer.toString(key1);

        assertEquals(grid(0).localNode(), F.first(aff.nodes(aff.partition(key1), grid(0).cluster().nodes())));

        int key2 = 1;
        String val2 = Integer.toString(key2);

        assertEquals(grid(1).localNode(), F.first(aff.nodes(aff.partition(key2), grid(1).cluster().nodes())));

        IgniteCache<Integer, String> cache = jcache(0);

        Lock lock1 = cache.lock(key1);

        lock1.lock();

        try {
            // Nested lock.
            Lock lock2 = cache.lock(key2);

            lock2.lock();

            try {
                assertNull(cache.getAndPut(key1, val1));

                assertEquals(val1, dhtPeek(0, key1));
                assertEquals(val1, dhtPeek(1, key1));
                assertNull(dhtPeek(2, key1));

                // Since near entry holds the lock, it should
                // contain correct value.
                assertEquals(val1, near(0).peekEx(key1).wrap().getValue());

                assertNull(near(1).peekEx(key1));
                assertNull(near(2).peekEx(key1));

                cache.put(key2, val2);

                assertNull(dhtPeek(0, key2));
                assertEquals(val2, dhtPeek(1, key2));
                assertEquals(val2, dhtPeek(2, key2));

                assertEquals(val2, near(0).peekEx(key2).wrap().getValue());
                assertNull(near(1).peekEx(key2));
                assertNull(near(2).peekEx(key2));

                String val22 = val2 + "2";

                cache.put(key2, val22);

                assertNull(dhtPeek(0, key2));
                assertEquals(val22, dhtPeek(1, key2));
                assertEquals(val22, dhtPeek(2, key2));

                assertEquals(val22, near(0).peekEx(key2).wrap().getValue());
                assertNull(near(1).peekEx(key2));
                assertNull(near(2).peekEx(key2));

                cache.remove(key2);

                assertNull(dhtPeek(0, key2));
                assertNull(dhtPeek(1, key2));
                assertNull(dhtPeek(2, key2));

                assertNull(dht(0).peekEx(key2));
                assertNotNull(dht(1).peekEx(key2));
                assertNotNull(dht(2).peekEx(key2));

                assertNotNull(near(0).peekEx(key2));
                assertNull(near(1).peekEx(key2));
                assertNull(near(2).peekEx(key2));
            }
            finally {
                lock2.unlock();
            }
        }
        finally {
            lock1.unlock();
        }
    }
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.config.http;

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.w3c.dom.Element;

import org.springframework.beans.BeanMetadataElement;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.security.access.SecurityConfig;
import org.springframework.security.config.Elements;
import org.springframework.security.web.access.expression.DefaultWebSecurityExpressionHandler;
import org.springframework.security.web.access.expression.ExpressionBasedFilterInvocationSecurityMetadataSource;
import org.springframework.security.web.access.intercept.DefaultFilterInvocationSecurityMetadataSource;
import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;

/**
 * Allows for convenient creation of a {@link FilterInvocationSecurityMetadataSource} bean
 * for use with a FilterSecurityInterceptor.
 *
 * @author Luke Taylor
 */
public class FilterInvocationSecurityMetadataSourceParser implements BeanDefinitionParser {

	private static final String ATT_USE_EXPRESSIONS = "use-expressions";

	private static final String ATT_HTTP_METHOD = "method";

	private static final String ATT_PATTERN = "pattern";

	private static final String ATT_ACCESS = "access";

	private static final String ATT_SERVLET_PATH = "servlet-path";

	private static final Log logger = LogFactory.getLog(FilterInvocationSecurityMetadataSourceParser.class);

	/**
	 * Parses a standalone {@code <filter-invocation-security-metadata-source>} element
	 * into a metadata source bean, rejecting attributes that are only valid when the
	 * {@code <intercept-url>} elements appear inside an {@code <http>} block.
	 */
	@Override
	public BeanDefinition parse(Element element, ParserContext parserContext) {
		List<Element> interceptUrls = DomUtils.getChildElementsByTagName(element, Elements.INTERCEPT_URL);
		// Check for attributes that aren't allowed in this context
		for (Element elt : interceptUrls) {
			if (StringUtils.hasLength(elt.getAttribute(HttpSecurityBeanDefinitionParser.ATT_REQUIRES_CHANNEL))) {
				parserContext.getReaderContext().error("The attribute '"
						+ HttpSecurityBeanDefinitionParser.ATT_REQUIRES_CHANNEL + "' isn't allowed here.", elt);
			}
			if (StringUtils.hasLength(elt.getAttribute(HttpSecurityBeanDefinitionParser.ATT_FILTERS))) {
				parserContext.getReaderContext().error(
						"The attribute '" + HttpSecurityBeanDefinitionParser.ATT_FILTERS + "' isn't allowed here.",
						elt);
			}
			if (StringUtils.hasLength(elt.getAttribute(ATT_SERVLET_PATH))) {
				parserContext.getReaderContext().error("The attribute '" + ATT_SERVLET_PATH
						+ "' isn't allowed here.", elt);
			}
		}
		BeanDefinition mds = createSecurityMetadataSource(interceptUrls, false, element, parserContext);
		String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
		// Register under the user-supplied id, if one was given, so other beans can reference it.
		if (StringUtils.hasText(id)) {
			parserContext.registerComponent(new BeanComponentDefinition(mds, id));
			parserContext.getRegistry().registerBeanDefinition(id, mds);
		}
		return mds;
	}

	/**
	 * Builds the security metadata source bean definition from the given
	 * {@code <intercept-url>} elements. When expressions are enabled (the default) an
	 * expression-based source is produced, wired to either a user-supplied
	 * {@code <expression-handler ref="..."/>} or a freshly registered default handler.
	 * @param addAllAuth when {@code true} and no URLs are configured, a catch-all
	 * "/**" -> "authenticated" mapping is added
	 */
	static RootBeanDefinition createSecurityMetadataSource(List<Element> interceptUrls, boolean addAllAuth,
			Element httpElt, ParserContext pc) {
		MatcherType matcherType = MatcherType.fromElement(httpElt);
		boolean useExpressions = isUseExpressions(httpElt);
		ManagedMap<BeanMetadataElement, BeanDefinition> requestToAttributesMap = parseInterceptUrlsForFilterInvocationRequestMap(
				matcherType, interceptUrls, useExpressions, addAllAuth, pc);
		BeanDefinitionBuilder fidsBuilder;
		if (useExpressions) {
			Element expressionHandlerElt = DomUtils.getChildElementByTagName(httpElt, Elements.EXPRESSION_HANDLER);
			String expressionHandlerRef = (expressionHandlerElt != null) ? expressionHandlerElt.getAttribute("ref")
					: null;
			if (StringUtils.hasText(expressionHandlerRef)) {
				logger.info("Using bean '" + expressionHandlerRef + "' as web "
						+ "SecurityExpressionHandler implementation");
			}
			else {
				expressionHandlerRef = registerDefaultExpressionHandler(pc);
			}
			fidsBuilder = BeanDefinitionBuilder
					.rootBeanDefinition(ExpressionBasedFilterInvocationSecurityMetadataSource.class);
			fidsBuilder.addConstructorArgValue(requestToAttributesMap);
			fidsBuilder.addConstructorArgReference(expressionHandlerRef);
		}
		else {
			fidsBuilder = BeanDefinitionBuilder.rootBeanDefinition(DefaultFilterInvocationSecurityMetadataSource.class);
			fidsBuilder.addConstructorArgValue(requestToAttributesMap);
		}
		fidsBuilder.getRawBeanDefinition().setSource(pc.extractSource(httpElt));
		return (RootBeanDefinition) fidsBuilder.getBeanDefinition();
	}

	/**
	 * Registers a {@link DefaultWebSecurityExpressionHandler} (with the configured
	 * default role prefix) and returns the generated bean name to reference it by.
	 */
	static String registerDefaultExpressionHandler(ParserContext pc) {
		BeanDefinition expressionHandler = GrantedAuthorityDefaultsParserUtils.registerWithDefaultRolePrefix(pc,
				DefaultWebSecurityExpressionHandlerBeanFactory.class);
		String expressionHandlerRef = pc.getReaderContext().generateBeanName(expressionHandler);
		pc.registerBeanComponent(new BeanComponentDefinition(expressionHandler, expressionHandlerRef));
		return expressionHandlerRef;
	}

	/**
	 * Returns whether SpEL expressions should be used. An absent or empty
	 * {@code use-expressions} attribute counts as {@code true} (expressions are the
	 * default); only an explicit {@code "false"} disables them.
	 */
	static boolean isUseExpressions(Element elt) {
		String useExpressions = elt.getAttribute(ATT_USE_EXPRESSIONS);
		return !StringUtils.hasText(useExpressions) || "true".equals(useExpressions);
	}

	/**
	 * Converts the {@code <intercept-url>} elements into an ordered map of request
	 * matcher bean -> access attribute list. Elements without an {@code access}
	 * attribute are skipped; later duplicates of the same matcher overwrite earlier
	 * ones (with a warning).
	 */
	private static ManagedMap<BeanMetadataElement, BeanDefinition> parseInterceptUrlsForFilterInvocationRequestMap(
			MatcherType matcherType, List<Element> urlElts, boolean useExpressions, boolean addAuthenticatedAll,
			ParserContext parserContext) {
		ManagedMap<BeanMetadataElement, BeanDefinition> filterInvocationDefinitionMap = new ManagedMap<>();
		for (Element urlElt : urlElts) {
			String access = urlElt.getAttribute(ATT_ACCESS);
			if (!StringUtils.hasText(access)) {
				continue;
			}
			String path = urlElt.getAttribute(ATT_PATTERN);
			String matcherRef = urlElt.getAttribute(HttpSecurityBeanDefinitionParser.ATT_REQUEST_MATCHER_REF);
			boolean hasMatcherRef = StringUtils.hasText(matcherRef);
			// Either an explicit matcher bean reference or a pattern is required.
			if (!hasMatcherRef && !StringUtils.hasText(path)) {
				parserContext.getReaderContext().error("path attribute cannot be empty or null", urlElt);
			}
			String method = urlElt.getAttribute(ATT_HTTP_METHOD);
			if (!StringUtils.hasText(method)) {
				method = null;
			}
			String servletPath = urlElt.getAttribute(ATT_SERVLET_PATH);
			if (!StringUtils.hasText(servletPath)) {
				servletPath = null;
			}
			else if (!MatcherType.mvc.equals(matcherType)) {
				// servlet-path only makes sense for MVC matchers.
				parserContext.getReaderContext().error(
						ATT_SERVLET_PATH + " is not applicable for request-matcher: '" + matcherType.name() + "'",
						urlElt);
			}
			BeanMetadataElement matcher = hasMatcherRef ? new RuntimeBeanReference(matcherRef)
					: matcherType.createMatcher(parserContext, path, method, servletPath);
			BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder.rootBeanDefinition(SecurityConfig.class);
			if (useExpressions) {
				logger.info("Creating access control expression attribute '" + access + "' for " + path);
				// The single expression will be parsed later by the
				// ExpressionBasedFilterInvocationSecurityMetadataSource
				attributeBuilder.addConstructorArgValue(new String[] { access });
				attributeBuilder.setFactoryMethod("createList");
			}
			else {
				attributeBuilder.addConstructorArgValue(access);
				attributeBuilder.setFactoryMethod("createListFromCommaDelimitedString");
			}
			if (filterInvocationDefinitionMap.containsKey(matcher)) {
				logger.warn("Duplicate URL defined: " + path
						+ ". The original attribute values will be overwritten");
			}
			filterInvocationDefinitionMap.put(matcher, attributeBuilder.getBeanDefinition());
		}
		// Fallback: require authentication everywhere when no explicit URLs exist.
		if (addAuthenticatedAll && filterInvocationDefinitionMap.isEmpty()) {
			BeanDefinition matcher = matcherType.createMatcher(parserContext, "/**", null);
			BeanDefinitionBuilder attributeBuilder = BeanDefinitionBuilder.rootBeanDefinition(SecurityConfig.class);
			attributeBuilder.addConstructorArgValue(new String[] { "authenticated" });
			attributeBuilder.setFactoryMethod("createList");
			filterInvocationDefinitionMap.put(matcher, attributeBuilder.getBeanDefinition());
		}
		return filterInvocationDefinitionMap;
	}

	/**
	 * Bean factory that applies the configured default role prefix to a
	 * {@link DefaultWebSecurityExpressionHandler} before exposing it.
	 */
	static class DefaultWebSecurityExpressionHandlerBeanFactory
			extends GrantedAuthorityDefaultsParserUtils.AbstractGrantedAuthorityDefaultsBeanFactory {

		private DefaultWebSecurityExpressionHandler handler = new DefaultWebSecurityExpressionHandler();

		@Override
		public DefaultWebSecurityExpressionHandler getBean() {
			this.handler.setDefaultRolePrefix(this.rolePrefix);
			return this.handler;
		}

	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zookeeper.server;

import org.apache.jute.BinaryOutputArchive;
import org.apache.jute.Record;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.KeeperException.SessionMovedException;
import org.apache.zookeeper.MultiTransactionRecord;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooDefs.OpCode;
import org.apache.zookeeper.data.Id;
import org.apache.zookeeper.server.ZooKeeperServer.ChangeRecord;
import org.apache.zookeeper.test.ClientBase;
import org.apache.zookeeper.txn.ErrorTxn;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;

/**
 * Unit tests for {@link PrepRequestProcessor}: each test stands up a real
 * ZooKeeperServer, feeds requests directly into the processor, and captures the
 * processed request via a stub downstream {@link RequestProcessor}.
 */
public class PrepRequestProcessorTest extends ClientBase {
    private static final Logger LOG = LoggerFactory.getLogger(PrepRequestProcessorTest.class);

    private static final int CONNECTION_TIMEOUT = 3000;

    private static String HOSTPORT = "127.0.0.1:" + PortAssignment.unique();

    // Released by MyRequestProcessor once a request has flowed through the chain.
    private CountDownLatch pLatch;

    private ZooKeeperServer zks;
    private ServerCnxnFactory servcnxnf;
    private PrepRequestProcessor processor;

    // The request object observed by the downstream processor stub.
    private Request outcome;

    @Before
    public void setup() throws Exception {
        File tmpDir = ClientBase.createTmpDir();
        ClientBase.setupTestEnv();
        zks = new ZooKeeperServer(tmpDir, tmpDir, 3000);
        SyncRequestProcessor.setSnapCount(100);
        final int PORT = Integer.parseInt(HOSTPORT.split(":")[1]);
        servcnxnf = ServerCnxnFactory.createFactory(PORT, -1);
        servcnxnf.startup(zks);
        Assert.assertTrue("waiting for server being up ",
                ClientBase.waitForServerUp(HOSTPORT, CONNECTION_TIMEOUT));
        // Replace the session tracker with a no-op stub so session checks never fail.
        zks.sessionTracker = new MySessionTracker();
    }

    @After
    public void teardown() throws Exception {
        if (servcnxnf != null) {
            servcnxnf.shutdown();
        }
        if (zks != null) {
            zks.shutdown();
        }
    }

    /**
     * A create request with a too-small (unparseable) payload must be turned
     * into a marshalling-error transaction and still be passed down the chain.
     */
    @Test
    public void testPRequest() throws Exception {
        pLatch = new CountDownLatch(1);
        processor = new PrepRequestProcessor(zks, new MyRequestProcessor());
        Request foo = new Request(null, 1l, 1, OpCode.create, ByteBuffer.allocate(3), null);
        processor.pRequest(foo);
        Assert.assertEquals("Request should have marshalling error",
                new ErrorTxn(KeeperException.Code.MARSHALLINGERROR.intValue()), outcome.txn);
        Assert.assertTrue("request hasn't been processed in chain",
                pLatch.await(5, java.util.concurrent.TimeUnit.SECONDS));
    }

    /** Serializes the given ops into a wire-format multi request. */
    private Request createMultiRequest(List<Op> ops) throws IOException {
        Record record = new MultiTransactionRecord(ops);
        // encoding
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        BinaryOutputArchive boa = BinaryOutputArchive.getArchive(baos);
        record.serialize(boa, "request");
        baos.close();
        // Id
        List<Id> ids = Arrays.asList(Ids.ANYONE_ID_UNSAFE);
        return new Request(null, 1l, 0, OpCode.multi, ByteBuffer.wrap(baos.toByteArray()), ids);
    }

    /** Runs a multi request through the processor and waits for it to reach the stub. */
    private void process(List<Op> ops) throws Exception {
        pLatch = new CountDownLatch(1);
        processor = new PrepRequestProcessor(zks, new MyRequestProcessor());
        Request req = createMultiRequest(ops);
        processor.pRequest(req);
        Assert.assertTrue("request hasn't been processed in chain",
                pLatch.await(5, java.util.concurrent.TimeUnit.SECONDS));
    }

    /**
     * This test checks that a successful multi will change outstanding record
     * and failed multi shouldn't change outstanding record.
     */
    @Test
    public void testMultiOutstandingChange() throws Exception {
        zks.getZKDatabase().dataTree.createNode("/foo", new byte[0], Ids.OPEN_ACL_UNSAFE, 0, 0, 0, 0);
        Assert.assertNull(zks.outstandingChangesForPath.get("/foo"));

        process(Arrays.asList(
                Op.setData("/foo", new byte[0], -1)));
        ChangeRecord cr = zks.outstandingChangesForPath.get("/foo");
        Assert.assertNotNull("Change record wasn't set", cr);
        Assert.assertEquals("Record zxid wasn't set correctly", 1, cr.zxid);

        process(Arrays.asList(
                Op.delete("/foo", -1)));
        cr = zks.outstandingChangesForPath.get("/foo");
        Assert.assertEquals("Record zxid wasn't set correctly", 2, cr.zxid);

        // It should fail and shouldn't change outstanding record.
        process(Arrays.asList(
                Op.delete("/foo", -1)));
        cr = zks.outstandingChangesForPath.get("/foo");
        // zxid should still be previous result because record's not changed.
        Assert.assertEquals("Record zxid wasn't set correctly", 2, cr.zxid);
    }

    /**
     * ZOOKEEPER-2052:
     * This test checks that if a multi operation aborted, and during the multi there is side effect
     * that changed outstandingChangesForPath, after aborted the side effect should be removed and
     * everything should be restored correctly.
     */
    @Test
    public void testMultiRollbackNoLastChange() throws Exception {
        zks.getZKDatabase().dataTree.createNode("/foo", new byte[0], Ids.OPEN_ACL_UNSAFE, 0, 0, 0, 0);
        zks.getZKDatabase().dataTree.createNode("/foo/bar", new byte[0], Ids.OPEN_ACL_UNSAFE, 0, 0, 0, 0);

        pLatch = new CountDownLatch(1);
        processor = new PrepRequestProcessor(zks, new MyRequestProcessor());

        Assert.assertNull(zks.outstandingChangesForPath.get("/foo"));

        // multi record:
        //     set "/foo" => succeed, leave a outstanding change
        //     delete "/foo" => fail, roll back change
        process(Arrays.asList(
                Op.setData("/foo", new byte[0], -1),
                Op.delete("/foo", -1)));

        // aborting multi shouldn't leave any record.
        Assert.assertNull(zks.outstandingChangesForPath.get("/foo"));
    }

    /** Downstream processor stub: records the request and releases the latch. */
    private class MyRequestProcessor implements RequestProcessor {
        @Override
        public void processRequest(Request request) {
            // getting called by PrepRequestProcessor
            outcome = request;
            pLatch.countDown();
        }
        @Override
        public void shutdown() {
            // TODO Auto-generated method stub
        }
    }

    /** No-op session tracker so that the processor's session checks always pass. */
    private class MySessionTracker implements SessionTracker {
        @Override
        public void addSession(long id, int to) {
            // TODO Auto-generated method stub
        }
        @Override
        public void checkSession(long sessionId, Object owner)
                throws SessionExpiredException, SessionMovedException {
            // TODO Auto-generated method stub
        }
        @Override
        public long createSession(int sessionTimeout) {
            // TODO Auto-generated method stub
            return 0;
        }
        @Override
        public void dumpSessions(PrintWriter pwriter) {
            // TODO Auto-generated method stub
        }
        @Override
        public void removeSession(long sessionId) {
            // TODO Auto-generated method stub
        }
        @Override
        public void setOwner(long id, Object owner) throws SessionExpiredException {
            // TODO Auto-generated method stub
        }
        @Override
        public void shutdown() {
            // TODO Auto-generated method stub
        }
        @Override
        public boolean touchSession(long sessionId, int sessionTimeout) {
            // TODO Auto-generated method stub
            return false;
        }
        @Override
        public void setSessionClosing(long sessionId) {
            // TODO Auto-generated method stub
        }
    }
}
package org.apache.hawq.pxf.api;

import org.apache.hawq.pxf.api.FilterParser.FilterBuilder;
import org.apache.hawq.pxf.api.FilterParser.Operation;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link FilterParser}: negative cases assert the exact syntax-error
 * message, positive cases assert that the mocked {@link FilterBuilder} is invoked
 * with the expected {@link Operation}.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({FilterBuilder.class})
public class FilterParserTest {

    FilterBuilder filterBuilder;
    FilterParser filterParser;
    String filter, exception;

    @Before
    public void setUp() throws Exception {
        filterBuilder = mock(FilterBuilder.class);
        filterParser = new FilterParser(filterBuilder);
    }

    @Test
    public void parseNegativeNull() {
        filter = null;
        runParseNegative("null string", null, "filter parsing ended with no result");
    }

    @Test
    public void parseNegativeEmpty() {
        filter = "";
        runParseNegative("empty string", filter, "filter parsing ended with no result");
    }

    @Test
    public void parseNegativeNotOperand() {
        // 'g' is not a recognized opcode character.
        filter = "g is not an operand";
        int index = 0;
        char op = filter.charAt(index);

        runParseNegative("illegal operand g", filter,
                "unknown opcode " + op + "(" + (int) op + ") at " + index);
    }

    @Test
    public void parseNegativeBadNumber() {
        // 'a' (attribute) must be followed by a numeric column index.
        filter = "a";
        int index = 1;
        exception = "numeric argument expected at " + index;

        runParseNegative("numeric operand with no number", filter, exception);

        filter = "aa";
        exception = "numeric argument expected at " + index;

        runParseNegative("numeric operand with non-number value", filter, exception);

        filter = "a12345678901234567890123456789";
        exception = "invalid numeric argument 12345678901234567890123456789";

        runParseNegative("numeric operand with too big number", filter, exception);

        filter = "a-12345678901234567890";
        exception = "invalid numeric argument -12345678901234567890";

        runParseNegative("numeric operand with too big negative number", filter, exception);

        filter = "a12345678901223456";
        exception = "value 12345678901223456 larger than intmax ending at " + filter.length();

        runParseNegative("numeric operand with long value", filter, exception);

        filter = "a-12345678901223456";
        exception = "value -12345678901223456 larger than intmax ending at " + filter.length();

        runParseNegative("numeric operand with negative long value", filter, exception);
    }

    @Test
    public void parseNegativeBadConst() {
        // 'c' (constant) must be followed by a number or a quoted string.
        filter = "c";
        int index = 1;
        exception = "argument should follow at " + index;
        runParseNegative("const operand with no value", filter, exception);

        filter = "cyan";
        exception = "numeric argument expected at " + index;
        runParseNegative("const operand with illegal value", filter, exception);

        filter = "c\"and that's it";
        exception = "string started at " + index + " not ended with \"";
        runParseNegative("string without closing \"", filter, exception);
    }

    @Test
    public void parseNegativeBadOperation() {
        // 'o' (operation) must be followed by a valid operation code.
        filter = "o";
        int index = 1;
        exception = "numeric argument expected at " + index;
        runParseNegative("operation with no value", filter, exception);

        filter = "ohno";
        exception = "numeric argument expected at " + index;
        runParseNegative("operation with no number", filter, exception);

        filter = "o100";
        index = 4;
        exception = "unknown op ending at " + index;
        runParseNegative("operation with out of bounds number", filter, exception);
    }

    @Test
    public void parseNegativeNoOperator() {
        // Operands without any operator leave the parse incomplete.
        filter = "a1234567890";
        runParseNegative("filter with only column", filter, "filter parsing failed, missing operators?");

        filter = "c1";
        runParseNegative("filter with only numeric const", filter, "filter parsing failed, missing operators?");

        filter = "c\"something in the way\"";
        runParseNegative("filter with only string const", filter, "filter parsing failed, missing operators?");
    }

    @Test
    public void parseNegativeTwoParams() {
        // Two operands in a row with no operator leave extra items on the stack.
        filter = "c1c2";
        exception = "Stack not empty, missing operators?";
        runParseNegative("filter with two consts in a row", filter, exception);

        filter = "c1a1";
        exception = "Stack not empty, missing operators?";
        runParseNegative("filter with const and attribute", filter, exception);

        filter = "a1c80";
        exception = "Stack not empty, missing operators?";
        runParseNegative("filter with attribute and const", filter, exception);
    }

    @Test
    public void parseNegativeOperationFirst() {
        // An operation needs two operands already on the stack.
        filter = "o1a3";
        int index = 2;
        FilterParser.Operation operation = FilterParser.Operation.HDOP_LT;
        exception = "missing operands for op " + operation + " at " + index;
        runParseNegative("filter with operation first", filter, exception);

        filter = "a2o1";
        index = 4;
        exception = "missing operands for op " + operation + " at " + index;
        runParseNegative("filter with only attribute before operation", filter, exception);
    }

    @Test
    public void parseColumnOnLeft() throws Exception {
        // Attribute on the left: operation codes map directly to Operation values.
        filter = "a1c2o1";
        Operation op = Operation.HDOP_LT;

        runParseOneOperation("this filter was build from HDOP_LT", filter, op);

        filter = "a1c2o2";
        op = Operation.HDOP_GT;
        runParseOneOperation("this filter was build from HDOP_GT", filter, op);

        filter = "a1c2o3";
        op = Operation.HDOP_LE;
        runParseOneOperation("this filter was build from HDOP_LE", filter, op);

        filter = "a1c2o4";
        op = Operation.HDOP_GE;
        runParseOneOperation("this filter was build from HDOP_GE", filter, op);

        filter = "a1c2o5";
        op = Operation.HDOP_EQ;
        runParseOneOperation("this filter was build from HDOP_EQ", filter, op);

        filter = "a1c2o6";
        op = Operation.HDOP_NE;
        runParseOneOperation("this filter was build from HDOP_NE", filter, op);

        filter = "a1c2o7";
        op = Operation.HDOP_AND;
        runParseOneOperation("this filter was build from HDOP_AND", filter, op);
    }

    @Test
    public void parseColumnOnRight() throws Exception {
        // Constant on the left: comparison operations are reversed by the parser.
        filter = "c2a1o1";
        Operation op = Operation.HDOP_GT;

        runParseOneOperation("this filter was build from HDOP_LT -> HDOP_GT using reverse!", filter, op);

        filter = "c2a1o2";
        op = Operation.HDOP_LT;
        runParseOneOperation("this filter was build from HDOP_GT -> HDOP_LT using reverse!", filter, op);

        filter = "c2a1o3";
        op = Operation.HDOP_GE;
        runParseOneOperation("this filter was build from HDOP_LE -> HDOP_GE using reverse!", filter, op);

        filter = "c2a1o4";
        op = Operation.HDOP_LE;
        runParseOneOperation("this filter was build from HDOP_GE -> HDOP_LE using reverse!", filter, op);

        filter = "c2a1o5";
        op = Operation.HDOP_EQ;
        runParseOneOperation("this filter was build from HDOP_EQ using reverse!", filter, op);

        filter = "c2a1o6";
        op = Operation.HDOP_NE;
        runParseOneOperation("this filter was build from HDOP_NE using reverse!", filter, op);

        filter = "c2a1o7";
        op = Operation.HDOP_AND;
        runParseOneOperation("this filter was build from HDOP_AND using reverse!", filter, op);
    }

    @Test
    public void parseFilterWith2Operations() throws Exception {
        filter = "a1c\"first\"o5a2c2o2o7";

        Object firstOp = "first operation HDOP_EQ";
        Object secondOp = "second operation HDOP_GT";
        Object lastOp = "filter with 2 operations connected by AND";

        when(filterBuilder.build(eq(Operation.HDOP_EQ),
                any(),
                any())).thenReturn(firstOp);

        when(filterBuilder.build(eq(Operation.HDOP_GT),
                any(),
                any())).thenReturn(secondOp);

        when(filterBuilder.build(eq(Operation.HDOP_AND),
                eq(firstOp),
                eq(secondOp))).thenReturn(lastOp);

        Object result = filterParser.parse(filter);

        assertEquals(lastOp, result);
    }

    /*
     * Helper functions
     */

    /** Parses the filter and asserts the exact FilterStringSyntaxException message. */
    private void runParseNegative(String description, String filter, String exception) {
        try {
            filterParser.parse(filter);
            fail(description + ": should have failed with FilterStringSyntaxException");
        } catch (FilterParser.FilterStringSyntaxException e) {
            assertEquals(description, exception + filterStringMsg(filter), e.getMessage());
        } catch (Exception e) {
            fail(description + ": should have failed with FilterStringSyntaxException and not " + e.getMessage());
        }
    }

    /** Parses the filter and asserts that the builder was called with the given operation. */
    private void runParseOneOperation(String description, String filter, Operation op) throws Exception {
        when(filterBuilder.build(eq(op),
                any(),
                any())).thenReturn(description);

        Object result = filterParser.parse(filter);

        assertEquals(description, result);
    }

    /** Suffix appended to every syntax-error message by the parser. */
    private String filterStringMsg(String filter) {
        return " (filter string: '" + filter + "')";
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package org.apache.kafka.connect.json; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import org.apache.kafka.connect.data.Date; import org.apache.kafka.connect.data.Decimal; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.kafka.common.cache.Cache; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.SchemaAndValue; import org.apache.kafka.connect.data.SchemaBuilder; import org.apache.kafka.connect.data.Struct; import org.apache.kafka.connect.data.Time; import org.apache.kafka.connect.data.Timestamp; import org.apache.kafka.connect.errors.DataException; import org.junit.Before; import org.junit.Test; import org.powermock.reflect.Whitebox; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.HashSet; import 
java.util.Map;
import java.util.Set;
import java.util.TimeZone;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@link JsonConverter}: verifies translation in both directions between
 * Connect schemas/values and the JSON envelope format ({"schema": ..., "payload": ...}),
 * covering every schema type, logical types (Decimal/Date/Time/Timestamp), schema-less
 * operation, and the schema-translation caches.
 */
public class JsonConverterTest {
    private static final String TOPIC = "topic";

    ObjectMapper objectMapper = new ObjectMapper();
    JsonConverter converter = new JsonConverter();

    @Before
    public void setUp() {
        // Default configuration: schemas enabled, value (not key) converter.
        converter.configure(Collections.EMPTY_MAP, false);
    }

    // Schema metadata

    @Test
    public void testConnectSchemaMetadataTranslation() {
        // this validates the non-type fields are translated and handled properly
        assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, true),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes()));
        assertEquals(new SchemaAndValue(Schema.OPTIONAL_BOOLEAN_SCHEMA, null),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": true }, \"payload\": null }".getBytes()));
        assertEquals(new SchemaAndValue(SchemaBuilder.bool().defaultValue(true).build(), true),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"default\": true }, \"payload\": null }".getBytes()));
        assertEquals(new SchemaAndValue(SchemaBuilder.bool().required().name("bool").version(2).doc("the documentation").parameter("foo", "bar").build(), true),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": false, \"name\": \"bool\", \"version\": 2, \"doc\": \"the documentation\", \"parameters\": { \"foo\": \"bar\" }}, \"payload\": true }".getBytes()));
    }

    // Schema types

    @Test
    public void booleanToConnect() {
        assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, true),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes()));
        assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, false),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": false }".getBytes()));
    }

    @Test
    public void byteToConnect() {
        assertEquals(new SchemaAndValue(Schema.INT8_SCHEMA, (byte) 12),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int8\" }, \"payload\": 12 }".getBytes()));
    }

    @Test
    public void shortToConnect() {
        assertEquals(new SchemaAndValue(Schema.INT16_SCHEMA, (short) 12),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int16\" }, \"payload\": 12 }".getBytes()));
    }

    @Test
    public void intToConnect() {
        assertEquals(new SchemaAndValue(Schema.INT32_SCHEMA, 12),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int32\" }, \"payload\": 12 }".getBytes()));
    }

    @Test
    public void longToConnect() {
        assertEquals(new SchemaAndValue(Schema.INT64_SCHEMA, 12L),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int64\" }, \"payload\": 12 }".getBytes()));
        // Also check a value too large for int32, which must survive as a long.
        assertEquals(new SchemaAndValue(Schema.INT64_SCHEMA, 4398046511104L),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int64\" }, \"payload\": 4398046511104 }".getBytes()));
    }

    @Test
    public void floatToConnect() {
        assertEquals(new SchemaAndValue(Schema.FLOAT32_SCHEMA, 12.34f),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"float\" }, \"payload\": 12.34 }".getBytes()));
    }

    @Test
    public void doubleToConnect() {
        assertEquals(new SchemaAndValue(Schema.FLOAT64_SCHEMA, 12.34),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"double\" }, \"payload\": 12.34 }".getBytes()));
    }

    @Test
    public void bytesToConnect() throws UnsupportedEncodingException {
        ByteBuffer reference = ByteBuffer.wrap("test-string".getBytes("UTF-8"));
        // Payload is the base64 encoding of "test-string".
        String msg = "{ \"schema\": { \"type\": \"bytes\" }, \"payload\": \"dGVzdC1zdHJpbmc=\" }";
        SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
        ByteBuffer converted = ByteBuffer.wrap((byte[]) schemaAndValue.value());
        assertEquals(reference, converted);
    }

    @Test
    public void stringToConnect() {
        assertEquals(new SchemaAndValue(Schema.STRING_SCHEMA, "foo-bar-baz"),
                converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"string\" }, \"payload\": \"foo-bar-baz\" }".getBytes()));
    }

    @Test
    public void arrayToConnect() {
        byte[] arrayJson = "{ \"schema\": { \"type\": \"array\", \"items\": { \"type\" : \"int32\" } }, \"payload\": [1, 2, 3] }".getBytes();
        assertEquals(new SchemaAndValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3)),
                converter.toConnectData(TOPIC, arrayJson));
    }

    @Test
    public void mapToConnectStringKeys() {
        // String-keyed maps are encoded as JSON objects.
        byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"string\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": { \"key1\": 12, \"key2\": 15} }".getBytes();
        Map<String, Integer> expected = new HashMap<>();
        expected.put("key1", 12);
        expected.put("key2", 15);
        assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build(), expected),
                converter.toConnectData(TOPIC, mapJson));
    }

    @Test
    public void mapToConnectNonStringKeys() {
        // Non-string-keyed maps are encoded as a JSON array of [key, value] pairs.
        byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"int32\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": [ [1, 12], [2, 15] ] }".getBytes();
        Map<Integer, Integer> expected = new HashMap<>();
        expected.put(1, 12);
        expected.put(2, 15);
        assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build(), expected),
                converter.toConnectData(TOPIC, mapJson));
    }

    @Test
    public void structToConnect() {
        byte[] structJson = "{ \"schema\": { \"type\": \"struct\", \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\" }, { \"field\": \"field2\", \"type\": \"string\" }] }, \"payload\": { \"field1\": true, \"field2\": \"string\" } }".getBytes();
        Schema expectedSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).build();
        Struct expected = new Struct(expectedSchema).put("field1", true).put("field2", "string");
        SchemaAndValue converted = converter.toConnectData(TOPIC, structJson);
        assertEquals(new SchemaAndValue(expectedSchema, expected), converted);
    }

    @Test(expected = DataException.class)
    public void nullToConnect() {
        // When schemas are enabled, trying to decode a null should be an error -- we should *always* have the envelope
        assertEquals(SchemaAndValue.NULL, converter.toConnectData(TOPIC, null));
    }

    @Test
    public void nullSchemaPrimitiveToConnect() {
        SchemaAndValue converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": null }".getBytes());
        assertEquals(SchemaAndValue.NULL, converted);

        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": true }".getBytes());
        assertEquals(new SchemaAndValue(null, true), converted);

        // Integers: Connect has more data types, and JSON unfortunately mixes all number types. We try to preserve
        // info as best we can, so we always use the largest integer and floating point numbers we can and have Jackson
        // determine if it's an integer or not
        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12 }".getBytes());
        assertEquals(new SchemaAndValue(null, 12L), converted);

        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12.24 }".getBytes());
        assertEquals(new SchemaAndValue(null, 12.24), converted);

        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": \"a string\" }".getBytes());
        assertEquals(new SchemaAndValue(null, "a string"), converted);

        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": [1, \"2\", 3] }".getBytes());
        assertEquals(new SchemaAndValue(null, Arrays.asList(1L, "2", 3L)), converted);

        converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": { \"field1\": 1, \"field2\": 2} }".getBytes());
        Map<String, Long> obj = new HashMap<>();
        obj.put("field1", 1L);
        obj.put("field2", 2L);
        assertEquals(new SchemaAndValue(null, obj), converted);
    }

    @Test
    public void decimalToConnect() {
        Schema schema = Decimal.schema(2);
        BigDecimal reference = new BigDecimal(new BigInteger("156"), 2);
        // Payload is base64 encoded byte[]{0, -100}, which is the two's complement encoding of 156.
        String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }, \"payload\": \"AJw=\" }";
        SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
        BigDecimal converted = (BigDecimal) schemaAndValue.value();
        assertEquals(schema, schemaAndValue.schema());
        assertEquals(reference, converted);
    }

    @Test
    public void dateToConnect() {
        Schema schema = Date.SCHEMA;
        // Logical Date payloads are days since the UTC epoch.
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.DATE, 10000);
        java.util.Date reference = calendar.getTime();
        String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1 }, \"payload\": 10000 }";
        SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
        java.util.Date converted = (java.util.Date) schemaAndValue.value();
        assertEquals(schema, schemaAndValue.schema());
        assertEquals(reference, converted);
    }

    @Test
    public void timeToConnect() {
        Schema schema = Time.SCHEMA;
        // Logical Time payloads are milliseconds since UTC midnight.
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.MILLISECOND, 14400000);
        java.util.Date reference = calendar.getTime();
        String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1 }, \"payload\": 14400000 }";
        SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
        java.util.Date converted = (java.util.Date) schemaAndValue.value();
        assertEquals(schema, schemaAndValue.schema());
        assertEquals(reference, converted);
    }

    @Test
    public void timestampToConnect() {
        Schema schema = Timestamp.SCHEMA;
        // Logical Timestamp payloads are milliseconds since the UTC epoch; added in two
        // steps because 4000000000 overflows an int argument to Calendar.add.
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.MILLISECOND, 2000000000);
        calendar.add(Calendar.MILLISECOND, 2000000000);
        java.util.Date reference = calendar.getTime();
        String msg = "{ \"schema\": { \"type\": \"int64\", \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1 }, \"payload\": 4000000000 }";
        SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
        java.util.Date converted = (java.util.Date) schemaAndValue.value();
        assertEquals(schema, schemaAndValue.schema());
        assertEquals(reference, converted);
    }

    // Schema metadata

    @Test
    public void testJsonSchemaMetadataTranslation() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());

        converted = parse(converter.fromConnectData(TOPIC, Schema.OPTIONAL_BOOLEAN_SCHEMA, null));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"boolean\", \"optional\": true }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isNull());

        converted = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().defaultValue(true).build(), true));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"default\": true }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());

        converted = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().required().name("bool").version(3).doc("the documentation").parameter("foo", "bar").build(), true));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"name\": \"bool\", \"version\": 3, \"doc\": \"the documentation\", \"parameters\": { \"foo\": \"bar\" }}"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
    }

    @Test
    public void testCacheSchemaToConnectConversion() {
        Cache<JsonNode, Schema> cache = Whitebox.getInternalState(converter, "toConnectSchemaCache");
        assertEquals(0, cache.size());

        converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes());
        assertEquals(1, cache.size());

        converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes());
        assertEquals(1, cache.size());

        // Different schema should also get cached
        converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": true }, \"payload\": true }".getBytes());
        assertEquals(2, cache.size());

        // Even equivalent, but different JSON encoding of schema, should get different cache entry
        converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\", \"optional\": false }, \"payload\": true }".getBytes());
        assertEquals(3, cache.size());
    }

    // Schema types

    @Test
    public void booleanToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
    }

    @Test
    public void byteToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT8_SCHEMA, (byte) 12));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int8\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(12, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
    }

    @Test
    public void shortToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT16_SCHEMA, (short) 12));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int16\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(12, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
    }

    @Test
    public void intToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT32_SCHEMA, 12));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int32\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(12, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
    }

    @Test
    public void longToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT64_SCHEMA, 4398046511104L));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int64\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(4398046511104L, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).longValue());
    }

    @Test
    public void floatToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.FLOAT32_SCHEMA, 12.34f));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"float\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(12.34f, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).floatValue(), 0.001);
    }

    @Test
    public void doubleToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.FLOAT64_SCHEMA, 12.34));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"double\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(12.34, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).doubleValue(), 0.001);
    }

    @Test
    public void bytesToJson() throws IOException {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BYTES_SCHEMA, "test-string".getBytes()));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(ByteBuffer.wrap("test-string".getBytes()),
                ByteBuffer.wrap(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue()));
    }

    @Test
    public void stringToJson() {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, "test-string"));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"string\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals("test-string", converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).textValue());
    }

    @Test
    public void arrayToJson() {
        Schema int32Array = SchemaBuilder.array(Schema.INT32_SCHEMA).build();
        JsonNode converted = parse(converter.fromConnectData(TOPIC, int32Array, Arrays.asList(1, 2, 3)));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"array\", \"items\": { \"type\": \"int32\", \"optional\": false }, \"optional\": false }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add(2).add(3),
                converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    @Test
    public void mapToJsonStringKeys() {
        Schema stringIntMap = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build();
        Map<String, Integer> input = new HashMap<>();
        input.put("key1", 12);
        input.put("key2", 15);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, stringIntMap, input));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"string\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(JsonNodeFactory.instance.objectNode().put("key1", 12).put("key2", 15),
                converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    @Test
    public void mapToJsonNonStringKeys() {
        // Non-string keys force the array-of-pairs encoding; element order is not
        // guaranteed, so the pairs are compared as a set.
        Schema intIntMap = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build();
        Map<Integer, Integer> input = new HashMap<>();
        input.put(1, 12);
        input.put(2, 15);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, intIntMap, input));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"int32\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));

        assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isArray());
        ArrayNode payload = (ArrayNode) converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
        assertEquals(2, payload.size());
        Set<JsonNode> payloadEntries = new HashSet<>();
        for (JsonNode elem : payload)
            payloadEntries.add(elem);
        assertEquals(new HashSet<>(Arrays.asList(JsonNodeFactory.instance.arrayNode().add(1).add(12),
                        JsonNodeFactory.instance.arrayNode().add(2).add(15))),
                payloadEntries
        );
    }

    @Test
    public void structToJson() {
        Schema schema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).field("field3", Schema.STRING_SCHEMA).field("field4", Schema.BOOLEAN_SCHEMA).build();
        Struct input = new Struct(schema).put("field1", true).put("field2", "string2").put("field3", "string3").put("field4", false);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, input));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"struct\", \"optional\": false, \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\", \"optional\": false }, { \"field\": \"field2\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field3\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field4\", \"type\": \"boolean\", \"optional\": false }] }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertEquals(JsonNodeFactory.instance.objectNode()
                        .put("field1", true)
                        .put("field2", "string2")
                        .put("field3", "string3")
                        .put("field4", false),
                converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    @Test
    public void decimalToJson() throws IOException {
        JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2)));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertArrayEquals(new byte[]{0, -100}, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue());
    }

    @Test
    public void dateToJson() throws IOException {
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.DATE, 10000);
        java.util.Date date = calendar.getTime();

        JsonNode converted = parse(converter.fromConnectData(TOPIC, Date.SCHEMA, date));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int32\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1 }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        JsonNode payload = converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
        assertTrue(payload.isInt());
        assertEquals(10000, payload.intValue());
    }

    @Test
    public void timeToJson() throws IOException {
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.MILLISECOND, 14400000);
        java.util.Date date = calendar.getTime();

        JsonNode converted = parse(converter.fromConnectData(TOPIC, Time.SCHEMA, date));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int32\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Time\", \"version\": 1 }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        JsonNode payload = converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
        assertTrue(payload.isInt());
        assertEquals(14400000, payload.longValue());
    }

    @Test
    public void timestampToJson() throws IOException {
        GregorianCalendar calendar = new GregorianCalendar(1970, Calendar.JANUARY, 1, 0, 0, 0);
        calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.MILLISECOND, 2000000000);
        calendar.add(Calendar.MILLISECOND, 2000000000);
        java.util.Date date = calendar.getTime();

        JsonNode converted = parse(converter.fromConnectData(TOPIC, Timestamp.SCHEMA, date));
        validateEnvelope(converted);
        assertEquals(parse("{ \"type\": \"int64\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Timestamp\", \"version\": 1 }"),
                converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        JsonNode payload = converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
        assertTrue(payload.isLong());
        assertEquals(4000000000L, payload.longValue());
    }

    @Test
    public void nullSchemaAndPrimitiveToJson() {
        // This still needs to do conversion of data, null schema means "anything goes"
        JsonNode converted = parse(converter.fromConnectData(TOPIC, null, true));
        validateEnvelopeNullSchema(converted);
        assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
        assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
    }

    @Test
    public void nullSchemaAndArrayToJson() {
        // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
        // types to verify conversion still works.
        JsonNode converted = parse(converter.fromConnectData(TOPIC, null, Arrays.asList(1, "string", true)));
        validateEnvelopeNullSchema(converted);
        assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
        assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add("string").add(true),
                converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    @Test
    public void nullSchemaAndMapToJson() {
        // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
        // types to verify conversion still works.
        Map<String, Object> input = new HashMap<>();
        input.put("key1", 12);
        input.put("key2", "string");
        input.put("key3", true);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, null, input));
        validateEnvelopeNullSchema(converted);
        assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
        assertEquals(JsonNodeFactory.instance.objectNode().put("key1", 12).put("key2", "string").put("key3", true),
                converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    @Test
    public void nullSchemaAndMapNonStringKeysToJson() {
        // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
        // types to verify conversion still works.
        Map<Object, Object> input = new HashMap<>();
        input.put("string", 12);
        input.put(52, "string");
        input.put(false, true);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, null, input));
        validateEnvelopeNullSchema(converted);
        assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
        assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isArray());
        ArrayNode payload = (ArrayNode) converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME);
        assertEquals(3, payload.size());
        Set<JsonNode> payloadEntries = new HashSet<>();
        for (JsonNode elem : payload)
            payloadEntries.add(elem);
        assertEquals(new HashSet<>(Arrays.asList(JsonNodeFactory.instance.arrayNode().add("string").add(12),
                        JsonNodeFactory.instance.arrayNode().add(52).add("string"),
                        JsonNodeFactory.instance.arrayNode().add(false).add(true))),
                payloadEntries
        );
    }

    @Test(expected = DataException.class)
    public void mismatchSchemaJson() {
        // If we have mismatching schema info, we should properly convert to a DataException
        converter.fromConnectData(TOPIC, Schema.FLOAT64_SCHEMA, true);
    }

    @Test
    public void noSchemaToConnect() {
        Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false);
        converter.configure(props, true);
        assertEquals(new SchemaAndValue(null, true), converter.toConnectData(TOPIC, "true".getBytes()));
    }

    @Test
    public void noSchemaToJson() {
        Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false);
        converter.configure(props, true);
        JsonNode converted = parse(converter.fromConnectData(TOPIC, null, true));
        assertTrue(converted.isBoolean());
        assertEquals(true, converted.booleanValue());
    }

    @Test
    public void testCacheSchemaToJsonConversion() {
        Cache<Schema, ObjectNode> cache = Whitebox.getInternalState(converter, "fromConnectSchemaCache");
        assertEquals(0, cache.size());

        // Repeated conversion of the same schema, even if the schema object is different should return the same Java
        // object
        converter.fromConnectData(TOPIC, SchemaBuilder.bool().build(), true);
        assertEquals(1, cache.size());
        converter.fromConnectData(TOPIC, SchemaBuilder.bool().build(), true);
        assertEquals(1, cache.size());

        // Validate that a similar, but different schema correctly returns a different schema.
        converter.fromConnectData(TOPIC, SchemaBuilder.bool().optional().build(), true);
        assertEquals(2, cache.size());
    }

    // Parses serialized bytes back into a JsonNode; fails the test on parse errors.
    private JsonNode parse(byte[] json) {
        try {
            return objectMapper.readTree(json);
        } catch (IOException e) {
            fail("IOException during JSON parse: " + e.getMessage());
            throw new RuntimeException("failed");
        }
    }

    // Parses a JSON string into a JsonNode; fails the test on parse errors.
    private JsonNode parse(String json) {
        try {
            return objectMapper.readTree(json);
        } catch (IOException e) {
            fail("IOException during JSON parse: " + e.getMessage());
            throw new RuntimeException("failed");
        }
    }

    // Asserts the value is a well-formed envelope object with an object "schema" field and a "payload" field.
    private void validateEnvelope(JsonNode env) {
        assertNotNull(env);
        assertTrue(env.isObject());
        assertEquals(2, env.size());
        assertTrue(env.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertTrue(env.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isObject());
        assertTrue(env.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }

    // Asserts the value is a well-formed envelope object whose "schema" field is JSON null.
    private void validateEnvelopeNullSchema(JsonNode env) {
        assertNotNull(env);
        assertTrue(env.isObject());
        assertEquals(2, env.size());
        assertTrue(env.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
        assertTrue(env.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
        assertTrue(env.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.view; import java.nio.ByteBuffer; import java.util.*; import com.google.common.collect.Iterators; import com.google.common.collect.PeekingIterator; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.db.*; import org.apache.cassandra.db.rows.*; import org.apache.cassandra.db.partitions.*; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.CompositeType; /** * Creates the updates to apply to a view given the existing rows in the base * table and the updates that we're applying to them (this handles updates * on a single partition only). * * This class is used by passing the updates made to the base table to * {@link #addBaseTableUpdate} and calling {@link #generateViewUpdates} once all updates have * been handled to get the resulting view mutations. 
*/
public class ViewUpdateGenerator
{
    private final View view;
    private final int nowInSec;

    private final CFMetaData baseMetadata;
    private final DecoratedKey baseDecoratedKey;
    private final ByteBuffer[] basePartitionKey;

    private final CFMetaData viewMetadata;

    // View updates accumulated so far, keyed by view partition key.
    private final Map<DecoratedKey, PartitionUpdate> updates = new HashMap<>();

    // Reused internally to build a new entry
    private final ByteBuffer[] currentViewEntryPartitionKey;
    private final Row.Builder currentViewEntryBuilder;

    /**
     * The type of update action to perform to the view for a given base table
     * update.
     */
    private enum UpdateAction
    {
        NONE,            // There was no view entry and none should be added
        NEW_ENTRY,       // There was no entry but there is one post-update
        DELETE_OLD,      // There was an entry but there is nothing after update
        UPDATE_EXISTING, // There was an entry and the update modifies it
        SWITCH_ENTRY     // There was an entry and there is still one after update,
                         // but they are not the same one.
    };

    /**
     * Creates a new {@code ViewUpdateGenerator}.
     *
     * @param view the view for which this will be building updates for.
     * @param basePartitionKey the partition key for the base table partition for which
     * we'll handle updates for.
     * @param nowInSec the current time in seconds. Used to decide if data are live or not
     * and as base reference for new deletions.
     */
    public ViewUpdateGenerator(View view, DecoratedKey basePartitionKey, int nowInSec)
    {
        this.view = view;
        this.nowInSec = nowInSec;
        this.baseMetadata = view.getDefinition().baseTableMetadata();
        this.baseDecoratedKey = basePartitionKey;
        this.basePartitionKey = extractKeyComponents(basePartitionKey, baseMetadata.getKeyValidator());
        this.viewMetadata = view.getDefinition().metadata;

        this.currentViewEntryPartitionKey = new ByteBuffer[viewMetadata.partitionKeyColumns().size()];
        this.currentViewEntryBuilder = BTreeRow.sortedBuilder();
    }

    // Splits a partition key into its individual components: composite keys are split
    // into one buffer per component, otherwise the whole key is the single component.
    private static ByteBuffer[] extractKeyComponents(DecoratedKey partitionKey, AbstractType<?> type)
    {
        return type instanceof CompositeType
             ? ((CompositeType)type).split(partitionKey.getKey())
             : new ByteBuffer[]{ partitionKey.getKey() };
    }

    /**
     * Adds to this generator the updates to be made to the view given a base table row
     * before and after an update.
     *
     * @param existingBaseRow the base table row as it is before an update.
     * @param mergedBaseRow the base table row after the update is applied (note that
     * this is not just the new update, but rather the resulting row).
     */
    public void addBaseTableUpdate(Row existingBaseRow, Row mergedBaseRow)
    {
        switch (updateAction(existingBaseRow, mergedBaseRow))
        {
            case NONE:
                return;
            case NEW_ENTRY:
                createEntry(mergedBaseRow);
                return;
            case DELETE_OLD:
                deleteOldEntry(existingBaseRow);
                return;
            case UPDATE_EXISTING:
                updateEntry(existingBaseRow, mergedBaseRow);
                return;
            case SWITCH_ENTRY:
                // The entry the row maps to changed: add the new one and remove the old one.
                createEntry(mergedBaseRow);
                deleteOldEntry(existingBaseRow);
                return;
        }
    }

    /**
     * Returns the updates that need to be done to the view given the base table updates
     * passed to {@link #addBaseTableUpdate}.
     *
     * @return the updates to do to the view.
     */
    public Collection<PartitionUpdate> generateViewUpdates()
    {
        return updates.values();
    }

    /**
     * Clears the current state so that the generator may be reused.
*/
    public void clear()
    {
        updates.clear();
    }

    /**
     * Compute which type of action needs to be performed to the view for a base table row
     * before and after an update.
     */
    private UpdateAction updateAction(Row existingBaseRow, Row mergedBaseRow)
    {
        // Having existing empty is useful, it just means we'll insert a brand new entry for mergedBaseRow,
        // but if we have no update at all, we shouldn't get there.
        assert !mergedBaseRow.isEmpty();

        // Note that none of the base PK columns will differ since we're intrinsically dealing
        // with the same base row. So we have to check 3 things:
        //   1) that the clustering doesn't have a null, which can happen for compact tables. If that's the case,
        //      there is no corresponding entries.
        //   2) if there is a column not part of the base PK in the view PK, whether it is changed by the update.
        //   3) whether mergedBaseRow actually match the view SELECT filter
        if (baseMetadata.isCompactTable())
        {
            Clustering clustering = mergedBaseRow.clustering();
            for (int i = 0; i < clustering.size(); i++)
            {
                if (clustering.get(i) == null)
                    return UpdateAction.NONE;
            }
        }

        assert view.baseNonPKColumnsInViewPK.size() <= 1 : "We currently only support one base non-PK column in the view PK";
        if (view.baseNonPKColumnsInViewPK.isEmpty())
        {
            // The view entry is necessarily the same pre and post update.
            // Note that we allow existingBaseRow to be null and treat it as empty (see MultiViewUpdateBuilder.generateViewsMutations).
            boolean existingHasLiveData = existingBaseRow != null && existingBaseRow.hasLiveData(nowInSec);
            boolean mergedHasLiveData = mergedBaseRow.hasLiveData(nowInSec);
            return existingHasLiveData
                 ? (mergedHasLiveData ? UpdateAction.UPDATE_EXISTING : UpdateAction.DELETE_OLD)
                 : (mergedHasLiveData ? UpdateAction.NEW_ENTRY : UpdateAction.NONE);
        }

        ColumnDefinition baseColumn = view.baseNonPKColumnsInViewPK.get(0);
        assert !baseColumn.isComplex() : "A complex column couldn't be part of the view PK";
        Cell before = existingBaseRow == null ? null : existingBaseRow.getCell(baseColumn);
        Cell after = mergedBaseRow.getCell(baseColumn);

        // If the update didn't modify this column, the cells will be the same object so it's worth checking
        if (before == after)
            return isLive(before) ? UpdateAction.UPDATE_EXISTING : UpdateAction.NONE;

        if (!isLive(before))
            return isLive(after) ? UpdateAction.NEW_ENTRY : UpdateAction.NONE;
        if (!isLive(after))
            return UpdateAction.DELETE_OLD;

        // Both live but different cells: same entry only if the values compare equal.
        return baseColumn.cellValueType().compare(before.value(), after.value()) == 0
             ? UpdateAction.UPDATE_EXISTING
             : UpdateAction.SWITCH_ENTRY;
    }

    // Whether the given base row passes this view's SELECT filter for the current partition.
    private boolean matchesViewFilter(Row baseRow)
    {
        return view.matchesViewFilter(baseDecoratedKey, baseRow, nowInSec);
    }

    // A cell counts as live only if it is present and live as of nowInSec.
    private boolean isLive(Cell cell)
    {
        return cell != null && cell.isLive(nowInSec);
    }

    /**
     * Creates a view entry corresponding to the provided base row.
     * <p>
     * This method checks that the base row does match the view filter before applying it.
     */
    private void createEntry(Row baseRow)
    {
        // Before creating a new entry, make sure it matches the view filter
        if (!matchesViewFilter(baseRow))
            return;

        startNewUpdate(baseRow);
        currentViewEntryBuilder.addPrimaryKeyLivenessInfo(computeLivenessInfoForEntry(baseRow));
        currentViewEntryBuilder.addRowDeletion(baseRow.deletion());

        for (ColumnData data : baseRow)
        {
            ColumnDefinition viewColumn = view.getViewColumn(data.column());
            // If that base table column is not denormalized in the view, we have nothing to do.
            // Also, if it's part of the view PK it's already been taken into account in the clustering.
            if (viewColumn == null || viewColumn.isPrimaryKeyColumn())
                continue;

            addColumnData(viewColumn, data);
        }

        submitUpdate();
    }

    /**
     * Creates the updates to apply to the existing view entry given the base table row before
     * and after the update, assuming that the update hasn't changed to which view entry the
     * row correspond (that is, we know the columns composing the view PK haven't changed).
* <p> * This method checks that the base row (before and after) does match the view filter before * applying anything. */ private void updateEntry(Row existingBaseRow, Row mergedBaseRow) { // While we know existingBaseRow and mergedBaseRow are corresponding to the same view entry, // they may not match the view filter. if (!matchesViewFilter(existingBaseRow)) { createEntry(mergedBaseRow); return; } if (!matchesViewFilter(mergedBaseRow)) { deleteOldEntryInternal(existingBaseRow); return; } startNewUpdate(mergedBaseRow); // In theory, it may be the PK liveness and row deletion hasn't been change by the update // and we could condition the 2 additions below. In practice though, it's as fast (if not // faster) to compute those info than to check if they have changed so we keep it simple. currentViewEntryBuilder.addPrimaryKeyLivenessInfo(computeLivenessInfoForEntry(mergedBaseRow)); currentViewEntryBuilder.addRowDeletion(mergedBaseRow.deletion()); // We only add to the view update the cells from mergedBaseRow that differs from // existingBaseRow. For that and for speed we can just cell pointer equality: if the update // hasn't touched a cell, we know it will be the same object in existingBaseRow and // mergedBaseRow (note that including more cells than we strictly should isn't a problem // for correction, so even if the code change and pointer equality don't work anymore, it'll // only a slightly inefficiency which we can fix then). // Note: we could alternatively use Rows.diff() for this, but because it is a bit more generic // than what we need here, it's also a bit less efficient (it allocates more in particular), // and this might be called a lot of time for view updates. So, given that this is not a whole // lot of code anyway, it's probably doing the diff manually. 
PeekingIterator<ColumnData> existingIter = Iterators.peekingIterator(existingBaseRow.iterator()); for (ColumnData mergedData : mergedBaseRow) { ColumnDefinition baseColumn = mergedData.column(); ColumnDefinition viewColumn = view.getViewColumn(baseColumn); // If that base table column is not denormalized in the view, we had nothing to do. // Alose, if it's part of the view PK it's already been taken into account in the clustering. if (viewColumn == null || viewColumn.isPrimaryKeyColumn()) continue; ColumnData existingData = null; // Find if there is data for that column in the existing row while (existingIter.hasNext()) { int cmp = baseColumn.compareTo(existingIter.peek().column()); if (cmp < 0) break; ColumnData next = existingIter.next(); if (cmp == 0) { existingData = next; break; } } if (existingData == null) { addColumnData(viewColumn, mergedData); continue; } if (mergedData == existingData) continue; if (baseColumn.isComplex()) { ComplexColumnData mergedComplexData = (ComplexColumnData)mergedData; ComplexColumnData existingComplexData = (ComplexColumnData)existingData; if (mergedComplexData.complexDeletion().supersedes(existingComplexData.complexDeletion())) currentViewEntryBuilder.addComplexDeletion(viewColumn, mergedComplexData.complexDeletion()); PeekingIterator<Cell> existingCells = Iterators.peekingIterator(existingComplexData.iterator()); for (Cell mergedCell : mergedComplexData) { Cell existingCell = null; // Find if there is corresponding cell in the existing row while (existingCells.hasNext()) { int cmp = baseColumn.cellPathComparator().compare(mergedCell.path(), existingCells.peek().path()); if (cmp > 0) break; Cell next = existingCells.next(); if (cmp == 0) { existingCell = next; break; } } if (mergedCell != existingCell) addCell(viewColumn, mergedCell); } } else { // Note that we've already eliminated the case where merged == existing addCell(viewColumn, (Cell)mergedData); } } submitUpdate(); } /** * Deletes the view entry corresponding to the 
provided base row. * <p> * This method checks that the base row does match the view filter before bothering. */ private void deleteOldEntry(Row existingBaseRow) { // Before deleting an old entry, make sure it was matching the view filter (otherwise there is nothing to delete) if (!matchesViewFilter(existingBaseRow)) return; deleteOldEntryInternal(existingBaseRow); } private void deleteOldEntryInternal(Row existingBaseRow) { startNewUpdate(existingBaseRow); DeletionTime dt = new DeletionTime(computeTimestampForEntryDeletion(existingBaseRow), nowInSec); currentViewEntryBuilder.addRowDeletion(Row.Deletion.shadowable(dt)); submitUpdate(); } /** * Computes the partition key and clustering for a new view entry, and setup the internal * row builder for the new row. * * This assumes that there is corresponding entry, i.e. no values for the partition key and * clustering are null (since we have eliminated that case through updateAction). */ private void startNewUpdate(Row baseRow) { ByteBuffer[] clusteringValues = new ByteBuffer[viewMetadata.clusteringColumns().size()]; for (ColumnDefinition viewColumn : viewMetadata.primaryKeyColumns()) { ColumnDefinition baseColumn = view.getBaseColumn(viewColumn); ByteBuffer value = getValueForPK(baseColumn, baseRow); if (viewColumn.isPartitionKey()) currentViewEntryPartitionKey[viewColumn.position()] = value; else clusteringValues[viewColumn.position()] = value; } currentViewEntryBuilder.newRow(Clustering.make(clusteringValues)); } private LivenessInfo computeLivenessInfoForEntry(Row baseRow) { /* * We need to compute both the timestamp and expiration. * * For the timestamp, it makes sense to use the bigger timestamp for all view PK columns. * * This is more complex for the expiration. We want to maintain consistency between the base and the view, so the * entry should only exist as long as the base row exists _and_ has non-null values for all the columns that are part * of the view PK. 
* Which means we really have 2 cases: * 1) either the columns for the base and view PKs are exactly the same: in that case, the view entry should live * as long as the base row lives. This means the view entry should only expire once *everything* in the base row * has expired. Which means the row TTL should be the max of any other TTL. * 2) or there is a column that is not in the base PK but is in the view PK (we can only have one so far, we'll need * to slightly adapt if we allow more later): in that case, as long as that column lives the entry does too, but * as soon as it expires (or is deleted for that matter) the entry also should expire. So the expiration for the * view is the one of that column, irregarding of any other expiration. * To take an example of that case, if you have: * CREATE TABLE t (a int, b int, c int, PRIMARY KEY (a, b)) * CREATE MATERIALIZED VIEW mv AS SELECT * FROM t WHERE c IS NOT NULL AND a IS NOT NULL AND b IS NOT NULL PRIMARY KEY (c, a, b) * INSERT INTO t(a, b) VALUES (0, 0) USING TTL 3; * UPDATE t SET c = 0 WHERE a = 0 AND b = 0; * then even after 3 seconds elapsed, the row will still exist (it just won't have a "row marker" anymore) and so * the MV should still have a corresponding entry. */ assert view.baseNonPKColumnsInViewPK.size() <= 1; // This may change, but is currently an enforced limitation LivenessInfo baseLiveness = baseRow.primaryKeyLivenessInfo(); if (view.baseNonPKColumnsInViewPK.isEmpty()) { int ttl = baseLiveness.ttl(); int expirationTime = baseLiveness.localExpirationTime(); for (Cell cell : baseRow.cells()) { if (cell.ttl() > ttl) { ttl = cell.ttl(); expirationTime = cell.localDeletionTime(); } } return ttl == baseLiveness.ttl() ? 
baseLiveness : LivenessInfo.withExpirationTime(baseLiveness.timestamp(), ttl, expirationTime); } ColumnDefinition baseColumn = view.baseNonPKColumnsInViewPK.get(0); Cell cell = baseRow.getCell(baseColumn); assert isLive(cell) : "We shouldn't have got there if the base row had no associated entry"; long timestamp = Math.max(baseLiveness.timestamp(), cell.timestamp()); return LivenessInfo.withExpirationTime(timestamp, cell.ttl(), cell.localDeletionTime()); } private long computeTimestampForEntryDeletion(Row baseRow) { // We delete the old row with it's row entry timestamp using a shadowable deletion. // We must make sure that the deletion deletes everything in the entry (or the entry will // still show up), so we must use the bigger timestamp found in the existing row (for any // column included in the view at least). // TODO: We have a problem though: if the entry is "resurected" by a later update, we would // need to ensure that the timestamp for then entry then is bigger than the tombstone // we're just inserting, which is not currently guaranteed. // This is a bug for a separate ticket though. 
long timestamp = baseRow.primaryKeyLivenessInfo().timestamp(); for (ColumnData data : baseRow) { if (!view.getDefinition().includes(data.column().name)) continue; timestamp = Math.max(timestamp, data.maxTimestamp()); } return timestamp; } private void addColumnData(ColumnDefinition viewColumn, ColumnData baseTableData) { assert viewColumn.isComplex() == baseTableData.column().isComplex(); if (!viewColumn.isComplex()) { addCell(viewColumn, (Cell)baseTableData); return; } ComplexColumnData complexData = (ComplexColumnData)baseTableData; currentViewEntryBuilder.addComplexDeletion(viewColumn, complexData.complexDeletion()); for (Cell cell : complexData) addCell(viewColumn, cell); } private void addCell(ColumnDefinition viewColumn, Cell baseTableCell) { assert !viewColumn.isPrimaryKeyColumn(); currentViewEntryBuilder.addCell(baseTableCell.withUpdatedColumn(viewColumn)); } /** * Finish building the currently updated view entry and add it to the other built * updates. */ private void submitUpdate() { Row row = currentViewEntryBuilder.build(); // I'm not sure we can reach there is there is nothing is updated, but adding an empty row breaks things // and it costs us nothing to be prudent here. if (row.isEmpty()) return; DecoratedKey partitionKey = makeCurrentPartitionKey(); PartitionUpdate update = updates.get(partitionKey); if (update == null) { // We can't really know which columns of the view will be updated nor how many row will be updated for this key // so we rely on hopefully sane defaults. update = new PartitionUpdate(viewMetadata, partitionKey, viewMetadata.partitionColumns(), 4); updates.put(partitionKey, update); } update.add(row); } private DecoratedKey makeCurrentPartitionKey() { ByteBuffer rawKey = viewMetadata.partitionKeyColumns().size() == 1 ? 
currentViewEntryPartitionKey[0] : CompositeType.build(currentViewEntryPartitionKey); return viewMetadata.decorateKey(rawKey); } private ByteBuffer getValueForPK(ColumnDefinition column, Row row) { switch (column.kind) { case PARTITION_KEY: return basePartitionKey[column.position()]; case CLUSTERING: return row.clustering().get(column.position()); default: // This shouldn't NPE as we shouldn't get there if the value can be null (or there is a bug in updateAction()) return row.getCell(column).value(); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.List; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Cipher; import 
org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.RedundantKVGenerator; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Category({IOTests.class, SmallTests.class}) public class TestHFileEncryption { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHFileEncryption.class); private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static FileSystem fs; private static Encryption.Context cryptoContext; @BeforeClass public static void setUp() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); // Disable block cache in this test. 
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName()); conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase"); conf.setInt("hfile.format.version", 3); fs = FileSystem.get(conf); cryptoContext = Encryption.newContext(conf); String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES); Cipher aes = Encryption.getCipher(conf, algorithm); assertNotNull(aes); cryptoContext.setCipher(aes); byte[] key = new byte[aes.getKeyLength()]; Bytes.secureRandom(key); cryptoContext.setKey(key); } private int writeBlock(Configuration conf, FSDataOutputStream os, HFileContext fileContext, int size) throws IOException { HFileBlock.Writer hbw = new HFileBlock.Writer(conf, null, fileContext); DataOutputStream dos = hbw.startWriting(BlockType.DATA); for (int j = 0; j < size; j++) { dos.writeInt(j); } hbw.writeHeaderAndData(os); LOG.info("Wrote a block at " + os.getPos() + " with" + " onDiskSizeWithHeader=" + hbw.getOnDiskSizeWithHeader() + " uncompressedSizeWithoutHeader=" + hbw.getOnDiskSizeWithoutHeader() + " uncompressedSizeWithoutHeader=" + hbw.getUncompressedSizeWithoutHeader()); return hbw.getOnDiskSizeWithHeader(); } private long readAndVerifyBlock(long pos, HFileContext ctx, HFileBlock.FSReaderImpl hbr, int size) throws IOException { HFileBlock b = hbr.readBlockData(pos, -1, false, false, true); assertEquals(0, HFile.getAndResetChecksumFailuresCount()); b.sanityCheck(); assertFalse((b.getHFileContext().getCompression() != Compression.Algorithm.NONE) && b.isUnpacked()); b = b.unpack(ctx, hbr); LOG.info("Read a block at " + pos + " with" + " onDiskSizeWithHeader=" + b.getOnDiskSizeWithHeader() + " uncompressedSizeWithoutHeader=" + b.getOnDiskSizeWithoutHeader() + " uncompressedSizeWithoutHeader=" + b.getUncompressedSizeWithoutHeader()); DataInputStream dis = b.getByteStream(); for (int i = 0; i < size; i++) { int read = dis.readInt(); if (read != i) { 
fail("Block data corrupt at element " + i); } } return b.getOnDiskSizeWithHeader(); } @Test public void testDataBlockEncryption() throws IOException { final int blocks = 10; final int[] blockSizes = new int[blocks]; final Random rand = ThreadLocalRandom.current(); for (int i = 0; i < blocks; i++) { blockSizes[i] = (1024 + rand.nextInt(1024 * 63)) / Bytes.SIZEOF_INT; } for (Compression.Algorithm compression : HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS) { Path path = new Path(TEST_UTIL.getDataTestDir(), "block_v3_" + compression + "_AES"); LOG.info("testDataBlockEncryption: encryption=AES compression=" + compression); long totalSize = 0; HFileContext fileContext = new HFileContextBuilder() .withCompression(compression) .withEncryptionContext(cryptoContext) .build(); FSDataOutputStream os = fs.create(path); try { for (int i = 0; i < blocks; i++) { totalSize += writeBlock(TEST_UTIL.getConfiguration(), os, fileContext, blockSizes[i]); } } finally { os.close(); } FSDataInputStream is = fs.open(path); ReaderContext context = new ReaderContextBuilder() .withInputStreamWrapper(new FSDataInputStreamWrapper(is)) .withFilePath(path) .withFileSystem(fs) .withFileSize(totalSize).build(); try { HFileBlock.FSReaderImpl hbr = new HFileBlock.FSReaderImpl(context, fileContext, ByteBuffAllocator.HEAP, TEST_UTIL.getConfiguration()); long pos = 0; for (int i = 0; i < blocks; i++) { pos += readAndVerifyBlock(pos, fileContext, hbr, blockSizes[i]); } } finally { is.close(); } } } @Test public void testHFileEncryptionMetadata() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); CacheConfig cacheConf = new CacheConfig(conf); HFileContext fileContext = new HFileContextBuilder() .withEncryptionContext(cryptoContext) .build(); // write a simple encrypted hfile Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile"); FSDataOutputStream out = fs.create(path); HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf) .withOutputStream(out) 
.withFileContext(fileContext) .create(); try { KeyValue kv = new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null, Bytes.toBytes("value")); writer.append(kv); } finally { writer.close(); out.close(); } // read it back in and validate correct crypto metadata HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf); try { FixedFileTrailer trailer = reader.getTrailer(); assertNotNull(trailer.getEncryptionKey()); Encryption.Context readerContext = reader.getFileContext().getEncryptionContext(); assertEquals(readerContext.getCipher().getName(), cryptoContext.getCipher().getName()); assertTrue(Bytes.equals(readerContext.getKeyBytes(), cryptoContext.getKeyBytes())); } finally { reader.close(); } } @Test public void testHFileEncryption() throws Exception { // Create 1000 random test KVs RedundantKVGenerator generator = new RedundantKVGenerator(); List<KeyValue> testKvs = generator.generateTestKeyValues(1000); // Iterate through data block encoding and compression combinations Configuration conf = TEST_UTIL.getConfiguration(); CacheConfig cacheConf = new CacheConfig(conf); for (DataBlockEncoding encoding: DataBlockEncoding.values()) { for (Compression.Algorithm compression: HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS) { HFileContext fileContext = new HFileContextBuilder() .withBlockSize(4096) // small blocks .withEncryptionContext(cryptoContext) .withCompression(compression) .withDataBlockEncoding(encoding) .build(); // write a new test HFile LOG.info("Writing with " + fileContext); Path path = new Path(TEST_UTIL.getDataTestDir(), HBaseCommonTestingUtil.getRandomUUID().toString() + ".hfile"); FSDataOutputStream out = fs.create(path); HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf) .withOutputStream(out) .withFileContext(fileContext) .create(); try { for (KeyValue kv: testKvs) { writer.append(kv); } } finally { writer.close(); out.close(); } // read it back in LOG.info("Reading with " + fileContext); int i = 0; HFileScanner scanner = 
null; HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf); try { FixedFileTrailer trailer = reader.getTrailer(); assertNotNull(trailer.getEncryptionKey()); scanner = reader.getScanner(conf, false, false); assertTrue("Initial seekTo failed", scanner.seekTo()); do { Cell kv = scanner.getCell(); assertTrue("Read back an unexpected or invalid KV", testKvs.contains(KeyValueUtil.ensureKeyValue(kv))); i++; } while (scanner.next()); } finally { reader.close(); scanner.close(); } assertEquals("Did not read back as many KVs as written", i, testKvs.size()); // Test random seeks with pread LOG.info("Random seeking with " + fileContext); Random rand = ThreadLocalRandom.current(); reader = HFile.createReader(fs, path, cacheConf, true, conf); try { scanner = reader.getScanner(conf, false, true); assertTrue("Initial seekTo failed", scanner.seekTo()); for (i = 0; i < 100; i++) { KeyValue kv = testKvs.get(rand.nextInt(testKvs.size())); assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv)); } } finally { scanner.close(); reader.close(); } } } } }
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package a_vcard.android.syncml.pim.vcard; //import android.content.AbstractSyncableContentProvider; //import android.content.ContentResolver; //import android.content.ContentUris; //import android.content.ContentValues; //import android.net.Uri; import a_vcard.android.provider.Contacts; import a_vcard.android.provider.Contacts.ContactMethods; //import android.provider.Contacts.Extensions; //import android.provider.Contacts.GroupMembership; //import android.provider.Contacts.Organizations; //import android.provider.Contacts.People; import a_vcard.android.provider.Contacts.Phones; //import android.provider.Contacts.Photos; import a_vcard.android.syncml.pim.PropertyNode; import a_vcard.android.syncml.pim.VNode; import a_vcard.android.telephony.PhoneNumberUtils; import a_vcard.android.text.TextUtils; import a_vcard.android.util.Log; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; /** * The parameter class of VCardComposer. * This class standy by the person-contact in * Android system, we must use this class instance as parameter to transmit to * VCardComposer so that create vCard string. 
*/
// TODO: rename the class name, next step
public class ContactStruct {
    private static final String LOG_TAG = "ContactStruct";

    // Note: phonetic name probably should be "LAST FIRST MIDDLE" for European languages, and
    // space should be added between each element while it should not be in Japanese.
    // But unfortunately, we currently do not have the data and are not sure whether we should
    // support European version of name ordering.
    //
    // TODO: Implement the logic described above if we really need European version of
    // phonetic name handling. Also, adding the appropriate test case of vCard would be
    // highly appreciated.
    public static final int NAME_ORDER_TYPE_ENGLISH = 0;
    public static final int NAME_ORDER_TYPE_JAPANESE = 1;

    /** MUST exist */
    public String name;
    public String phoneticName;
    /** maybe folding */
    public List<String> notes = new ArrayList<String>();
    /** maybe folding */
    public String title;
    /** binary bytes of pic. */
    public byte[] photoBytes;
    /** The type of Photo (e.g. JPEG, BMP, etc.) */
    public String photoType;
    /** Only for GET. Use addPhoneList() to PUT. */
    public List<PhoneData> phoneList;
    /** Only for GET. Use addContactmethodList() to PUT. */
    public List<ContactMethod> contactmethodList;
    /** Only for GET. Use addOrgList() to PUT. */
    public List<OrganizationData> organizationList;
    /** Only for GET. Use addExtension() to PUT */
    public Map<String, List<String>> extensionMap;

    // Use organizationList instead when handling ORG.
    @Deprecated
    public String company;

    /** A single phone number entry parsed from a vCard TEL property. */
    public static class PhoneData {
        public int type;
        /** maybe folding */
        public String data;
        public String label;
        public boolean isPrimary;
    }

    /** A single contact-method entry (email or postal address) parsed from a vCard. */
    public static class ContactMethod {
        // Contacts.KIND_EMAIL, Contacts.KIND_POSTAL
        public int kind;
        // e.g. Contacts.ContactMethods.TYPE_HOME, Contacts.PhoneColumns.TYPE_HOME
        // If type == Contacts.PhoneColumns.TYPE_CUSTOM, label is used.
        public int type;
        public String data;
        // Used only when TYPE is TYPE_CUSTOM.
        public String label;
        public boolean isPrimary;
    }

    /** A single organization entry parsed from a vCard ORG/TITLE/ROLE property. */
    public static class OrganizationData {
        public int type;
        public String companyName;
        public String positionName;
        public boolean isPrimary;
    }

    /**
     * Add a phone info to phoneList.
     * @param data phone number
     * @param type type col of content://contacts/phones
     * @param label label col of content://contacts/phones
     */
    public void addPhone(int type, String data, String label, boolean isPrimary){
        if (phoneList == null) {
            phoneList = new ArrayList<PhoneData>();
        }
        PhoneData phoneData = new PhoneData();
        phoneData.type = type;

        // Strip everything except digits and a leading '+' before formatting the number.
        StringBuilder builder = new StringBuilder();
        String trimed = data.trim();
        int length = trimed.length();
        for (int i = 0; i < length; i++) {
            char ch = trimed.charAt(i);
            if (('0' <= ch && ch <= '9') || (i == 0 && ch == '+')) {
                builder.append(ch);
            }
        }
        phoneData.data = PhoneNumberUtils.formatNumber(builder.toString());
        phoneData.label = label;
        phoneData.isPrimary = isPrimary;
        phoneList.add(phoneData);
    }

    /**
     * Add a contactmethod info to contactmethodList.
     * @param kind integer value defined in Contacts.java
     * (e.g. Contacts.KIND_EMAIL)
     * @param type type col of content://contacts/contact_methods
     * @param data contact data
     * @param label extra string used only when kind is Contacts.KIND_CUSTOM.
     */
    public void addContactmethod(int kind, int type, String data,
            String label, boolean isPrimary){
        if (contactmethodList == null) {
            contactmethodList = new ArrayList<ContactMethod>();
        }
        ContactMethod contactMethod = new ContactMethod();
        contactMethod.kind = kind;
        contactMethod.type = type;
        contactMethod.data = data;
        contactMethod.label = label;
        contactMethod.isPrimary = isPrimary;
        contactmethodList.add(contactMethod);
    }

    /**
     * Add a Organization info to organizationList.
     */
    public void addOrganization(int type, String companyName, String positionName,
            boolean isPrimary) {
        if (organizationList == null) {
            organizationList = new ArrayList<OrganizationData>();
        }
        OrganizationData organizationData = new OrganizationData();
        organizationData.type = type;
        organizationData.companyName = companyName;
        organizationData.positionName = positionName;
        organizationData.isPrimary = isPrimary;
        organizationList.add(organizationData);
    }

    /**
     * Set "position" value to the appropriate data. If there's more than one
     * OrganizationData objects, the value is set to the last one. If there's no
     * OrganizationData object, a new OrganizationData is created, whose company name is
     * empty.
     *
     * TODO: incomplete logic. fix this:
     *
     * e.g. This assumes ORG comes earlier, but TITLE may come earlier like this, though we do not
     * know how to handle it in general cases...
     * ----
     * TITLE:Software Engineer
     * ORG:Google
     * ----
     */
    public void setPosition(String positionValue) {
        if (organizationList == null) {
            organizationList = new ArrayList<OrganizationData>();
        }
        int size = organizationList.size();
        if (size == 0) {
            // No ORG seen yet: create a placeholder entry with an empty company name.
            addOrganization(Contacts.OrganizationColumns.TYPE_OTHER, "", null, false);
            size = 1;
        }
        OrganizationData lastData = organizationList.get(size - 1);
        lastData.positionName = positionValue;
    }

    /**
     * Stores an unrecognized vCard property into extensionMap, keyed by property name.
     * Empty values are ignored.
     */
    public void addExtension(PropertyNode propertyNode) {
        if (propertyNode.propValue.length() == 0) {
            return;
        }
        // Now store the string into extensionMap.
        List<String> list;
        String name = propertyNode.propName;
        if (extensionMap == null) {
            extensionMap = new HashMap<String, List<String>>();
        }
        if (!extensionMap.containsKey(name)){
            list = new ArrayList<String>();
            extensionMap.put(name, list);
        } else {
            list = extensionMap.get(name);
        }
        list.add(propertyNode.encode());
    }

    /**
     * Builds a display name from the elements of a vCard "N" property, honoring the
     * requested name ordering (Japanese puts family name first).
     */
    private static String getNameFromNProperty(List<String> elems, int nameOrderType) {
        // Family, Given, Middle, Prefix, Suffix. (1 - 5)
        int size = elems.size();
        if (size > 1) {
            StringBuilder builder = new StringBuilder();
            boolean builderIsEmpty = true;
            // Prefix
            if (size > 3 && elems.get(3).length() > 0) {
                builder.append(elems.get(3));
                builderIsEmpty = false;
            }
            String first, second;
            if (nameOrderType == NAME_ORDER_TYPE_JAPANESE) {
                first = elems.get(0);
                second = elems.get(1);
            } else {
                first = elems.get(1);
                second = elems.get(0);
            }
            if (first.length() > 0) {
                if (!builderIsEmpty) {
                    builder.append(' ');
                }
                builder.append(first);
                builderIsEmpty = false;
            }
            // Middle name
            if (size > 2 && elems.get(2).length() > 0) {
                if (!builderIsEmpty) {
                    builder.append(' ');
                }
                builder.append(elems.get(2));
                builderIsEmpty = false;
            }
            if (second.length() > 0) {
                if (!builderIsEmpty) {
                    builder.append(' ');
                }
                builder.append(second);
                builderIsEmpty = false;
            }
            // Suffix
            if (size > 4 && elems.get(4).length() > 0) {
                if (!builderIsEmpty) {
                    builder.append(' ');
                }
                builder.append(elems.get(4));
                builderIsEmpty = false;
            }
            return builder.toString();
        } else if (size == 1) {
            return elems.get(0);
        } else {
            return "";
        }
    }

    /**
     * Parses a VCARD VNode into a ContactStruct.
     * Returns {@code null} if the node is not a VCARD (should not happen in the current
     * implementation; this is just a safety check).
     */
    public static ContactStruct constructContactFromVNode(VNode node,
            int nameOrderType) {
        if (!node.VName.equals("VCARD")) {
            // Impossible in current implementation. Just for safety.
            Log.e(LOG_TAG, "Non VCARD data is inserted.");
            return null;
        }

        // For name, there are three fields in vCard: FN, N, NAME.
        // We prefer FN, which is a required field in vCard 3.0 , but not in vCard 2.1.
        // Next, we prefer NAME, which is defined only in vCard 3.0.
        // Finally, we use N, which is a little difficult to parse.
        String fullName = null;
        String nameFromNProperty = null;

        // Some vCard has "X-PHONETIC-FIRST-NAME", "X-PHONETIC-MIDDLE-NAME", and
        // "X-PHONETIC-LAST-NAME"
        String xPhoneticFirstName = null;
        String xPhoneticMiddleName = null;
        String xPhoneticLastName = null;

        ContactStruct contact = new ContactStruct();

        // Each Column of four properties has ISPRIMARY field
        // (See android.provider.Contacts)
        // If false even after the following loop, we choose the first
        // entry as a "primary" entry.
        boolean prefIsSetAddress = false;
        boolean prefIsSetPhone = false;
        boolean prefIsSetEmail = false;
        boolean prefIsSetOrganization = false;

        for (PropertyNode propertyNode: node.propList) {
            String name = propertyNode.propName;

            if (TextUtils.isEmpty(propertyNode.propValue)) {
                continue;
            }

            if (name.equals("VERSION")) {
                // vCard version. Ignore this.
            } else if (name.equals("FN")) {
                fullName = propertyNode.propValue;
            } else if (name.equals("NAME") && fullName == null) {
                // Only in vCard 3.0. Use this if FN does not exist.
                // Though, note that vCard 3.0 requires FN.
                fullName = propertyNode.propValue;
            } else if (name.equals("N")) {
                nameFromNProperty = getNameFromNProperty(propertyNode.propValue_vector,
                        nameOrderType);
            } else if (name.equals("SORT-STRING")) {
                contact.phoneticName = propertyNode.propValue;
            } else if (name.equals("SOUND")) {
                if (propertyNode.paramMap_TYPE.contains("X-IRMC-N") &&
                        contact.phoneticName == null) {
                    // Some Japanese mobile phones use this field for phonetic name,
                    // since vCard 2.1 does not have "SORT-STRING" type.
                    // Also, in some cases, the field has some ';' in it.
                    // We remove them.
                    StringBuilder builder = new StringBuilder();
                    String value = propertyNode.propValue;
                    int length = value.length();
                    for (int i = 0; i < length; i++) {
                        char ch = value.charAt(i);
                        if (ch != ';') {
                            builder.append(ch);
                        }
                    }
                    contact.phoneticName = builder.toString();
                } else {
                    contact.addExtension(propertyNode);
                }
            } else if (name.equals("ADR")) {
                List<String> values = propertyNode.propValue_vector;
                boolean valuesAreAllEmpty = true;
                for (String value : values) {
                    if (value.length() > 0) {
                        valuesAreAllEmpty = false;
                        break;
                    }
                }
                if (valuesAreAllEmpty) {
                    continue;
                }

                int kind = Contacts.KIND_POSTAL;
                int type = -1;
                String label = "";
                boolean isPrimary = false;
                for (String typeString : propertyNode.paramMap_TYPE) {
                    if (typeString.equals("PREF") && !prefIsSetAddress) {
                        // Only first "PREF" is considered.
                        prefIsSetAddress = true;
                        isPrimary = true;
                    } else if (typeString.equalsIgnoreCase("HOME")) {
                        type = Contacts.ContactMethodsColumns.TYPE_HOME;
                        label = "";
                    } else if (typeString.equalsIgnoreCase("WORK") ||
                            typeString.equalsIgnoreCase("COMPANY")) {
                        // "COMPANY" seems emitted by Windows Mobile, which is not
                        // specifically supported by vCard 2.1. We assume this is same
                        // as "WORK".
                        type = Contacts.ContactMethodsColumns.TYPE_WORK;
                        label = "";
                    } else if (typeString.equalsIgnoreCase("POSTAL")) {
                        kind = Contacts.KIND_POSTAL;
                    } else if (typeString.equalsIgnoreCase("PARCEL") ||
                            typeString.equalsIgnoreCase("DOM") ||
                            typeString.equalsIgnoreCase("INTL")) {
                        // We do not have a kind or type matching these.
                        // TODO: fix this. We may need to split entries into two.
                        // (e.g. entries for KIND_POSTAL and KIND_PERCEL)
                    } else if (typeString.toUpperCase().startsWith("X-") &&
                            type < 0) {
                        type = Contacts.ContactMethodsColumns.TYPE_CUSTOM;
                        label = typeString.substring(2);
                    } else if (type < 0) {
                        // vCard 3.0 allows iana-token. Also some vCard 2.1 exporters
                        // emit non-standard types. We do not handle their values now.
                        type = Contacts.ContactMethodsColumns.TYPE_CUSTOM;
                        label = typeString;
                    }
                }
                // We use "HOME" as default
                if (type < 0) {
                    type = Contacts.ContactMethodsColumns.TYPE_HOME;
                }

                // adr-value = 0*6(text-value ";") text-value
                //           ; PO Box, Extended Address, Street, Locality, Region, Postal
                //           ; Code, Country Name
                String address;
                List<String> list = propertyNode.propValue_vector;
                int size = list.size();
                if (size > 1) {
                    StringBuilder builder = new StringBuilder();
                    boolean builderIsEmpty = true;
                    if (Locale.getDefault().getCountry().equals(
                            Locale.JAPAN.getCountry())) {
                        // In Japan, the order is reversed.
                        for (int i = size - 1; i >= 0; i--) {
                            String addressPart = list.get(i);
                            if (addressPart.length() > 0) {
                                if (!builderIsEmpty) {
                                    builder.append(' ');
                                }
                                builder.append(addressPart);
                                builderIsEmpty = false;
                            }
                        }
                    } else {
                        for (int i = 0; i < size; i++) {
                            String addressPart = list.get(i);
                            if (addressPart.length() > 0) {
                                if (!builderIsEmpty) {
                                    builder.append(' ');
                                }
                                builder.append(addressPart);
                                builderIsEmpty = false;
                            }
                        }
                    }
                    address = builder.toString().trim();
                } else {
                    address = propertyNode.propValue;
                }
                contact.addContactmethod(kind, type, address, label, isPrimary);
            } else if (name.equals("ORG")) {
                // vCard specification does not specify other types.
                int type = Contacts.OrganizationColumns.TYPE_WORK;
                boolean isPrimary = false;
                for (String typeString : propertyNode.paramMap_TYPE) {
                    if (typeString.equals("PREF") && !prefIsSetOrganization) {
                        // vCard specification officially does not have PREF in ORG.
                        // This is just for safety.
                        prefIsSetOrganization = true;
                        isPrimary = true;
                    }
                    // XXX: Should we cope with X- words?
                }

                List<String> list = propertyNode.propValue_vector;
                int size = list.size();
                StringBuilder builder = new StringBuilder();
                for (Iterator<String> iter = list.iterator(); iter.hasNext();) {
                    builder.append(iter.next());
                    if (iter.hasNext()) {
                        builder.append(' ');
                    }
                }

                contact.addOrganization(type, builder.toString(), "", isPrimary);
            } else if (name.equals("TITLE")) {
                contact.setPosition(propertyNode.propValue);
            } else if (name.equals("ROLE")) {
                contact.setPosition(propertyNode.propValue);
            } else if (name.equals("PHOTO")) {
                // We prefer PHOTO to LOGO.
                String valueType = propertyNode.paramMap.getAsString("VALUE");
                if (valueType != null && valueType.equals("URL")) {
                    // TODO: do something.
                } else {
                    // Assume PHOTO is stored in BASE64. In that case,
                    // data is already stored in propValue_bytes in binary form.
                    // It should be automatically done by VBuilder (VDataBuilder/VCardDatabuilder)
                    contact.photoBytes = propertyNode.propValue_bytes;
                    String type = propertyNode.paramMap.getAsString("TYPE");
                    if (type != null) {
                        contact.photoType = type;
                    }
                }
            } else if (name.equals("LOGO")) {
                // When PHOTO is not available this is not URL,
                // we use this instead of PHOTO.
                String valueType = propertyNode.paramMap.getAsString("VALUE");
                if (valueType != null && valueType.equals("URL")) {
                    // TODO: do something.
                } else if (contact.photoBytes == null) {
                    contact.photoBytes = propertyNode.propValue_bytes;
                    String type = propertyNode.paramMap.getAsString("TYPE");
                    if (type != null) {
                        contact.photoType = type;
                    }
                }
            } else if (name.equals("EMAIL")) {
                int type = -1;
                String label = null;
                boolean isPrimary = false;
                for (String typeString : propertyNode.paramMap_TYPE) {
                    if (typeString.equals("PREF") && !prefIsSetEmail) {
                        // Only first "PREF" is considered.
prefIsSetEmail = true; isPrimary = true; } else if (typeString.equalsIgnoreCase("HOME")) { type = Contacts.ContactMethodsColumns.TYPE_HOME; } else if (typeString.equalsIgnoreCase("WORK")) { type = Contacts.ContactMethodsColumns.TYPE_WORK; } else if (typeString.equalsIgnoreCase("CELL")) { // We do not have Contacts.ContactMethodsColumns.TYPE_MOBILE yet. type = Contacts.ContactMethodsColumns.TYPE_CUSTOM; label = Contacts.ContactMethodsColumns.MOBILE_EMAIL_TYPE_NAME; } else if (typeString.toUpperCase().startsWith("X-") && type < 0) { type = Contacts.ContactMethodsColumns.TYPE_CUSTOM; label = typeString.substring(2); } else if (type < 0) { // vCard 3.0 allows iana-token. // We may have INTERNET (specified in vCard spec), // SCHOOL, etc. type = Contacts.ContactMethodsColumns.TYPE_CUSTOM; label = typeString; } } // We use "OTHER" as default. if (type < 0) { type = Contacts.ContactMethodsColumns.TYPE_OTHER; } contact.addContactmethod(Contacts.KIND_EMAIL, type, propertyNode.propValue,label, isPrimary); } else if (name.equals("TEL")) { int type = -1; String label = null; boolean isPrimary = false; boolean isFax = false; for (String typeString : propertyNode.paramMap_TYPE) { if (typeString.equals("PREF") && !prefIsSetPhone) { // Only first "PREF" is considered. prefIsSetPhone = true; isPrimary = true; } else if (typeString.equalsIgnoreCase("HOME")) { type = Contacts.PhonesColumns.TYPE_HOME; } else if (typeString.equalsIgnoreCase("WORK")) { type = Contacts.PhonesColumns.TYPE_WORK; } else if (typeString.equalsIgnoreCase("CELL")) { type = Contacts.PhonesColumns.TYPE_MOBILE; } else if (typeString.equalsIgnoreCase("PAGER")) { type = Contacts.PhonesColumns.TYPE_PAGER; } else if (typeString.equalsIgnoreCase("FAX")) { isFax = true; } else if (typeString.equalsIgnoreCase("VOICE") || typeString.equalsIgnoreCase("MSG")) { // Defined in vCard 3.0. Ignore these because they // conflict with "HOME", "WORK", etc. // XXX: do something? 
} else if (typeString.toUpperCase().startsWith("X-") && type < 0) { type = Contacts.PhonesColumns.TYPE_CUSTOM; label = typeString.substring(2); } else if (type < 0){ // We may have MODEM, CAR, ISDN, etc... type = Contacts.PhonesColumns.TYPE_CUSTOM; label = typeString; } } // We use "HOME" as default if (type < 0) { type = Contacts.PhonesColumns.TYPE_HOME; } if (isFax) { if (type == Contacts.PhonesColumns.TYPE_HOME) { type = Contacts.PhonesColumns.TYPE_FAX_HOME; } else if (type == Contacts.PhonesColumns.TYPE_WORK) { type = Contacts.PhonesColumns.TYPE_FAX_WORK; } } contact.addPhone(type, propertyNode.propValue, label, isPrimary); } else if (name.equals("NOTE")) { contact.notes.add(propertyNode.propValue); } else if (name.equals("BDAY")) { contact.addExtension(propertyNode); } else if (name.equals("URL")) { contact.addExtension(propertyNode); } else if (name.equals("REV")) { // Revision of this VCard entry. I think we can ignore this. contact.addExtension(propertyNode); } else if (name.equals("UID")) { contact.addExtension(propertyNode); } else if (name.equals("KEY")) { // Type is X509 or PGP? I don't know how to handle this... contact.addExtension(propertyNode); } else if (name.equals("MAILER")) { contact.addExtension(propertyNode); } else if (name.equals("TZ")) { contact.addExtension(propertyNode); } else if (name.equals("GEO")) { contact.addExtension(propertyNode); } else if (name.equals("NICKNAME")) { // vCard 3.0 only. contact.addExtension(propertyNode); } else if (name.equals("CLASS")) { // vCard 3.0 only. // e.g. CLASS:CONFIDENTIAL contact.addExtension(propertyNode); } else if (name.equals("PROFILE")) { // VCard 3.0 only. Must be "VCARD". I think we can ignore this. contact.addExtension(propertyNode); } else if (name.equals("CATEGORIES")) { // VCard 3.0 only. // e.g. CATEGORIES:INTERNET,IETF,INDUSTRY,INFORMATION TECHNOLOGY contact.addExtension(propertyNode); } else if (name.equals("SOURCE")) { // VCard 3.0 only. 
contact.addExtension(propertyNode); } else if (name.equals("PRODID")) { // VCard 3.0 only. // To specify the identifier for the product that created // the vCard object. contact.addExtension(propertyNode); } else if (name.equals("X-PHONETIC-FIRST-NAME")) { xPhoneticFirstName = propertyNode.propValue; } else if (name.equals("X-PHONETIC-MIDDLE-NAME")) { xPhoneticMiddleName = propertyNode.propValue; } else if (name.equals("X-PHONETIC-LAST-NAME")) { xPhoneticLastName = propertyNode.propValue; } else { // Unknown X- words and IANA token. contact.addExtension(propertyNode); } } if (fullName != null) { contact.name = fullName; } else if(nameFromNProperty != null) { contact.name = nameFromNProperty; } else { contact.name = ""; } if (contact.phoneticName == null && (xPhoneticFirstName != null || xPhoneticMiddleName != null || xPhoneticLastName != null)) { // Note: In Europe, this order should be "LAST FIRST MIDDLE". See the comment around // NAME_ORDER_TYPE_* for more detail. String first; String second; if (nameOrderType == NAME_ORDER_TYPE_JAPANESE) { first = xPhoneticLastName; second = xPhoneticFirstName; } else { first = xPhoneticFirstName; second = xPhoneticLastName; } StringBuilder builder = new StringBuilder(); if (first != null) { builder.append(first); } if (xPhoneticMiddleName != null) { builder.append(xPhoneticMiddleName); } if (second != null) { builder.append(second); } contact.phoneticName = builder.toString(); } // Remove unnecessary white spaces. // It is found that some mobile phone emits phonetic name with just one white space // when a user does not specify one. // This logic is effective toward such kind of weird data. if (contact.phoneticName != null) { contact.phoneticName = contact.phoneticName.trim(); } // If there is no "PREF", we choose the first entries as primary. 
if (!prefIsSetPhone && contact.phoneList != null && contact.phoneList.size() > 0) { contact.phoneList.get(0).isPrimary = true; } if (!prefIsSetAddress && contact.contactmethodList != null) { for (ContactMethod contactMethod : contact.contactmethodList) { if (contactMethod.kind == Contacts.KIND_POSTAL) { contactMethod.isPrimary = true; break; } } } if (!prefIsSetEmail && contact.contactmethodList != null) { for (ContactMethod contactMethod : contact.contactmethodList) { if (contactMethod.kind == Contacts.KIND_EMAIL) { contactMethod.isPrimary = true; break; } } } if (!prefIsSetOrganization && contact.organizationList != null && contact.organizationList.size() > 0) { contact.organizationList.get(0).isPrimary = true; } return contact; } public String displayString() { if (name.length() > 0) { return name; } if (contactmethodList != null && contactmethodList.size() > 0) { for (ContactMethod contactMethod : contactmethodList) { if (contactMethod.kind == Contacts.KIND_EMAIL && contactMethod.isPrimary) { return contactMethod.data; } } } if (phoneList != null && phoneList.size() > 0) { for (PhoneData phoneData : phoneList) { if (phoneData.isPrimary) { return phoneData.data; } } } return ""; } // private void pushIntoContentProviderOrResolver(Object contentSomething, // long myContactsGroupId) { // ContentResolver resolver = null; // AbstractSyncableContentProvider provider = null; // if (contentSomething instanceof ContentResolver) { // resolver = (ContentResolver)contentSomething; // } else if (contentSomething instanceof AbstractSyncableContentProvider) { // provider = (AbstractSyncableContentProvider)contentSomething; // } else { // Log.e(LOG_TAG, "Unsupported object came."); // return; // } // // ContentValues contentValues = new ContentValues(); // contentValues.put(People.NAME, name); // contentValues.put(People.PHONETIC_NAME, phoneticName); // // if (notes.size() > 1) { // StringBuilder builder = new StringBuilder(); // for (String note : notes) { // 
builder.append(note); // builder.append("\n"); // } // contentValues.put(People.NOTES, builder.toString()); // } else if (notes.size() == 1){ // contentValues.put(People.NOTES, notes.get(0)); // } // // Uri personUri; // long personId = 0; // if (resolver != null) { // personUri = Contacts.People.createPersonInMyContactsGroup( // resolver, contentValues); // if (personUri != null) { // personId = ContentUris.parseId(personUri); // } // } else { // personUri = provider.nonTransactionalInsert(People.CONTENT_URI, contentValues); // if (personUri != null) { // personId = ContentUris.parseId(personUri); // ContentValues values = new ContentValues(); // values.put(GroupMembership.PERSON_ID, personId); // values.put(GroupMembership.GROUP_ID, myContactsGroupId); // Uri resultUri = provider.nonTransactionalInsert( // GroupMembership.CONTENT_URI, values); // if (resultUri == null) { // Log.e(LOG_TAG, "Faild to insert the person to MyContact."); // provider.nonTransactionalDelete(personUri, null, null); // personUri = null; // } // } // } // // if (personUri == null) { // Log.e(LOG_TAG, "Failed to create the contact."); // return; // } // // if (photoBytes != null) { // if (resolver != null) { // People.setPhotoData(resolver, personUri, photoBytes); // } else { // Uri photoUri = Uri.withAppendedPath(personUri, Contacts.Photos.CONTENT_DIRECTORY); // ContentValues values = new ContentValues(); // values.put(Photos.DATA, photoBytes); // provider.update(photoUri, values, null, null); // } // } // // long primaryPhoneId = -1; // if (phoneList != null && phoneList.size() > 0) { // for (PhoneData phoneData : phoneList) { // ContentValues values = new ContentValues(); // values.put(Contacts.PhonesColumns.TYPE, phoneData.type); // if (phoneData.type == Contacts.PhonesColumns.TYPE_CUSTOM) { // values.put(Contacts.PhonesColumns.LABEL, phoneData.label); // } // // Already formatted. 
// values.put(Contacts.PhonesColumns.NUMBER, phoneData.data); // // // Not sure about Contacts.PhonesColumns.NUMBER_KEY ... // values.put(Contacts.PhonesColumns.ISPRIMARY, 1); // values.put(Contacts.Phones.PERSON_ID, personId); // Uri phoneUri; // if (resolver != null) { // phoneUri = resolver.insert(Phones.CONTENT_URI, values); // } else { // phoneUri = provider.nonTransactionalInsert(Phones.CONTENT_URI, values); // } // if (phoneData.isPrimary) { // primaryPhoneId = Long.parseLong(phoneUri.getLastPathSegment()); // } // } // } // // long primaryOrganizationId = -1; // if (organizationList != null && organizationList.size() > 0) { // for (OrganizationData organizationData : organizationList) { // ContentValues values = new ContentValues(); // // Currently, we do not use TYPE_CUSTOM. // values.put(Contacts.OrganizationColumns.TYPE, // organizationData.type); // values.put(Contacts.OrganizationColumns.COMPANY, // organizationData.companyName); // values.put(Contacts.OrganizationColumns.TITLE, // organizationData.positionName); // values.put(Contacts.OrganizationColumns.ISPRIMARY, 1); // values.put(Contacts.OrganizationColumns.PERSON_ID, personId); // // Uri organizationUri; // if (resolver != null) { // organizationUri = resolver.insert(Organizations.CONTENT_URI, values); // } else { // organizationUri = provider.nonTransactionalInsert( // Organizations.CONTENT_URI, values); // } // if (organizationData.isPrimary) { // primaryOrganizationId = Long.parseLong(organizationUri.getLastPathSegment()); // } // } // } // // long primaryEmailId = -1; // if (contactmethodList != null && contactmethodList.size() > 0) { // for (ContactMethod contactMethod : contactmethodList) { // ContentValues values = new ContentValues(); // values.put(Contacts.ContactMethodsColumns.KIND, contactMethod.kind); // values.put(Contacts.ContactMethodsColumns.TYPE, contactMethod.type); // if (contactMethod.type == Contacts.ContactMethodsColumns.TYPE_CUSTOM) { // 
values.put(Contacts.ContactMethodsColumns.LABEL, contactMethod.label); // } // values.put(Contacts.ContactMethodsColumns.DATA, contactMethod.data); // values.put(Contacts.ContactMethodsColumns.ISPRIMARY, 1); // values.put(Contacts.ContactMethods.PERSON_ID, personId); // // if (contactMethod.kind == Contacts.KIND_EMAIL) { // Uri emailUri; // if (resolver != null) { // emailUri = resolver.insert(ContactMethods.CONTENT_URI, values); // } else { // emailUri = provider.nonTransactionalInsert( // ContactMethods.CONTENT_URI, values); // } // if (contactMethod.isPrimary) { // primaryEmailId = Long.parseLong(emailUri.getLastPathSegment()); // } // } else { // probably KIND_POSTAL // if (resolver != null) { // resolver.insert(ContactMethods.CONTENT_URI, values); // } else { // provider.nonTransactionalInsert( // ContactMethods.CONTENT_URI, values); // } // } // } // } // // if (extensionMap != null && extensionMap.size() > 0) { // ArrayList<ContentValues> contentValuesArray; // if (resolver != null) { // contentValuesArray = new ArrayList<ContentValues>(); // } else { // contentValuesArray = null; // } // for (Entry<String, List<String>> entry : extensionMap.entrySet()) { // String key = entry.getKey(); // List<String> list = entry.getValue(); // for (String value : list) { // ContentValues values = new ContentValues(); // values.put(Extensions.NAME, key); // values.put(Extensions.VALUE, value); // values.put(Extensions.PERSON_ID, personId); // if (resolver != null) { // contentValuesArray.add(values); // } else { // provider.nonTransactionalInsert(Extensions.CONTENT_URI, values); // } // } // } // if (resolver != null) { // resolver.bulkInsert(Extensions.CONTENT_URI, // contentValuesArray.toArray(new ContentValues[0])); // } // } // // if (primaryPhoneId >= 0 || primaryOrganizationId >= 0 || primaryEmailId >= 0) { // ContentValues values = new ContentValues(); // if (primaryPhoneId >= 0) { // values.put(People.PRIMARY_PHONE_ID, primaryPhoneId); // } // if 
(primaryOrganizationId >= 0) { // values.put(People.PRIMARY_ORGANIZATION_ID, primaryOrganizationId); // } // if (primaryEmailId >= 0) { // values.put(People.PRIMARY_EMAIL_ID, primaryEmailId); // } // if (resolver != null) { // resolver.update(personUri, values, null, null); // } else { // provider.nonTransactionalUpdate(personUri, values, null, null); // } // } // } // // /** // * Push this object into database in the resolver. // */ // public void pushIntoContentResolver(ContentResolver resolver) { // pushIntoContentProviderOrResolver(resolver, 0); // } // // /** // * Push this object into AbstractSyncableContentProvider object. // */ // public void pushIntoAbstractSyncableContentProvider( // AbstractSyncableContentProvider provider, long myContactsGroupId) { // boolean successful = false; // provider.beginTransaction(); // try { // pushIntoContentProviderOrResolver(provider, myContactsGroupId); // successful = true; // } finally { // provider.endTransaction(successful); // } // } public boolean isIgnorable() { return TextUtils.isEmpty(name) && TextUtils.isEmpty(phoneticName) && (phoneList == null || phoneList.size() == 0) && (contactmethodList == null || contactmethodList.size() == 0); } }
package com.alibaba.weex.uitest.TC_Downgrade;

import android.app.Activity;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Intent;
import android.test.ActivityInstrumentationTestCase2;
import android.test.TouchUtils;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;

import com.alibaba.weex.R;
import com.alibaba.weex.util.ScreenShot;
import com.alibaba.weex.WXPageActivity;
import com.alibaba.weex.constants.Constants;
import com.alibaba.weex.util.ViewUtil;
import com.taobao.weex.ui.view.WXTextView;

import java.io.IOException;
import java.util.ArrayList;

/**
 * Instrumentation UI test for the "TC_Downgrade" / "TC_Downgrade_weexV_False"
 * case: it opens the test-case index page, scrolls to collect all entries whose
 * text starts with "TC_", taps "TC_Downgrade", then taps
 * "TC_Downgrade_weexV_False" and captures a screenshot of the result.
 *
 * Created by admin on 16/3/23.
 */
public class WeexUiTestCaseTcDowngradeWeexVFalse extends ActivityInstrumentationTestCase2<WXPageActivity> {

    public final String TAG = "TestScript_Guide==";
    public WXPageActivity waTestPageActivity;
    public ViewGroup mViewGroup;
    public Application mApplication;
    public Instrumentation mInstrumentation;
    // Accumulated list of case-index views whose text matches the target prefix.
    public ArrayList<View> mCaseListIndexView = new ArrayList<View>();

    public WeexUiTestCaseTcDowngradeWeexVFalse() {
        super(WXPageActivity.class);
    }

    /**
     * Launches WXPageActivity with the bundle URL from {@link Constants},
     * waits for it to render, then collects every visible "TC_" entry
     * (scrolling the list to pick up off-screen entries as well).
     */
    public void setUp() throws Exception {
        Log.e("TestScript_Guide", "setUp into!!");
        setActivityInitialTouchMode(false);
        mInstrumentation = getInstrumentation();
        Intent intent = new Intent();
        intent.putExtra("bundleUrl", Constants.BUNDLE_URL);
        launchActivityWithIntent("com.alibaba.weex", WXPageActivity.class, intent);
        setActivity(WXPageActivity.wxPageActivityInstance);
        waTestPageActivity = getActivity();
        Log.e(TAG, "activity1=" + waTestPageActivity.toString());
        // Fixed waits for the Weex page to render; there is no render callback hooked here.
        sleep(3000);
        mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container);
        mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, "TC_");
        addAllTargetView("TC_");
        Thread.sleep(3000);
    }

    /**
     * Clicks "TC_Downgrade" in the case index, then clicks
     * "TC_Downgrade_weexV_False" on the opened page and takes a screenshot.
     * Silently does nothing if neither entry was collected during setUp.
     */
    public void testDowngrade() {
        for (final View caseView : mCaseListIndexView) {
            if (((WXTextView) caseView).getText().toString().equals("TC_Downgrade")) {
                Log.e(TAG, "TC_Downgrade find");
                final WXTextView inputView = (WXTextView) caseView;
                mInstrumentation.runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        inputView.requestFocus();
                        inputView.performClick();
                    }
                });
                sleep(2000);
                // The click navigates to a new WXPageActivity instance; re-bind to it.
                setActivity(WXPageActivity.wxPageActivityInstance);
                Activity activity2 = getActivity();
                ViewGroup myGroup = (ViewGroup) (activity2.findViewById(R.id.container));
                ArrayList<View> inputListView = new ArrayList<View>();
                inputListView = ViewUtil.findViewWithText(myGroup, "TC_Downgrade_weexV_False");
                Log.e(TAG, "TC_Downgrade_weexV_False== " + inputListView.size());
                sleep(2000);
                if (inputListView.size() != 0) {
                    final WXTextView inputTypeView = (WXTextView) inputListView.get(0);
                    mInstrumentation.runOnMainSync(new Runnable() {
                        @Override
                        public void run() {
                            inputTypeView.requestFocus();
                            inputTypeView.performClick();
                            Log.e(TAG, "TC_Downgrade_weexV_False clcik!");
                        }
                    });
                    sleep(2000);
                    Log.e(TAG, "TC_Downgrade_weexV_False snap!");
                    screenShot("TC_Downgrade_weexV_False");
                    sleep(2000);
                }
            }
        }
    }

    /**
     * Finds all views in the current container whose text contains the given
     * string and logs each match.
     *
     * @param byText text to search for; returns null when blank
     * @return the matching views, or null for empty input
     */
    public ArrayList<View> getTestCaseListViewByText(String byText) throws InterruptedException {
        Log.e("TestScript_Guide", "byText ==" + byText);
        if (TextUtils.isEmpty(byText)) {
            return null;
        }
        ArrayList<View> outViews = new ArrayList<View>();
        mViewGroup.findViewsWithText(outViews, byText, View.FIND_VIEWS_WITH_TEXT);
        for (View view : outViews) {
            String viewText = ((WXTextView) view).getText().toString();
            Log.e(TAG, "viewText ==" + viewText);
        }
        return outViews;
    }

    /**
     * Returns the first collected case view whose text contains caseText
     * (case-insensitive), or null when the list is empty or nothing matches.
     */
    public View findMyCaseByText(String caseText) {
        if (mCaseListIndexView.size() == 0) {
            return null;
        }
        for (int i = 0; i < mCaseListIndexView.size(); i++) {
            WXTextView view = (WXTextView) mCaseListIndexView.get(i);
            if (view.getText().toString().toLowerCase().contains(caseText.toLowerCase())) {
                return view;
            }
        }
        // BUG FIX: the original returned the last inspected view even when no
        // entry matched; return null so callers can detect "not found".
        return null;
    }

    /**
     * Sleeps for the given number of milliseconds, swallowing interruption.
     */
    public void sleep(long time) {
        try {
            Thread.sleep(time);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Takes a screenshot of the current WXPageActivity under the given name.
     */
    public void screenShot(String shotName) {
        try {
            ScreenShot.shoot(WXPageActivity.wxPageActivityInstance, shotName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void setViewGroup(ViewGroup viewGroup) {
        mViewGroup = viewGroup;
    }

    /**
     * Scrolls the case list up to six quarter-screens and accumulates every
     * view whose text matches the target into {@link #mCaseListIndexView}.
     *
     * BUG FIX: the original reassigned mCaseListIndexView to only the views
     * found on the current screen and then called addAll on *itself*, which
     * duplicated the current screen's entries and discarded everything found
     * on earlier screens. Now each scroll's finds are merged (without
     * duplicates) into the accumulated list.
     */
    public void addAllTargetView(String target) {
        int max = 6;
        int count = 0;
        while (count < max) {
            TouchUtils.dragQuarterScreenUp(this, this.getActivity());
            mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container);
            ArrayList<View> found = ViewUtil.findViewWithText(mViewGroup, target);
            for (View view : found) {
                if (!mCaseListIndexView.contains(view)) {
                    mCaseListIndexView.add(view);
                }
            }
            count++;
        }
    }
}
/**
 * Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.codesourcery.jasm16.ide.ui.viewcontainers;

import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.apache.commons.lang.StringUtils;

import de.codesourcery.jasm16.compiler.io.DefaultResourceMatcher;
import de.codesourcery.jasm16.compiler.io.IResource;
import de.codesourcery.jasm16.compiler.io.IResourceResolver;
import de.codesourcery.jasm16.exceptions.ResourceNotFoundException;
import de.codesourcery.jasm16.ide.*;
import de.codesourcery.jasm16.ide.ui.MenuManager;
import de.codesourcery.jasm16.ide.ui.MenuManager.MenuEntry;
import de.codesourcery.jasm16.ide.ui.utils.UIUtils;
import de.codesourcery.jasm16.ide.ui.views.*;

/**
 * A view container that hosts editor views in a {@link JTabbedPane}, one tab
 * per view. It keeps its own tab-index bookkeeping (see {@link ViewWithPanel})
 * in sync with the tabbed pane, notifies {@link IViewStateListener} views when
 * their tab is (de)selected, supports closing the selected tab with CTRL-W,
 * and resolves in-memory source resources from its open editors via
 * {@link IResourceResolver}.
 *
 * NOTE(review): not thread-safe; like all Swing code it is expected to be used
 * from the EDT only — confirm callers obey this.
 */
public class EditorContainer extends AbstractView implements IViewContainer , IResourceResolver
{
    public static final String VIEW_ID = "editor-container";

    // Lazily created by getPanel()/createPanel().
    private JPanel panel;
    private final String title;

    private final ViewContainerHelper helper = new ViewContainerHelper();

    private final EditorFactory editorFactory;
    private final NavigationHistory navigationHistory = new NavigationHistory();
    private final IWorkspace workspace;

    // Views in this container together with their panel and current tab index.
    private final List<ViewWithPanel> views = new ArrayList<ViewWithPanel>();

    private final JTabbedPane tabbedPane = new JTabbedPane();

    // Tracks tab-selection changes so that the previously selected view gets a
    // viewHidden() callback and the newly selected one a viewVisible() callback.
    private final ChangeListener changeListener = new ChangeListener() {

        private int previouslySelectedTab = -1;

        @Override
        public void stateChanged(ChangeEvent e)
        {
            final int newIndex = tabbedPane.getSelectedIndex();
            int oldIndex = previouslySelectedTab;
            previouslySelectedTab = newIndex;
            if ( oldIndex != newIndex )
            {
                // selected index changed
                if ( oldIndex != -1 ) {
                    getViewWithPanelForTabIndex( oldIndex ).tabDeselected();
                }
                // NOTE(review): if the last tab is removed, newIndex is -1 and
                // getViewWithPanelForTabIndex(-1) throws IllegalArgumentException
                // — verify whether disposeView() of the final tab can trigger this.
                getViewWithPanelForTabIndex( newIndex ).tabSelected();
            }
        }
    };

    // "File/Save" menu entry; only visible while no editor has unsaved content.
    // NOTE(review): onClick() only prints to stdout — looks like a placeholder,
    // and the isVisible() condition (!mayBeDisposed()) appears inverted for a
    // save action; confirm intent.
    private MenuEntry saveCurrent = new MenuEntry("File/Save") {

        @Override
        public void onClick()
        {
            System.out.println("Save current editor contents");
        }

        public boolean isVisible() {
            return ! mayBeDisposed();
        };
    };

    /**
     * Pairs a view with the panel it rendered and the tab index it currently
     * occupies. tabIndex is mutable because indices shift when tabs are closed
     * (see {@link EditorContainer#disposeView(IView)}).
     */
    protected final class ViewWithPanel
    {
        public int tabIndex;
        public final IView view;
        public final JPanel panel;

        public ViewWithPanel(IView view,int tabIndex)
        {
            this.view = view;
            this.tabIndex = tabIndex;
            this.panel = view.getPanel( EditorContainer.this );
        }

        /** Forwards tab selection to the view if it listens for visibility changes. */
        public void tabSelected() {
            if ( view instanceof IViewStateListener) {
                ((IViewStateListener) view).viewVisible();
            }
        }

        /** Forwards tab deselection to the view if it listens for visibility changes. */
        public void tabDeselected() {
            if ( view instanceof IViewStateListener) {
                ((IViewStateListener) view).viewHidden();
            }
        }

        /** Makes this view's tab the selected one. */
        public void toFront() {
            tabbedPane.setSelectedIndex( tabIndex );
        }
    }

    // NOTE(review): the 'parent' parameter is accepted but never stored or used.
    public EditorContainer(String title, IWorkspace workspace,IViewContainer parent,EditorFactory editorFactory)
    {
        this.title = title;
        this.workspace = workspace;
        this.editorFactory = editorFactory;
    }

    /** Returns the container panel, creating it on first use. */
    @Override
    protected JPanel getPanel()
    {
        if ( panel == null ) {
            panel = createPanel();
        }
        return panel;
    }

    /**
     * Blocks or unblocks all user input on the enclosing JFrame (if the panel
     * is already attached to one); a no-op before the panel exists.
     */
    @Override
    public void setBlockAllUserInput(boolean yesNo)
    {
        if ( panel != null )
        {
            // walk up the component hierarchy to the owning frame
            Container parent = panel.getParent();
            while( parent != null && !(parent instanceof JFrame ) ) {
                parent = parent.getParent();
            }
            if ( parent != null && parent instanceof JFrame ) {
                UIUtils.setBlockAllUserInput( (JFrame) parent , yesNo );
            }
        }
    }

    // NOTE(review): intentionally empty? Selecting the view's tab (selectTab)
    // would be the obvious implementation — confirm.
    @Override
    public void toFront(IView view)
    {
    }

    /**
     * Builds the container panel: the tabbed pane inside a GridBagLayout,
     * registers the File/Save menu entry (if a menu manager exists), the
     * tab-change listener, and a CTRL-W key handler that closes the selected tab.
     */
    private JPanel createPanel()
    {
        final JPanel result = new JPanel();
        result.setLayout( new GridBagLayout() );
        GridBagConstraints cnstrs = constraints(0 , 0 , true , true , GridBagConstraints.BOTH );
        setColors( result );
        tabbedPane.setBackground( Color.WHITE );
        tabbedPane.setForeground( Color.black );
        result.add( tabbedPane ,cnstrs );

        if ( getViewContainer().getMenuManager() != null ) {
            getViewContainer().getMenuManager().addEntry( saveCurrent );
        }

        tabbedPane.addChangeListener( changeListener );

        tabbedPane.addKeyListener( new KeyAdapter()
        {
            public void keyReleased(KeyEvent e)
            {
                // CTRL-W closes the currently selected editor tab
                if ( e.getKeyCode() == KeyEvent.VK_W && ( e.getModifiersEx() & KeyEvent.CTRL_DOWN_MASK ) != 0 )
                {
                    int idx = tabbedPane.getSelectedIndex();
                    if ( idx != -1 ) {
                        disposeView( getViewForTabIndex( idx ) );
                    }
                }
            }
        } );
        return result;
    }

    /**
     * Installs a CTRL-W key listener on the given component that closes the
     * given editor view (through its container when it has one, otherwise by
     * disposing the view directly).
     */
    public static final void addEditorCloseKeyListener(Component comp,final IEditorView view)
    {
        comp.addKeyListener( new KeyAdapter()
        {
            public void keyReleased(KeyEvent e)
            {
                if ( e.getKeyCode() == KeyEvent.VK_W && ( e.getModifiersEx() & KeyEvent.CTRL_DOWN_MASK ) != 0 )
                {
                    System.out.println("*** Closing editor "+view+" ***");
                    if ( view.hasViewContainer() ) {
                        view.getViewContainer().disposeView( view );
                    } else {
                        view.dispose();
                    }
                }
            }
        } );
    }

    /** Adds a view as a new tab and returns it. */
    @Override
    public IView addView(IView view)
    {
        internalAddView(view);
        return view;
    }

    /** Appends the view as the last tab and records its tab index. */
    private ViewWithPanel internalAddView(IView view)
    {
        final int index = tabbedPane.getTabCount();
        final ViewWithPanel newView = new ViewWithPanel( view , index );
        views.add( newView );
        tabbedPane.add( view.getTitle() , newView.panel );
        return newView;
    }

    /** Selects the tab belonging to the given view; no-op if the view is not here. */
    protected void selectTab(IView view)
    {
        for ( ViewWithPanel v : views )
        {
            if ( v.view == view )
            {
                tabbedPane.setSelectedIndex( v.tabIndex );
                return;
            }
        }
    }

    /**
     * Looks up the bookkeeping entry for a tab index.
     *
     * @throws IllegalArgumentException if no view occupies that index
     */
    protected ViewWithPanel getViewWithPanelForTabIndex(int tabIndex)
    {
        for ( ViewWithPanel v : views )
        {
            if ( v.tabIndex == tabIndex ) {
                return v;
            }
        }
        throw new IllegalArgumentException("Invalid tab index: "+tabIndex);
    }

    /** Returns the view shown at the given tab index. */
    protected IView getViewForTabIndex(int tabIndex)
    {
        return getViewWithPanelForTabIndex( tabIndex ).view;
    }

    /** Updates the tab title of the given view. */
    @Override
    public void setTitle(IView view, String title)
    {
        for ( ViewWithPanel p : this.views )
        {
            if ( p.view == view )
            {
                final int index = tabbedPane.indexOfComponent( p.panel );
                if ( index != -1 ) {
                    tabbedPane.setTitleAt( index , title );
                }
                break;
            }
        }
    }

    /**
     * Disposes all contained views, unregisters the File/Save menu entry and
     * notifies container listeners that this container closed.
     */
    @Override
    public void disposeHook()
    {
        // iterate over a copy — disposing a view may mutate this.views
        final List<ViewWithPanel> copy = new ArrayList<ViewWithPanel>(this.views);

        for ( ViewWithPanel v : copy ) {
            v.view.dispose();
        }
        this.views.clear();

        if ( getViewContainer().getMenuManager() != null ) {
            getViewContainer().getMenuManager().removeEntry( saveCurrent );
        }
        helper.fireViewContainerClosed( this );
    }

    /** Returns a snapshot list of the contained views. */
    @Override
    public List<IView> getViews()
    {
        final List<IView> result = new ArrayList<IView>();
        for ( ViewWithPanel p : this.views ) {
            result.add( p.view );
        }
        return result;
    }

    /**
     * Removes the given view's tab, disposes the view, shifts the recorded tab
     * indices of all later tabs down by one and focuses the next (or previous)
     * remaining tab.
     *
     * NOTE(review): changeListener's previouslySelectedTab is not adjusted
     * here, so its next callback may pair the wrong old/new views — verify.
     */
    @Override
    public void disposeView(IView view)
    {
        if (view == null) {
            throw new IllegalArgumentException("view must not be NULL");
        }

        int disposedTabIndex = -1;
        // iterate over a copy so removal from this.views is safe
        final List<ViewWithPanel> copy = new ArrayList<ViewWithPanel>(this.views);
        for (Iterator<ViewWithPanel> it = copy.iterator(); it.hasNext();)
        {
            final ViewWithPanel viewWithPanel = it.next();
            if ( viewWithPanel.view == view )
            {
                this.views.remove( viewWithPanel );
                disposedTabIndex = viewWithPanel.tabIndex;
                this.tabbedPane.remove( viewWithPanel.panel );
                viewWithPanel.view.dispose();
                break;
            }
        }

        if ( disposedTabIndex != -1 )
        {
            // adjust tab indices
            int previousTabToFocus = -1;
            int nextTabToFocus = -1;
            for ( ViewWithPanel v : this.views )
            {
                if ( v.tabIndex >= disposedTabIndex )
                {
                    v.tabIndex--;
                    if ( nextTabToFocus == -1 ) {
                        nextTabToFocus = v.tabIndex;
                    }
                } else {
                    previousTabToFocus = v.tabIndex;
                }
            }

            // focus next/previous tab
            if ( nextTabToFocus != -1 ) {
                tabbedPane.setSelectedIndex( nextTabToFocus );
            } else if ( previousTabToFocus != -1 ) {
                tabbedPane.setSelectedIndex( previousTabToFocus );
            }
        }
    }

    @Override
    public String getTitle() {
        return title;
    }

    /** Refreshes the display of every contained view. */
    @Override
    public void refreshDisplay()
    {
        for ( ViewWithPanel p : this.views ) {
            p.view.refreshDisplay();
        }
    }

    /**
     * Returns the already-open editor for the given resource, or null if no
     * editor in this container currently shows it.
     */
    public IEditorView getEditor(IResource resource)
    {
        if (resource == null) {
            throw new IllegalArgumentException("resource must not be NULL");
        }
        for ( ViewWithPanel p : this.views )
        {
            if ( p.view instanceof IEditorView)
            {
                if ( DefaultResourceMatcher.INSTANCE.isSame( ((IEditorView) p.view).getCurrentResource() , resource ) )
                {
                    return (IEditorView) p.view;
                }
            }
        }
        return null;
    }

    /**
     * Returns true if any contained editor has unsaved content.
     *
     * NOTE(review): the name suggests "true when disposal is safe", but the
     * implementation returns true when there IS unsaved content — confirm the
     * intended contract with IViewContainer.
     */
    @Override
    public boolean mayBeDisposed()
    {
        boolean result = false;
        for ( ViewWithPanel p : this.views )
        {
            if ( p.view instanceof IEditorView)
            {
                result |= ((IEditorView) p.view).hasUnsavedContent();
            }
        }
        return result;
    }

    @Override
    public String getID() {
        return VIEW_ID;
    }

    /**
     * Returns the contained view with the given ID, or null if none matches.
     *
     * @throws IllegalArgumentException if viewId is blank or null
     */
    @Override
    public IView getViewByID(String viewId)
    {
        if (StringUtils.isBlank(viewId)) {
            throw new IllegalArgumentException("viewId must not be blank/null");
        }

        for ( ViewWithPanel p : this.views )
        {
            if ( p.view.getID().equals( viewId ) ) {
                return p.view;
            }
        }
        return null;
    }

    // This container has no menu manager of its own; the parent container's is
    // used (see createPanel()/disposeHook()).
    @Override
    public MenuManager getMenuManager() {
        return null;
    }

    @Override
    public void addViewContainerListener(IViewContainerListener listener)
    {
        helper.addViewContainerListener( listener );
    }

    @Override
    public void removeViewContainerListener(IViewContainerListener listener)
    {
        helper.removeViewContainerListener( listener );
    }

    /**
     * Opens the given resource in an editor tab. If an editor for it already
     * exists, that tab is refreshed and selected; otherwise a new editor is
     * created via the EditorFactory, added as a tab and told to open the
     * resource.
     *
     * @param caretPosition caret offset to restore in the opened editor
     */
    public IEditorView openResource(IWorkspace workspace ,
            IAssemblyProject project,IResource resource,int caretPosition) throws IOException
    {
        IEditorView editor = getEditor( resource );
        if ( editor != null )
        {
            editor.refreshDisplay();
            selectTab( editor );
            return editor;
        }

        editor = editorFactory.createEditor( project , resource , this , navigationHistory );
        final ViewWithPanel viewWithPanel = internalAddView( editor );
        tabbedPane.setSelectedIndex( viewWithPanel.tabIndex );

        // open resource AFTER IView has been added to this container,
        // view may rely on methods of this container
        editor.openResource( project , resource , caretPosition );
        return editor;
    }

    /** Returns all contained views that are SourceCodeViews. */
    private List<SourceCodeView> getSourceCodeViews()
    {
        List<SourceCodeView> result = new ArrayList<SourceCodeView>();
        for ( ViewWithPanel view : this.views )
        {
            if ( view.view instanceof SourceCodeView) {
                result.add( (SourceCodeView) view.view );
            }
        }
        return result;
    }

    /**
     * Resolves a resource identifier against the in-memory sources of the open
     * editors.
     *
     * @throws ResourceNotFoundException if no open editor holds that identifier
     */
    @Override
    public IResource resolve(String identifier) throws ResourceNotFoundException
    {
        IResource result = tryResolve(identifier);
        if ( result == null ) {
            throw new ResourceNotFoundException("Failed to find resource '"+identifier+"'",identifier);
        }
        return result;
    }

    /** Returns the in-memory source with the given identifier, or null. */
    private IResource tryResolve(String identifier)
    {
        for ( SourceCodeView v : getSourceCodeViews() )
        {
            if ( v.getSourceFromMemory().getIdentifier().equals( identifier ) )
            {
                return v.getSourceFromMemory();
            }
        }
        return null;
    }

    // NOTE(review): the 'parent' argument is ignored — relative resolution
    // behaves exactly like resolve(); confirm that is intended.
    @Override
    public IResource resolveRelative(String identifier, IResource parent) throws ResourceNotFoundException
    {
        IResource result = tryResolve(identifier);
        if ( result == null ) {
            throw new ResourceNotFoundException("Failed to find resource '"+identifier+"'",identifier);
        }
        return result;
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v9.resources;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Typed resource name for the pattern
 * {@code customers/{customer_id}/customerManagerLinks/{manager_customer_id}~{manager_link_id}}.
 *
 * <p>Immutable; build instances via {@link #of} or {@link #newBuilder()}, or parse a formatted
 * string with {@link #parse}.
 */
@Generated("by gapic-generator-java")
public class CustomerManagerLinkName implements ResourceName {
  // Path template matching/instantiating the resource-name pattern above.
  private static final PathTemplate CUSTOMER_ID_MANAGER_CUSTOMER_ID_MANAGER_LINK_ID =
      PathTemplate.createWithoutUrlEncoding(
          "customers/{customer_id}/customerManagerLinks/{manager_customer_id}~{manager_link_id}");
  // Lazily built via double-checked locking in getFieldValuesMap(); volatile for safe publication.
  private volatile Map<String, String> fieldValuesMap;
  private final String customerId;
  private final String managerCustomerId;
  private final String managerLinkId;

  /** @deprecated Present only for legacy/serialization use; fields are left null. */
  @Deprecated
  protected CustomerManagerLinkName() {
    customerId = null;
    managerCustomerId = null;
    managerLinkId = null;
  }

  private CustomerManagerLinkName(Builder builder) {
    customerId = Preconditions.checkNotNull(builder.getCustomerId());
    managerCustomerId = Preconditions.checkNotNull(builder.getManagerCustomerId());
    managerLinkId = Preconditions.checkNotNull(builder.getManagerLinkId());
  }

  public String getCustomerId() {
    return customerId;
  }

  public String getManagerCustomerId() {
    return managerCustomerId;
  }

  public String getManagerLinkId() {
    return managerLinkId;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates a resource name from the three path components; none may be null. */
  public static CustomerManagerLinkName of(
      String customerId, String managerCustomerId, String managerLinkId) {
    return newBuilder()
        .setCustomerId(customerId)
        .setManagerCustomerId(managerCustomerId)
        .setManagerLinkId(managerLinkId)
        .build();
  }

  /** Formats the three path components into the canonical resource-name string. */
  public static String format(String customerId, String managerCustomerId, String managerLinkId) {
    return newBuilder()
        .setCustomerId(customerId)
        .setManagerCustomerId(managerCustomerId)
        .setManagerLinkId(managerLinkId)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource-name string; returns null for the empty string and throws if the
   * string does not match the expected pattern.
   */
  public static CustomerManagerLinkName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        CUSTOMER_ID_MANAGER_CUSTOMER_ID_MANAGER_LINK_ID.validatedMatch(
            formattedString, "CustomerManagerLinkName.parse: formattedString not in valid format");
    return of(
        matchMap.get("customer_id"),
        matchMap.get("manager_customer_id"),
        matchMap.get("manager_link_id"));
  }

  /** Parses each string in order; see {@link #parse} for per-element behavior. */
  public static List<CustomerManagerLinkName> parseList(List<String> formattedStrings) {
    List<CustomerManagerLinkName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Formats each value; null entries map to the empty string. */
  public static List<String> toStringList(List<CustomerManagerLinkName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (CustomerManagerLinkName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return CUSTOMER_ID_MANAGER_CUSTOMER_ID_MANAGER_LINK_ID.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    // Double-checked locking over the volatile cache field.
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (customerId != null) {
            fieldMapBuilder.put("customer_id", customerId);
          }
          if (managerCustomerId != null) {
            fieldMapBuilder.put("manager_customer_id", managerCustomerId);
          }
          if (managerLinkId != null) {
            fieldMapBuilder.put("manager_link_id", managerLinkId);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return CUSTOMER_ID_MANAGER_CUSTOMER_ID_MANAGER_LINK_ID.instantiate(
        "customer_id",
        customerId,
        "manager_customer_id",
        managerCustomerId,
        "manager_link_id",
        managerLinkId);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // BUG FIX: the original condition was `o != null || getClass() == o.getClass()`.
    // With `||`, a null argument dereferences o (NPE) and any non-null object of a
    // different class short-circuits to true and then fails the cast (CCE). The
    // Object.equals contract requires returning false in both cases, so use `&&`.
    if (o != null && getClass() == o.getClass()) {
      CustomerManagerLinkName that = ((CustomerManagerLinkName) o);
      return Objects.equals(this.customerId, that.customerId)
          && Objects.equals(this.managerCustomerId, that.managerCustomerId)
          && Objects.equals(this.managerLinkId, that.managerLinkId);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(customerId);
    h *= 1000003;
    h ^= Objects.hashCode(managerCustomerId);
    h *= 1000003;
    h ^= Objects.hashCode(managerLinkId);
    return h;
  }

  /**
   * Builder for
   * customers/{customer_id}/customerManagerLinks/{manager_customer_id}~{manager_link_id}.
   */
  public static class Builder {
    private String customerId;
    private String managerCustomerId;
    private String managerLinkId;

    protected Builder() {}

    public String getCustomerId() {
      return customerId;
    }

    public String getManagerCustomerId() {
      return managerCustomerId;
    }

    public String getManagerLinkId() {
      return managerLinkId;
    }

    public Builder setCustomerId(String customerId) {
      this.customerId = customerId;
      return this;
    }

    public Builder setManagerCustomerId(String managerCustomerId) {
      this.managerCustomerId = managerCustomerId;
      return this;
    }

    public Builder setManagerLinkId(String managerLinkId) {
      this.managerLinkId = managerLinkId;
      return this;
    }

    private Builder(CustomerManagerLinkName customerManagerLinkName) {
      this.customerId = customerManagerLinkName.customerId;
      this.managerCustomerId = customerManagerLinkName.managerCustomerId;
      this.managerLinkId = customerManagerLinkName.managerLinkId;
    }

    public CustomerManagerLinkName build() {
      return new CustomerManagerLinkName(this);
    }
  }
}
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.settings;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.admin.DevicePolicyManager;
import android.content.ActivityNotFoundException;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.nfc.NfcAdapter;
import android.nfc.NfcManager;
import android.os.Bundle;
import android.os.SystemProperties;
import android.os.UserHandle;
import android.os.UserManager;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.PreferenceCategory;
import android.preference.PreferenceScreen;
import android.preference.SwitchPreference;
import android.provider.SearchIndexableResource;
import android.provider.Settings;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.Log;

import com.android.internal.telephony.SmsApplication;
import com.android.internal.telephony.SmsApplication.SmsApplicationData;
import com.android.internal.telephony.TelephonyIntents;
import com.android.internal.telephony.TelephonyProperties;
import com.android.settings.nfc.NfcEnabler;
import com.android.settings.search.BaseSearchIndexProvider;
import com.android.settings.search.Indexable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Settings fragment for the "More networks" / wireless controls screen: airplane mode,
 * NFC/Android Beam, VPN, tethering, proxy, mobile network settings, the "manage mobile plan"
 * flow, the default-SMS-app picker, and cell-broadcast settings. {@link #onCreate} removes or
 * re-wires each preference according to user type, device capabilities, user restrictions and
 * config resources. Also supplies search-index data via {@link #SEARCH_INDEX_DATA_PROVIDER}.
 */
public class WirelessSettings extends SettingsPreferenceFragment
        implements OnPreferenceChangeListener, Indexable {
    private static final String TAG = "WirelessSettings";

    // Preference keys; must match R.xml.wireless_settings.
    private static final String KEY_TOGGLE_AIRPLANE = "toggle_airplane";
    private static final String KEY_TOGGLE_NFC = "toggle_nfc";
    private static final String KEY_WIMAX_SETTINGS = "wimax_settings";
    private static final String KEY_ANDROID_BEAM_SETTINGS = "android_beam_settings";
    private static final String KEY_VPN_SETTINGS = "vpn_settings";
    private static final String KEY_TETHER_SETTINGS = "tether_settings";
    private static final String KEY_PROXY_SETTINGS = "proxy_settings";
    private static final String KEY_MOBILE_NETWORK_SETTINGS = "mobile_network_settings";
    private static final String KEY_MANAGE_MOBILE_PLAN = "manage_mobile_plan";
    private static final String KEY_SMS_APPLICATION = "sms_application";
    private static final String KEY_TOGGLE_NSD = "toggle_nsd"; //network service discovery
    private static final String KEY_CELL_BROADCAST_SETTINGS = "cell_broadcast_settings";
    private static final String KEY_NFC_CATEGORY_SETTINGS = "nfc_category_settings";
    private static final String KEY_NFC_PAYMENT_SETTINGS = "nfc_payment_settings";

    public static final String EXIT_ECM_RESULT = "exit_ecm_result";
    public static final int REQUEST_CODE_EXIT_ECM = 1;

    private AirplaneModeEnabler mAirplaneModeEnabler;
    private SwitchPreference mAirplaneModePreference;
    private NfcEnabler mNfcEnabler;
    private NfcAdapter mNfcAdapter;
    private NsdEnabler mNsdEnabler;  // never constructed here; NSD toggle is removed in onCreate

    private ConnectivityManager mCm;
    private TelephonyManager mTm;
    private PackageManager mPm;
    private UserManager mUm;

    private static final int MANAGE_MOBILE_PLAN_DIALOG_ID = 1;
    // Instance-state key for preserving the mobile-plan dialog message across recreation.
    private static final String SAVED_MANAGE_MOBILE_PLAN_MSG = "mManageMobilePlanMessage";

    private AppListPreference mSmsApplicationPreference;

    /**
     * Invoked on each preference click in this hierarchy, overrides
     * PreferenceFragment's implementation. Used to make sure we track the
     * preference click events.
     */
    @Override
    public boolean onPreferenceTreeClick(PreferenceScreen preferenceScreen, Preference preference) {
        log("onPreferenceTreeClick: preference=" + preference);
        if (preference == mAirplaneModePreference && Boolean.parseBoolean(
                SystemProperties.get(TelephonyProperties.PROPERTY_INECM_MODE))) {
            // In ECM mode launch ECM app dialog
            startActivityForResult(
                new Intent(TelephonyIntents.ACTION_SHOW_NOTICE_ECM_BLOCK_OTHERS, null),
                REQUEST_CODE_EXIT_ECM);
            return true;
        } else if (preference == findPreference(KEY_MANAGE_MOBILE_PLAN)) {
            onManageMobilePlanClick();
        }
        // Let the intents be launched by the Preference manager
        return super.onPreferenceTreeClick(preferenceScreen, preference);
    }

    // Message shown by the MANAGE_MOBILE_PLAN dialog; null when no dialog is pending.
    private String mManageMobilePlanMessage;

    /**
     * Handles a click on "Manage mobile plan": prefers a carrier-provided provisioning app,
     * then the carrier's provisioning URL in a browser; otherwise records an explanatory
     * message in {@link #mManageMobilePlanMessage} and shows it in a dialog.
     */
    public void onManageMobilePlanClick() {
        log("onManageMobilePlanClick:");
        mManageMobilePlanMessage = null;
        Resources resources = getActivity().getResources();

        NetworkInfo ni = mCm.getProvisioningOrActiveNetworkInfo();
        if (mTm.hasIccCard() && (ni != null)) {
            // Check for carrier apps that can handle provisioning first
            Intent provisioningIntent = new Intent(TelephonyIntents.ACTION_CARRIER_SETUP);
            List<String> carrierPackages =
                    mTm.getCarrierPackageNamesForIntent(provisioningIntent);
            if (carrierPackages != null && !carrierPackages.isEmpty()) {
                if (carrierPackages.size() != 1) {
                    Log.w(TAG, "Multiple matching carrier apps found, launching the first.");
                }
                provisioningIntent.setPackage(carrierPackages.get(0));
                startActivity(provisioningIntent);
                return;
            }

            // Get provisioning URL
            String url = mCm.getMobileProvisioningUrl();
            if (!TextUtils.isEmpty(url)) {
                Intent intent = Intent.makeMainSelectorActivity(Intent.ACTION_MAIN,
                        Intent.CATEGORY_APP_BROWSER);
                intent.setData(Uri.parse(url));
                intent.setFlags(Intent.FLAG_ACTIVITY_BROUGHT_TO_FRONT |
                        Intent.FLAG_ACTIVITY_NEW_TASK);
                try {
                    startActivity(intent);
                } catch (ActivityNotFoundException e) {
                    // No browser available; fail quietly with a log.
                    Log.w(TAG, "onManageMobilePlanClick: startActivity failed" + e);
                }
            } else {
                // No provisioning URL
                String operatorName = mTm.getSimOperatorName();
                if (TextUtils.isEmpty(operatorName)) {
                    // Use NetworkOperatorName as second choice in case there is no
                    // SPN (Service Provider Name on the SIM). Such as with T-mobile.
                    operatorName = mTm.getNetworkOperatorName();
                    if (TextUtils.isEmpty(operatorName)) {
                        mManageMobilePlanMessage = resources.getString(
                                R.string.mobile_unknown_sim_operator);
                    } else {
                        mManageMobilePlanMessage = resources.getString(
                                R.string.mobile_no_provisioning_url, operatorName);
                    }
                } else {
                    mManageMobilePlanMessage = resources.getString(
                            R.string.mobile_no_provisioning_url, operatorName);
                }
            }
        } else if (mTm.hasIccCard() == false) {
            // No sim card
            mManageMobilePlanMessage = resources.getString(R.string.mobile_insert_sim_card);
        } else {
            // NetworkInfo is null, there is no connection
            mManageMobilePlanMessage = resources.getString(R.string.mobile_connect_to_internet);
        }
        if (!TextUtils.isEmpty(mManageMobilePlanMessage)) {
            log("onManageMobilePlanClick: message=" + mManageMobilePlanMessage);
            showDialog(MANAGE_MOBILE_PLAN_DIALOG_ID);
        }
    }

    /**
     * Populates the default-SMS-app picker with every installed SMS app and selects the
     * current default (may be null when none is set).
     */
    private void initSmsApplicationSetting() {
        log("initSmsApplicationSetting:");
        Collection<SmsApplicationData> smsApplications =
                SmsApplication.getApplicationCollection(getActivity());

        // If the list is empty the dialog will be empty, but we will not crash.
        int count = smsApplications.size();
        String[] packageNames = new String[count];
        int i = 0;
        for (SmsApplicationData smsApplicationData : smsApplications) {
            packageNames[i] = smsApplicationData.mPackageName;
            i++;
        }
        String defaultPackageName = null;
        ComponentName appName = SmsApplication.getDefaultSmsApplication(getActivity(), true);
        if (appName != null) {
            defaultPackageName = appName.getPackageName();
        }
        mSmsApplicationPreference.setPackageNames(packageNames, defaultPackageName);
    }

    /**
     * Builds the "manage mobile plan" info dialog; clears the saved message once dismissed.
     * Other IDs fall through to the superclass.
     */
    @Override
    public Dialog onCreateDialog(int dialogId) {
        log("onCreateDialog: dialogId=" + dialogId);
        switch (dialogId) {
            case MANAGE_MOBILE_PLAN_DIALOG_ID:
                return new AlertDialog.Builder(getActivity())
                            .setMessage(mManageMobilePlanMessage)
                            .setCancelable(false)
                            .setPositiveButton(com.android.internal.R.string.ok,
                                    new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int id) {
                                    log("MANAGE_MOBILE_PLAN_DIALOG.onClickListener id=" + id);
                                    mManageMobilePlanMessage = null;
                                }
                            })
                            .create();
        }
        return super.onCreateDialog(dialogId);
    }

    // Debug-level logging helper for this fragment.
    private void log(String s) {
        Log.d(TAG, s);
    }

    /**
     * Returns whether the given radio type (a Settings.Global.RADIO_* token) may be enabled:
     * always true outside airplane mode, otherwise only if listed in
     * AIRPLANE_MODE_TOGGLEABLE_RADIOS.
     */
    public static boolean isRadioAllowed(Context context, String type) {
        if (!AirplaneModeEnabler.isAirplaneModeOn(context)) {
            return true;
        }
        // Here we use the same logic in onCreate().
        String toggleable = Settings.Global.getString(context.getContentResolver(),
                Settings.Global.AIRPLANE_MODE_TOGGLEABLE_RADIOS);
        return toggleable != null && toggleable.contains(type);
    }

    private boolean isSmsSupported() {
        // Some tablet has sim card but could not do telephony operations. Skip those.
        return mTm.isSmsCapable();
    }

    /**
     * Inflates the preference hierarchy and prunes/re-wires every entry based on the current
     * user (owner/secondary/restricted), user restrictions, hardware capabilities (NFC, SMS,
     * WiMAX, tethering, TV) and config resources. Order matters: enablers are constructed
     * before the conditional removals below may null them out.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (savedInstanceState != null) {
            mManageMobilePlanMessage = savedInstanceState.getString(SAVED_MANAGE_MOBILE_PLAN_MSG);
        }
        log("onCreate: mManageMobilePlanMessage=" + mManageMobilePlanMessage);

        mCm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        mTm = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
        mPm = getPackageManager();
        mUm = (UserManager) getSystemService(Context.USER_SERVICE);

        addPreferencesFromResource(R.xml.wireless_settings);

        final int myUserId = UserHandle.myUserId();
        final boolean isSecondaryUser = myUserId != UserHandle.USER_OWNER;
        final boolean isRestrictedUser = mUm.getUserInfo(myUserId).isRestricted();

        final Activity activity = getActivity();
        mAirplaneModePreference = (SwitchPreference) findPreference(KEY_TOGGLE_AIRPLANE);
        SwitchPreference nfc = (SwitchPreference) findPreference(KEY_TOGGLE_NFC);
        PreferenceCategory nfcCategory = (PreferenceCategory)
                findPreference(KEY_NFC_CATEGORY_SETTINGS);
        PreferenceScreen androidBeam = (PreferenceScreen)
                findPreference(KEY_ANDROID_BEAM_SETTINGS);
        SwitchPreference nsd = (SwitchPreference) findPreference(KEY_TOGGLE_NSD);

        mAirplaneModeEnabler = new AirplaneModeEnabler(activity, mAirplaneModePreference);
        mNfcEnabler = new NfcEnabler(activity, nfc, androidBeam);

        mSmsApplicationPreference = (AppListPreference) findPreference(KEY_SMS_APPLICATION);
        // Restricted users cannot currently read/write SMS.
        if (isRestrictedUser || !Utils.canUserMakeCallsSms(activity)) {
            removePreference(KEY_SMS_APPLICATION);
        } else {
            mSmsApplicationPreference.setOnPreferenceChangeListener(this);
            initSmsApplicationSetting();
        }

        // Remove NSD checkbox by default
        getPreferenceScreen().removePreference(nsd);
        //mNsdEnabler = new NsdEnabler(activity, nsd);

        String toggleable = Settings.Global.getString(activity.getContentResolver(),
                Settings.Global.AIRPLANE_MODE_TOGGLEABLE_RADIOS);

        //enable/disable wimax depending on the value in config.xml
        final boolean isWimaxEnabled = !isSecondaryUser && this.getResources().getBoolean(
                com.android.internal.R.bool.config_wimaxEnabled);
        if (!isWimaxEnabled
                || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) {
            PreferenceScreen root = getPreferenceScreen();
            Preference ps = (Preference) findPreference(KEY_WIMAX_SETTINGS);
            if (ps != null) root.removePreference(ps);
        } else {
            // NOTE(review): due to && binding tighter than ||, this reads as
            // `toggleable == null || (!contains(RADIO_WIMAX) && isWimaxEnabled)`;
            // isWimaxEnabled is always true in this branch — confirm intent.
            if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_WIMAX )
                    && isWimaxEnabled) {
                Preference ps = (Preference) findPreference(KEY_WIMAX_SETTINGS);
                ps.setDependency(KEY_TOGGLE_AIRPLANE);
            }
        }

        // Manually set dependencies for Wifi when not toggleable.
        if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_WIFI)) {
            findPreference(KEY_VPN_SETTINGS).setDependency(KEY_TOGGLE_AIRPLANE);
        }
        // Disable VPN.
        if (isSecondaryUser || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_VPN)) {
            removePreference(KEY_VPN_SETTINGS);
        }

        // Manually set dependencies for Bluetooth when not toggleable.
        if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_BLUETOOTH)) {
            // No bluetooth-dependent items in the list. Code kept in case one is added later.
        }

        // Manually set dependencies for NFC when not toggleable.
        if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_NFC)) {
            findPreference(KEY_TOGGLE_NFC).setDependency(KEY_TOGGLE_AIRPLANE);
            findPreference(KEY_ANDROID_BEAM_SETTINGS).setDependency(KEY_TOGGLE_AIRPLANE);
        }

        // Remove NFC if not available
        mNfcAdapter = NfcAdapter.getDefaultAdapter(activity);
        if (mNfcAdapter == null && nfcCategory != null) {
            getPreferenceScreen().removePreference(nfcCategory);
            mNfcEnabler = null;
        }

        // Remove Mobile Network Settings and Manage Mobile Plan for secondary users,
        // if it's a wifi-only device, or if the settings are restricted.
        if (isSecondaryUser || Utils.isWifiOnly(getActivity())
                || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) {
            removePreference(KEY_MOBILE_NETWORK_SETTINGS);
            removePreference(KEY_MANAGE_MOBILE_PLAN);
        }

        // Remove Mobile Network Settings and Manage Mobile Plan
        // if config_show_mobile_plan sets false.
        final boolean isMobilePlanEnabled = this.getResources().getBoolean(
                R.bool.config_show_mobile_plan);
        if (!isMobilePlanEnabled) {
            Preference pref = findPreference(KEY_MANAGE_MOBILE_PLAN);
            if (pref != null) {
                removePreference(KEY_MANAGE_MOBILE_PLAN);
            }
        }

        // Remove SMS Application if the device does not support SMS
        if (!isSmsSupported()) {
            removePreference(KEY_SMS_APPLICATION);
        }

        // Remove Airplane Mode settings if it's a stationary device such as a TV.
        if (mPm.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) {
            removePreference(KEY_TOGGLE_AIRPLANE);
        }

        // Enable Proxy selector settings if allowed.
        Preference mGlobalProxy = findPreference(KEY_PROXY_SETTINGS);
        final DevicePolicyManager mDPM = (DevicePolicyManager)
                activity.getSystemService(Context.DEVICE_POLICY_SERVICE);
        // proxy UI disabled until we have better app support
        getPreferenceScreen().removePreference(mGlobalProxy);
        // NOTE(review): setEnabled is called after the preference was removed above;
        // harmless, but dead unless the removal is reverted.
        mGlobalProxy.setEnabled(mDPM.getGlobalProxyAdmin() == null);

        // Disable Tethering if it's not allowed or if it's a wifi-only device
        final ConnectivityManager cm =
                (ConnectivityManager) activity.getSystemService(Context.CONNECTIVITY_SERVICE);
        if (isSecondaryUser || !cm.isTetheringSupported()
                || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_TETHERING)) {
            getPreferenceScreen().removePreference(findPreference(KEY_TETHER_SETTINGS));
        } else {
            Preference p = findPreference(KEY_TETHER_SETTINGS);
            p.setTitle(Utils.getTetheringLabel(cm));

            // Grey out if provisioning is not available.
            p.setEnabled(!TetherSettings
                    .isProvisioningNeededButUnavailable(getActivity()));
        }

        // Enable link to CMAS app settings depending on the value in config.xml.
        boolean isCellBroadcastAppLinkEnabled = this.getResources().getBoolean(
                com.android.internal.R.bool.config_cellBroadcastAppLinks);
        try {
            if (isCellBroadcastAppLinkEnabled) {
                if (mPm.getApplicationEnabledSetting("com.android.cellbroadcastreceiver")
                        == PackageManager.COMPONENT_ENABLED_STATE_DISABLED) {
                    isCellBroadcastAppLinkEnabled = false;  // CMAS app disabled
                }
            }
        } catch (IllegalArgumentException ignored) {
            isCellBroadcastAppLinkEnabled = false;  // CMAS app not installed
        }
        if (isSecondaryUser || !isCellBroadcastAppLinkEnabled
                || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_CELL_BROADCASTS)) {
            PreferenceScreen root = getPreferenceScreen();
            Preference ps = findPreference(KEY_CELL_BROADCAST_SETTINGS);
            if (ps != null) root.removePreference(ps);
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        // Refresh the SMS-app list in case the default changed while we were stopped.
        initSmsApplicationSetting();
    }

    @Override
    public void onResume() {
        super.onResume();

        mAirplaneModeEnabler.resume();
        if (mNfcEnabler != null) {
            mNfcEnabler.resume();
        }
        if (mNsdEnabler != null) {
            mNsdEnabler.resume();
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Preserve the pending mobile-plan dialog message across recreation.
        if (!TextUtils.isEmpty(mManageMobilePlanMessage)) {
            outState.putString(SAVED_MANAGE_MOBILE_PLAN_MSG, mManageMobilePlanMessage);
        }
    }

    @Override
    public void onPause() {
        super.onPause();

        mAirplaneModeEnabler.pause();
        if (mNfcEnabler != null) {
            mNfcEnabler.pause();
        }
        if (mNsdEnabler != null) {
            mNsdEnabler.pause();
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Result of the ECM exit dialog launched from onPreferenceTreeClick.
        if (requestCode == REQUEST_CODE_EXIT_ECM) {
            Boolean isChoiceYes = data.getBooleanExtra(EXIT_ECM_RESULT, false);
            // Set Airplane mode based on the return value and checkbox state
            mAirplaneModeEnabler.setAirplaneModeInECM(isChoiceYes,
                    mAirplaneModePreference.isChecked());
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    @Override
    protected int getHelpResource() {
        return R.string.help_url_more_networks;
    }

    /** Persists a new default SMS app chosen in the picker. */
    @Override
    public boolean onPreferenceChange(Preference preference, Object newValue) {
        if (preference == mSmsApplicationPreference && newValue != null) {
            SmsApplication.setDefaultApplication(newValue.toString(), getActivity());
            return true;
        }
        return false;
    }

    /**
     * For Search. Mirrors the pruning logic in onCreate so that removed preferences are also
     * excluded from search indexing.
     */
    public static final Indexable.SearchIndexProvider SEARCH_INDEX_DATA_PROVIDER =
        new BaseSearchIndexProvider() {
            @Override
            public List<SearchIndexableResource> getXmlResourcesToIndex(
                    Context context, boolean enabled) {
                SearchIndexableResource sir = new SearchIndexableResource(context);
                sir.xmlResId = R.xml.wireless_settings;
                return Arrays.asList(sir);
            }

            @Override
            public List<String> getNonIndexableKeys(Context context) {
                final ArrayList<String> result = new ArrayList<String>();

                result.add(KEY_TOGGLE_NSD);

                final UserManager um = (UserManager) context.getSystemService(
                        Context.USER_SERVICE);
                final int myUserId = UserHandle.myUserId();
                final boolean isSecondaryUser = myUserId != UserHandle.USER_OWNER;
                final boolean isRestrictedUser = um.getUserInfo(myUserId).isRestricted();

                final boolean isWimaxEnabled = !isSecondaryUser
                        && context.getResources().getBoolean(
                        com.android.internal.R.bool.config_wimaxEnabled);
                if (!isWimaxEnabled
                        || um.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) {
                    result.add(KEY_WIMAX_SETTINGS);
                }

                if (isSecondaryUser) { // Disable VPN
                    result.add(KEY_VPN_SETTINGS);
                }

                // Remove NFC if not available
                final NfcManager manager = (NfcManager) context.getSystemService(
                        Context.NFC_SERVICE);
                if (manager != null) {
                    NfcAdapter adapter = manager.getDefaultAdapter();
                    if (adapter == null) {
                        result.add(KEY_TOGGLE_NFC);
                        result.add(KEY_ANDROID_BEAM_SETTINGS);
                    }
                }

                // Remove Mobile Network Settings and Manage Mobile Plan if it's a wifi-only device.
                if (isSecondaryUser || Utils.isWifiOnly(context)) {
                    result.add(KEY_MOBILE_NETWORK_SETTINGS);
                    result.add(KEY_MANAGE_MOBILE_PLAN);
                }

                // Remove Mobile Network Settings and Manage Mobile Plan
                // if config_show_mobile_plan sets false.
                final boolean isMobilePlanEnabled = context.getResources().getBoolean(
                        R.bool.config_show_mobile_plan);
                if (!isMobilePlanEnabled) {
                    result.add(KEY_MANAGE_MOBILE_PLAN);
                }

                // Remove SMS Application if the device does not support SMS
                TelephonyManager tm = (TelephonyManager) context.getSystemService(
                        Context.TELEPHONY_SERVICE);
                if (!tm.isSmsCapable() || isRestrictedUser) {
                    result.add(KEY_SMS_APPLICATION);
                }

                final PackageManager pm = context.getPackageManager();

                // Remove Airplane Mode settings if it's a stationary device such as a TV.
                if (pm.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) {
                    result.add(KEY_TOGGLE_AIRPLANE);
                }

                // proxy UI disabled until we have better app support
                result.add(KEY_PROXY_SETTINGS);

                // Disable Tethering if it's not allowed or if it's a wifi-only device
                ConnectivityManager cm = (ConnectivityManager)
                        context.getSystemService(Context.CONNECTIVITY_SERVICE);
                if (isSecondaryUser || !cm.isTetheringSupported()) {
                    result.add(KEY_TETHER_SETTINGS);
                }

                // Enable link to CMAS app settings depending on the value in config.xml.
                boolean isCellBroadcastAppLinkEnabled = context.getResources().getBoolean(
                        com.android.internal.R.bool.config_cellBroadcastAppLinks);
                try {
                    if (isCellBroadcastAppLinkEnabled) {
                        if (pm.getApplicationEnabledSetting("com.android.cellbroadcastreceiver")
                                == PackageManager.COMPONENT_ENABLED_STATE_DISABLED) {
                            isCellBroadcastAppLinkEnabled = false;  // CMAS app disabled
                        }
                    }
                } catch (IllegalArgumentException ignored) {
                    isCellBroadcastAppLinkEnabled = false;  // CMAS app not installed
                }
                if (isSecondaryUser || !isCellBroadcastAppLinkEnabled) {
                    result.add(KEY_CELL_BROADCAST_SETTINGS);
                }

                return result;
            }
        };
}
package aki.packages.pkcs7; import aki.packages.tools.BashReader; import aki.packages.tools.FileWriter; import aki.packages.tools.Logger; import aki.packages.utils.VerifyUtils; import aki.packages.x509.*; import aki.packages.tools.FileReader; /** * Created by aakintol on 28/06/16. */ import java.io.File; public class PKCS7 extends Signable { private final String HEADER = "-----BEGIN PKCS7-----"; private final String FOOTER = "-----END PKCS7-----"; private boolean isEnveloped; private boolean isEncrypted; private byte[] encryptedData; private Certificate certificate; public PKCS7(String rawData, boolean addHeaders) throws PKCS7Exception { String header = ""; String footer = ""; if (addHeaders) { header = HEADER; footer = FOOTER; } this.setContents(String.format("%s\n%s%s", header, rawData, footer)); this.setType(FileType.PEM); this.checkContents(); } public void setCertificate(String buff) throws CertificateException { this.certificate = Certificate.loadCertificateFromBuffer(buff); } public void setCertificate(Certificate certificate) { this.certificate = certificate; } public String getEncryptedDataAsString() { if (encryptedData == null) { return null; } return new String(encryptedData); } public boolean isEnveloped() { return isEnveloped; } public boolean isEncrypted() { return isEncrypted; } public boolean sign(Certificate signer) throws PKCS7Exception { this.setCertSigner(signer); try { return this.sign(signer); } catch (SignatureException e) { throw new PKCS7Exception(e); } } public boolean encrypt() throws CertificateException { if (!isSigned || getSignedDataDER() == null || getSignedDataPEM() == null) { throw new CertificateException("Please sign the PKCS7 first with a signer and its private key."); } // As the first statement of this methods implies it, the encryption process come right after the signing process // This means the encrypt should be very similar to the signing process, and thus easier to understand // This file should exist, so get its 
contents or dump the signed data if the file is not there. File tempSigned = new File("tmp/temp-"+getFilename(false)+".signed"); File tempEnc = new File("tmp/temp-"+getFilename(false)+".encrypted"); if (!tempSigned.exists()) { FileWriter.write(getDERSignedDataAsString(), tempSigned.getPath()); } addTempFile(tempSigned); //Create a temp file that will contain the signer File tempSignerBlob = new File("tmp/temp-"+getFilename(false)+".signer"); if (!FileWriter.write(this.getCertSigner().getBlob(), tempSignerBlob.getAbsolutePath())) { throw new PKCS7Exception("Couldn't write the signer's blob of data to the file."); } addTempFile(tempSignerBlob); String[] args = {"openssl", "cms", "-encrypt", "-in", tempSigned.getPath(), "-out", tempEnc.getPath(), tempSignerBlob.getPath()}; BashReader bashReader = BashReader.read(args); if (bashReader == null || bashReader.getExitValue() != 0) { if (bashReader == null) { throw new PKCS7Exception("The command \"" + BashReader.toSingleString(args) + "\" failed (null)."); } throw new PKCS7Exception("The command \"" + BashReader.toSingleString(args) + "\" failed - " + bashReader.getOutput() + " ("+bashReader.getExitValue()+")"); } // Now we have a file with encrypted data! 
this.encryptedData = BashReader.toSingleString(FileReader.getLines(tempEnc)).trim().getBytes(); this.isEncrypted = true; if (cleanTempFiles()) { Logger.debug("PKCS7", "PKCS7.encrypt(): Temp files all cleaned up."); } else { Logger.debug("PKCS7", "PKCS7.encrypt(): Temp files NOT cleaned up (all or some)."); } return isEncrypted; } public boolean signAndEncrypt() throws CertificateException { return sign() && encrypt(); } @Override public boolean sign() throws CertificateException { try { return super.sign(); } catch (SignatureException e) { throw new PKCS7Exception(e); } } public void verifySignature(Certificate caCert) throws SignatureException { String[] args; if (caCert != null) { try { Certificate.loadCertificateFromBuffer(caCert.getBlob()); } catch (CertificateException e) { throw new SignatureException("Invalid certificate ("+e.getMessage()+")"); } args = new String[] {"python", "scripts/pkcs7_verifyier.py", "-in", getContentsFilename(), "-ca", caCert.getFilename()}; } else { args = new String[] {"python", "scripts/pkcs7_verifyier.py", "-in", getContentsFilename()}; } BashReader br = BashReader.read(args); if (br == null) { throw new SignatureException("Invalid command."); } if (br.getExitValue() == 0) { return; } throw new SignatureException("The signature verification has failed."); } private void checkContents() throws PKCS7Exception { String contents = this.getContents(); if (contents == null) { throw new PKCS7Exception(); } if (!contents.startsWith(HEADER) || !contents.endsWith(FOOTER)) { throw new PKCS7Exception("Header or/and footer missing."); } String in = getContentsFilename(); String[] args = {"openssl", "pkcs7", "-inform", "PEM", "-in", in, "-noout"}; BashReader br = BashReader.read(args); if (br == null || br.getExitValue() != 0) { String message; if (br == null) { message = "Error while cheking the PKCS7 data."; } else { message = br.getErrorMessage(); } Logger.error(TAG, message + "\r\tfor "+getContents()); throw new PKCS7Exception(message); } } 
public static void main(String[] args) { String rawData = /*"-----BEGIN PKCS7-----\n" +*/ "MIIGugYJKoZIhvcNAQcCoIIGqzCCBqcCAQExDjAMBggqhkiG9w0CBQUAMIIDIQYJ\n" + "KoZIhvcNAQcBoIIDEgSCAw4wggMKBgkqhkiG9w0BBwOgggL7MIIC9wIBADGCAUow\n" + "ggFGAgEAMC4wKTELMAkGA1UEChMCcWExGjAYBgNVBAMTEVN0VmluY2VudFFBQ0Ey\n" + "MDExAgEBMA0GCSqGSIb3DQEBAQUABIIBAJAuX2pGfDb4QvwQh8KHmtoeZ4Yawkcc\n" + "qihpBVHoLfw8X1JGYIp1QFc9SHYuesv5G3sxN1RxVwrDAZo+aaGWWwbCLjvmlFAr\n" + "SO5cBXYtJOvnD9DfNlRC++1miOmi2slzbxC7rq7DNo+uaC6YEE/Np/uFmoftLltC\n" + "V6BOgzXWCnDOjTqyuVRyZcjJ5fOJwpwbuAn5jbiEiSQLMUc7hhHdxC0sdlVYwrtO\n" + "Yjh/H9LpoO+H1LacTp41XBpK9QBgB80PTtkRzjlMInmjATtdaWYhPdGJh2s5z0bQ\n" + "mJc8cd2sIN7LAmV/r7I6dGZZkzSAAWOfUxNWRzGHvdITw24G28kZCeQwggGiBgkq\n" + "hkiG9w0BBwEwEQYFKw4DAgcECMa+/NEt+BAtgIIBgEYGsXOk72sGavghfEh80pJO\n" + "KNRxgfI99AzhQH/C+HA3yGU8WH3GUPCCIH/UJ3PxMZOgiAhJytucVrboVqwvvqN8\n" + "BJHsbj702MPLLwvfD3dgz5CjhXRwd+nYVCIihyTWx2SOqFjBhkWayLTbgXga/eRg\n" + "HLV+Pr87rt+6aIiuOrRpfuToxYaeBqAKClj/iJYeRMOCmSxRzx4OPsktg2f06EIw\n" + "W6sWikK53GvIocCXpCiymwiKChDYn5iingh4zkcKVq78ZtuzD9JFhha5BRqueCve\n" + "iModreVI9WJpc0rLjHRaRafLAsic2zxylxR3ycm/TNQ6aU1XXYmY24n3u9pRHH+k\n" + "kxQvzhtt/pw5mwqnTW1Y9J8wMRnW1wPa7uZuv6QxZymfphJWBTCoek5u+pVHCYwf\n" + "TGaP0bh8K3Ylsqwi6bIBaBc1bNkLQ4pRQXa70tU61lL+LuCC3f3auimMdUjWr1QP\n" + "LtRr8zV9AbpbyNVqfDiGWYX4Xu9XFAxghbu2oKJbSKCCAcEwggG9MIIBJqADAgEC\n" + "AiBDMDZGMkVGNjg4Mjc5NUJDQzgyODkxMTkxMkYzRjcyODANBgkqhkiG9w0BAQQF\n" + "ADAVMRMwEQYDVQQDEwoxMjcuMC4xLjEwMB4XDTE2MDYyNzE4MDExMFoXDTE2MDcw\n" + "MzIwMDExMFowFTETMBEGA1UEAxMKMTI3LjAuMS4xMDCBnzANBgkqhkiG9w0BAQEF\n" + "AAOBjQAwgYkCgYEAmlixAXWAbhwCjZN1hRosDwTPNxh4SzoscCAU7UPZk3CDQ10z\n" + "YF5em8Ui4xTjcwWnlUwxsWBD64Pai3WAiqBhuB6AVw5rFTVDV4SMDdU+SLuniRZp\n" + "LK3BXiFiqHQp5Z7fs+OxDzSGpWR0Y5JQUOCfd6RyJ2D7oBY5L89b4uPbs98CAwEA\n" + "ATANBgkqhkiG9w0BAQQFAAOBgQBTx85iXRnNlP9Ojl73OB2K2fK+Yzfo+r3Hf51E\n" + "g7EHP1eWYVi59/QYdN+5WcgViQWbgAygLHqQQa/vppmklp9ZnY2mNLtPIwAKE2sf\n" + 
"8yXLW6YNE+T4H0lzY8DLBPjR2NHvboC9USuAEl5/0cP1tp7AnXAodyrQ9USsoZ2c\n" + "r3KjpjGCAaYwggGiAgEBMDkwFTETMBEGA1UEAxMKMTI3LjAuMS4xMAIgQzA2RjJF\n" + "RjY4ODI3OTVCQ0M4Mjg5MTE5MTJGM0Y3MjgwDAYIKoZIhvcNAgUFAKCBwTASBgpg\n" + "hkgBhvhFAQkCMQQTAjE5MBgGCSqGSIb3DQEJAzELBgkqhkiG9w0BBwEwHAYJKoZI\n" + "hvcNAQkFMQ8XDTE2MDYyNzE4MDExMFowHwYJKoZIhvcNAQkEMRIEEE7F5LZ/EG9Z\n" + "lkAsVchtrhwwIAYKYIZIAYb4RQEJBTESBBD6vHMHXuym8XD6AqjAlTUyMDAGCmCG\n" + "SAGG+EUBCQcxIhMgQzA2RjJFRjY4ODI3OTVCQ0M4Mjg5MTE5MTJGM0Y3MjgwDQYJ\n" + "KoZIhvcNAQEBBQAEgYBYpGW/8dKMHnED09/pkqr2FYTBSlVTIqAIN0ECHt+BmNW3\n" + "FhzL5AUEAaAcCf+fPuNgFUITOcM0YYGvzXD0vUrtrzfhSk2wFAU+olH/yYM+0mJ7\n" + "ZgVL5zy55NHa7XsrcIVs576RGA6czEoetftYGRykS8zU6SOKFumC86ojkBKeYw==\n"/* + "-----END PKCS7-----\n"*/; try { PKCS7 pkcs7 = new PKCS7(rawData, true); Logger.printOut(pkcs7.getContentsFilename()); } catch (PKCS7Exception e) { e.printStackTrace(); } // rawData = BashReader.toSingleString(FileReader.getLines("/home/aakintol/Downloads/cbn_dsa-cert.pem")); // Signable pkcs7 = new Signable(); // pkcs7.setContents(rawData); //// pkcs7.createFilename(); // try { // Certificate signer = Certificate.loadCertificateFromFile("test-signer.pem"); // PrivateKey privateKey = PrivateKey.loadPrivateKey(new File("test-key.key")); // // pkcs7.setCertSigner(signer); // pkcs7.setPrivateKeySigner(privateKey); // pkcs7.sign(); // int v = pkcs7.verify(); // System.exit(v); // System.out.println(pkcs7.getDERSignedDataAsString()); // } catch (CertificateException e) { // e.printStackTrace(); // System.exit(1); // } // BashReader bashReader = BashReader.read("python", "hexdump", "-in", "verified.bin"); // if (bashReader != null) { // System.out.println(bashReader.getExitValue()); // System.out.println(bashReader.getOutput()); // } else { // System.out.println("HMMM."); // } // Signable signable = new Signable(); // Subject load; // try { // load = Subject.load("/C=CA/L=Ottawa/CN=cbnca"); // } catch (Exception e) { // load = null; // } // // 
signable.setContents(rawData); // Logger.debug("set priv key + cert: "+ VerifyUtils.setKeyAndSigner("test-key.key", "test-signer.pem", signable)); // signable.sign(null, null, null); // Logger.debug("signed data: "+signable.getSignedDataPEM()); // Logger.printOut(signable.getCertSigner().getBlob()); // Logger.printOut(new String(signable.getPrivateKeySigner().dumpDER())); // Logger.printOut(signable.getPrivateKeySigner().dumpPEM()); // Logger.debug("keygen: "+VerifyUtils.generateKey("rsa", 2048, new File("res/out.key"), new File("res/out.cert"), signable, load)); // // signable.setContents("valid contents."); // Logger.debug("sign: "+SignUtils.execOpenSSLCMSSign("sha1", true, false, false, signable)); // Logger.debug("signed? "+signable.isSigned()); // Logger.debug("locate sig pem: "+VerifyUtils.locateSignature("PEM", signable)); // Logger.debug("locate sig der: "+VerifyUtils.locateSignature("DER", signable)); // Logger.debug("pub key extraction: "+VerifyUtils.extractPublicKeyFromCertificate("pem", signable)); // Logger.debug("asn1parse: "+SignUtils.execOpenSSLASN1Parse("DER", signable, false)); // Logger.debug("extract rsa bin: "+VerifyUtils.extractBinaryRSAEncryptedHash("sha256", signable)); // Hexdump hexReceiver = new Hexdump(); // Logger.debug("hexdump: "+VerifyUtils.performHexdump("res/signed-sha256.bin", hexReceiver)); // Logger.debug("hexdump result: "+hexReceiver.getDump()); // Logger.debug("sig verif: "+VerifyUtils.verifySignature("res/signed-sha256.bin", "verified256.bin", signable)); // VerifyUtils.locateSignature("PEM", signable); // try { // signable.setContents("HAHAHAHAHAHAH to sign."); // signable.sign(); // } catch (CertificateException e) { // e.printStackTrace(); // } try { Thread.sleep(5000); BashReader.read("rm", "-rf", "tmp/"); } catch (InterruptedException e) { e.printStackTrace(); } // BashReader br = BashReader.read("openssl req -key out.key -new -x509 -days 365 -out out.cert -subj \"/CN=cbnca/C=CA/L=Ottawa\""); // // Logger.debug(br != 
null ? br.getExitValue() + " : "+br.getOutput() : "NULL"); // try { // String[] argv = {"openssl", "req", "-nodes", "-newkey", String.format("%s:%s", "rsa", 2048), "-keyout", "java.key", "-subj", "/C=CA/L=Ottawa/OU=CBN"}; // BashReader br = BashReader.read(argv); // Logger.debug(br != null ? br.toString() : "NULL"); // argv = new String[]{"openssl", "req", "-key", "java.key", "-new", "-x509", "-days", "365", "-out", "java.cert", "-subj", "/C=CA/L=Ottawa/OU=CBN"}; // br = BashReader.read(argv); // Logger.debug(br != null ? br.toString() : "NULL"); // } catch (Exception e) { // e.printStackTrace(); // } } }
/* * Copyright 2008 Ayman Al-Sairafi ayman.alsairafi@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License * at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jsyntaxpane.util; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; /** * * @author subwiz * @author Ayman Al-Sairafi */ public class JarServiceProvider { public static final String SERVICES_ROOT = "META-INF/services/"; private static final Logger LOG = Logger.getLogger(JarServiceProvider.class.getName()); /** * Prevent anyone from instantiating this class. * Just use the static method */ private JarServiceProvider() { } private static ClassLoader getClassLoader() { ClassLoader cl = JarServiceProvider.class.getClassLoader(); return cl == null ? 
ClassLoader.getSystemClassLoader() : cl; } /** * Return an Object array from the file in META-INF/resources/{classname} * @param cls * @return * @throws java.io.IOException */ public static List<Object> getServiceProviders(Class cls) throws IOException { ArrayList<Object> l = new ArrayList<Object>(); ClassLoader cl = getClassLoader(); String serviceFile = SERVICES_ROOT + cls.getName(); Enumeration<URL> e = cl.getResources(serviceFile); while (e.hasMoreElements()) { URL u = e.nextElement(); InputStream is = u.openStream(); BufferedReader br = null; try { br = new BufferedReader( new InputStreamReader(is, Charset.forName("UTF-8"))); String str = null; while ((str = br.readLine()) != null) { int commentStartIdx = str.indexOf("#"); if (commentStartIdx != -1) { str = str.substring(0, commentStartIdx); } str = str.trim(); if (str.length() == 0) { continue; } try { Object obj = cl.loadClass(str).newInstance(); l.add(obj); } catch (Exception ex) { LOG.warning("Could not load: " + str); LOG.warning(ex.getMessage()); } } } finally { if (br != null) { br.close(); } } } return l; } /** * Read a file in the META-INF/services location. File name will be * fully qualified classname, in all lower-case, appended with ".properties" * If no file is found, then a an empty Property instance will be returned * @param clazz * @return Property file read. */ public static Properties readProperties(Class clazz) { return readProperties(clazz.getName()); } /** * Read a file in the META-INF/services named name appended with * ".properties" * * If no file is found, then a an empty Property instance will be returned * @param name name of file (use dots to separate subfolders). * @return Property file read. 
*/ public static Properties readProperties(String name) { Properties props = new Properties(); String serviceFile = name.toLowerCase(); if (!serviceFile.endsWith(".properties")) { serviceFile += ".properties"; } InputStream is = findResource(serviceFile); if (is != null) { try { props.load(new InputStreamReader(is, "UTF-8")); } catch (IOException ex) { Logger.getLogger(JarServiceProvider.class.getName()).log(Level.SEVERE, null, ex); } } return props; } /** * Read language specific files in the META-INF/services named name appended * with ".properties". The contents of the files are merged as follows: * <ul> * <li>First the default language file (&lt;name&gt;.properties) is read</li> * <li>Then the general language variant of the file * (&lt;name&gt;_&lt;lang&gt;.properties) is read and its * entries are added to/overwrite the entries of the default life</li> * <li>Last the country specific language variant of the file * (&lt;name&gt;_&lt;lang&gt;_&lt;country&gt;.properties) is read and its * entries are added to/overwrite the existing entries</li> * </ul> * Example: You have three files: * <ul> * <li>config.properties which contains the complete configuration * (most likely with English menus, tooltips)</li> * <li>config_de.properties which only contains menu names and tooltips * in German language</li> * <li>config_de_CH which might just contain entries for specific * Swiss spelling variant of some words in a tooltip</li> * <ul> * * If no filesis found, then a an empty Property instance will be returned * @param name name of file (use dots to separate subfolders). * @param locale The locale for which to read the files * @return Property file read. */ public static Properties readProperties(String name, Locale locale) { // If name already ends in ".properties", then cut this off name = name.toLowerCase(); int idx = name.lastIndexOf(".properties"); if (idx > 0) { name = name.substring(0, idx); } // 1. 
Read properties of default langauge Properties props = readProperties(name); // 2. Read properties of general language variant if (locale != null && locale.getLanguage() != null) { name += "_"+locale.getLanguage(); Properties langProps = readProperties(name); props.putAll(langProps); } // 3. Read properties of country specific language variant if (locale != null && locale.getCountry() != null) { name += "_"+locale.getCountry(); Properties countryProps = readProperties(name); props.putAll(countryProps); } return props; } /** * Read a file in the META-INF/services named name appended with * ".properties", and returns it as a <code>Map<String, String></code> * If no file is found, then a an empty Property instance will be returned * @param name name of file (use dots to separate subfolders). * @return Map of keys and values */ public static Map<String, String> readStringsMap(String name) { Properties props = readProperties(name); HashMap<String, String> map = new HashMap<String, String>(); if (props != null) { for (Map.Entry e : props.entrySet()) { map.put(e.getKey().toString(), e.getValue().toString()); } } return map; } /** * Read the given URL and returns a List of Strings for each input line * Each line will not have the line terminator. * * The resource is searched in /META-INF/services/url, then in * url, then the url is treated as a location in the current classpath * and an attempt to read it from that location is done. * * @param url location of file to read * @return List of Strings for each line read. 
or EMPTY_LIST if URL is not found */ @SuppressWarnings("unchecked") public static List<String> readLines(String url) { InputStream is = findResource(url); if (is == null) { return Collections.EMPTY_LIST; } List<String> lines = new ArrayList<String>(); try { BufferedReader br = new BufferedReader(new InputStreamReader(is, "UTF-8")); for (String line = br.readLine(); line != null; line = br.readLine()) { // Trim and unescape some control chars line = line.trim().replace("\\n", "\n").replace("\\t", "\t"); lines.add(line); } } catch (IOException ex) { LOG.log(Level.SEVERE, null, ex); } finally { try { is.close(); } catch (IOException ex) { LOG.log(Level.SEVERE, null, ex); } return lines; } } /** * Attempt to find a location url. The following locations are searched in * sequence: * url, * SERVICES_ROOT/url * all classpath/url * @param url * @param cl classloader * @return InputSTream at that location, or null if not found * @see JarServiceProvider#findResource(java.lang.String) */ public static InputStream findResource(String url, ClassLoader cl) { InputStream is = null; URL loc = cl.getResource(url); if (loc == null) { loc = cl.getResource(url); } if (loc == null) { loc = cl.getResource(SERVICES_ROOT + url); } if (loc == null) { is = ClassLoader.getSystemResourceAsStream(url); } else { try { is = loc.openStream(); } catch (IOException ex) { Logger.getLogger(JarServiceProvider.class.getName()).log(Level.SEVERE, null, ex); } } return is; } /** * Attempt to find a location url. The following locations are searched in * sequence: * url, * SERVICES_ROOT/url * all classpath/url * The System ClassLoader is used. * @param url * @return InputSTream at that location, or null if not found * @see JarServiceProvider#findResource(java.lang.String, java.lang.ClassLoader) */ public static InputStream findResource(String url) { return findResource(url, getClassLoader()); } }
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Tom Huybrechts
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.matrix;

import hudson.util.DescribableList;
import hudson.model.AbstractBuild;
import hudson.model.Cause;
import hudson.model.CauseAction;
import hudson.model.DependencyGraph;
import hudson.model.Descriptor;
import hudson.model.Hudson;
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.JDK;
import hudson.model.Label;
import hudson.model.ParametersAction;
import hudson.model.Project;
import hudson.model.SCMedItem;
import hudson.model.Queue.NonBlockingTask;
import hudson.model.Cause.LegacyCodeCause;
import hudson.scm.SCM;
import hudson.tasks.BuildWrapper;
import hudson.tasks.Builder;
import hudson.tasks.LogRotator;
import hudson.tasks.Publisher;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * One configuration of {@link MatrixProject}.
 *
 * <p>Most build settings (builders, publishers, SCM, JDK, log rotation) are
 * inherited from the parent {@link MatrixProject} rather than stored here;
 * this class mainly carries the axis-value {@link Combination} and keeps its
 * build numbers in sync with the parent's.
 *
 * @author Kohsuke Kawaguchi
 */
public class MatrixConfiguration extends Project<MatrixConfiguration,MatrixRun> implements SCMedItem, NonBlockingTask {
    /**
     * The actual value combination.
     */
    // Transient: restored via setCombination(...) on load, not serialized directly.
    private transient /*final*/ Combination combination;

    /**
     * Hash value of {@link #combination}. Cached for efficiency.
     */
    private transient String digestName;

    /**
     * Creates the configuration for the given axis-value combination; the
     * combination's string form becomes the item name.
     */
    public MatrixConfiguration(MatrixProject parent, Combination c) {
        super(parent,c.toString());
        setCombination(c);
    }

    @Override
    public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException {
        // directory name is not a name for us --- it's taken from the combination name
        super.onLoad(parent, combination.toString());
    }

    /**
     * Used during loading to set the combination back.
     */
    /*package*/ void setCombination(Combination c) {
        this.combination = c;
        // First 8 chars of the combination digest, used e.g. for short names.
        this.digestName = c.digest().substring(0,8);
    }

    /**
     * Build numbers are always synchronized with the parent.
     *
     * <p>
     * Computing this is bit tricky. Several considerations:
     *
     * <ol>
     * <li>A new configuration build #N is started while the parent build #N is building,
     *     and when that happens we want to return N.
     * <li>But the configuration build #N is done before the parent build #N finishes,
     *     and when that happens we want to return N+1 because that's going to be the next one.
     * <li>Configuration builds might skip some numbers if the parent build is aborted
     *     before this configuration is built.
     * <li>If nothing is building right now and the last build of the parent is #N,
     *     then we want to return N+1.
     * </ol>
     */
    @Override
    public int getNextBuildNumber() {
        AbstractBuild lb = getParent().getLastBuild();
        if(lb==null)    return 0; // parent never built: start from 0
        int n=lb.getNumber();
        if(!lb.isBuilding())    n++; // parent finished: next number is N+1 (case 2/4 above)
        // Never go below what this configuration itself has already built.
        lb = getLastBuild();
        if(lb!=null)    n = Math.max(n,lb.getNumber()+1);
        return n;
    }

    @Override
    public int assignBuildNumber() throws IOException {
        int nb = getNextBuildNumber();
        MatrixRun r = getLastBuild();
        if(r!=null && r.getNumber()>=nb)    // make sure we don't schedule the same build twice
            throw new IllegalStateException("Build #"+nb+" is already completed");
        return nb;
    }

    @Override
    public String getDisplayName() {
        // Compact form of the combination, omitting axes shared with siblings.
        return combination.toCompactString(getParent().getAxes());
    }

    @Override
    public MatrixProject getParent() {
        return (MatrixProject)super.getParent();
    }

    /**
     * Get the actual combination of the axes values for this {@link MatrixConfiguration}
     */
    public Combination getCombination() {
        return combination;
    }

    /**
     * Since {@link MatrixConfiguration} is always invoked from {@link MatrixRun}
     * once and just once, there's no point in having a quiet period.
     */
    @Override
    public int getQuietPeriod() {
        return 0;
    }

    /**
     * Inherit the value from the parent.
     */
    @Override
    public int getScmCheckoutRetryCount() {
        return getParent().getScmCheckoutRetryCount();
    }

    // Configurations are managed by the parent matrix project, never edited directly.
    @Override
    public boolean isConfigurable() {
        return false;
    }

    @Override
    protected Class<MatrixRun> getBuildClass() {
        return MatrixRun.class;
    }

    @Override
    protected MatrixRun newBuild() throws IOException {
        // for every MatrixRun there should be a parent MatrixBuild
        // NOTE(review): assumes the parent has at least one build (lb != null) —
        // confirm callers only invoke this while a parent MatrixBuild exists.
        MatrixBuild lb = getParent().getLastBuild();
        MatrixRun lastBuild = new MatrixRun(this, lb.getTimestamp());
        // Build number mirrors the parent build's number (see getNextBuildNumber()).
        lastBuild.number = lb.getNumber();

        builds.put(lastBuild);
        return lastBuild;
    }

    // Configurations do not participate in dependency-graph computation themselves.
    @Override
    protected void buildDependencyGraph(DependencyGraph graph) {
    }

    @Override
    public MatrixConfiguration asProject() {
        return this;
    }

    @Override
    public Label getAssignedLabel() {
        // The "label" axis value (if any) decides where this configuration runs.
        return Hudson.getInstance().getLabel(combination.get("label"));
    }

    @Override
    public String getPronoun() {
        return Messages.MatrixConfiguration_Pronoun();
    }

    @Override
    public JDK getJDK() {
        // The "jdk" axis value (if any) selects the JDK installation.
        return Hudson.getInstance().getJDK(combination.get("jdk"));
    }

//
// inherit build setting from the parent project
//
    @Override
    public List<Builder> getBuilders() {
        return getParent().getBuilders();
    }

    @Override
    public Map<Descriptor<Publisher>, Publisher> getPublishers() {
        return getParent().getPublishers();
    }

    @Override
    public DescribableList<Builder, Descriptor<Builder>> getBuildersList() {
        return getParent().getBuildersList();
    }

    @Override
    public DescribableList<Publisher, Descriptor<Publisher>> getPublishersList() {
        return getParent().getPublishersList();
    }

    @Override
    public Map<Descriptor<BuildWrapper>, BuildWrapper> getBuildWrappers() {
        return getParent().getBuildWrappers();
    }

    @Override
    public Publisher getPublisher(Descriptor<Publisher> descriptor) {
        return getParent().getPublisher(descriptor);
    }

    @Override
    public LogRotator getLogRotator() {
        // Log rotation is delegated; see LinkedLogRotator (defined elsewhere) for details.
        return new LinkedLogRotator();
    }

    @Override
    public SCM getScm() {
        return getParent().getScm();
    }

    // First 8 chars of the combination digest; see setCombination(...).
    /*package*/ String getDigestName() {
        return digestName;
    }

    /**
     * JDK cannot be set on {@link MatrixConfiguration} because
     * it's controlled by {@link MatrixProject}.
     * @deprecated
     *      Not supported.
     */
    @Override
    public void setJDK(JDK jdk) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * @deprecated
     *      Value is controlled by {@link MatrixProject}.
     */
    @Override
    public void setLogRotator(LogRotator logRotator) {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns true if this configuration is a configuration
     * currently in use today (as opposed to the ones that are
     * there only to keep the past record.)
     *
     * @see MatrixProject#getActiveConfigurations()
     */
    public boolean isActiveConfiguration() {
        return getParent().getActiveConfigurations().contains(this);
    }

    /**
     * On Cygwin, path names cannot be longer than 256 chars.
     * See http://cygwin.com/ml/cygwin/2005-04/msg00395.html and
     * http://www.nabble.com/Windows-Filename-too-long-errors-t3161089.html for
     * the background of this issue. Setting this flag to true would
     * cause Hudson to use cryptic but short path name, giving more room for
     * jobs to use longer path names.
     */
    public static boolean useShortWorkspaceName = Boolean.getBoolean(MatrixConfiguration.class.getName()+".useShortWorkspaceName");

    /**
     * @deprecated
     *      Use {@link #scheduleBuild(ParametersAction, Cause)}.  Since 1.283
     */
    public boolean scheduleBuild(ParametersAction parameters) {
        return scheduleBuild(parameters, new LegacyCodeCause());
    }

    /**
     * Schedules a build of this configuration on the queue.
     *
     * @param parameters
     *      Can be null.
     */
    public boolean scheduleBuild(ParametersAction parameters, Cause c) {
        // Quiet period is always 0 for configurations (see getQuietPeriod()).
        return Hudson.getInstance().getQueue().schedule(this, getQuietPeriod(), parameters, new CauseAction(c))!=null;
    }
}
package net.polybugger.apollot; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.os.Bundle; import android.os.Handler; import android.support.annotation.Nullable; import android.support.design.widget.Snackbar; import android.support.v4.app.Fragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.TextView; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import net.polybugger.apollot.db.ApolloDbAdapter; import net.polybugger.apollot.db.ClassContract; import net.polybugger.apollot.db.ClassItemContract; import net.polybugger.apollot.db.ClassItemTypeContract; import net.polybugger.apollot.db.DateTimeFormat; import org.apache.commons.lang3.StringUtils; public class ClassItemsFragment extends Fragment { public static final String CLASS_ARG = "net.polybugger.apollot.class_arg"; public static boolean REQUERY = false; public static boolean REQUERY_CLASS_ITEM = false; public static boolean DELETE_CLASS_ITEM = false; public static ClassItemContract.ClassItemEntry CLASS_ITEM = null; private ClassContract.ClassEntry mClass; private RecyclerView mRecyclerView; private Adapter mAdapter; public static ClassItemsFragment newInstance(ClassContract.ClassEntry _class) { ClassItemsFragment f = new ClassItemsFragment(); Bundle args = new Bundle(); args.putSerializable(CLASS_ARG, _class); f.setArguments(args); return f; } @Override public void onAttach(Context context) { super.onAttach(context); if(context instanceof Activity) { /* try { mListener = (Listener) context; } 
catch(ClassCastException e) { throw new ClassCastException(context.toString() + " must implement " + Listener.class.toString()); } */ } } @Override public void onDetach() { // mListener = null; super.onDetach(); } @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { setHasOptionsMenu(true); Bundle args = getArguments(); mClass = (ClassContract.ClassEntry) args.getSerializable(CLASS_ARG); View view = inflater.inflate(R.layout.fragment_class_items, container, false); mRecyclerView = (RecyclerView) view.findViewById(R.id.recycler_view); mRecyclerView.setHasFixedSize(true); LinearLayoutManager linearLayoutManager = new LinearLayoutManager(getActivity()); linearLayoutManager.setOrientation(LinearLayoutManager.VERTICAL); mRecyclerView.setLayoutManager(linearLayoutManager); mAdapter = new Adapter(this); mRecyclerView.setAdapter(mAdapter); ClassActivityFragment rf = (ClassActivityFragment) getFragmentManager().findFragmentByTag(ClassActivityFragment.TAG); if(rf != null) rf.getClassItemsSummary(mClass, getTag()); return view; } @Override public void onResume() { super.onResume(); // TODO onResume if(REQUERY) { ClassActivityFragment rf = (ClassActivityFragment) getFragmentManager().findFragmentByTag(ClassActivityFragment.TAG); if(rf != null) rf.getClassItemsSummary(mClass, getTag()); REQUERY = false; REQUERY_CLASS_ITEM = false; DELETE_CLASS_ITEM = false; CLASS_ITEM = null; } else if(DELETE_CLASS_ITEM) { mAdapter.removeByClassItem(CLASS_ITEM); REQUERY = false; REQUERY_CLASS_ITEM = false; DELETE_CLASS_ITEM = false; CLASS_ITEM = null; new Handler().postDelayed(new Runnable() { @Override public void run() { Snackbar.make(getActivity().findViewById(R.id.coordinator_layout), getString(R.string.class_item_deleted), Snackbar.LENGTH_SHORT).show(); } }, MainActivity.SNACKBAR_POST_DELAYED_MSEC); } else if(REQUERY_CLASS_ITEM) { ClassActivityFragment rf = (ClassActivityFragment) 
getFragmentManager().findFragmentByTag(ClassActivityFragment.TAG); if(rf != null) { rf.getClassItemSummary(CLASS_ITEM, getTag()); CLASS_ITEM = null; } REQUERY = false; REQUERY_CLASS_ITEM = false; DELETE_CLASS_ITEM = false; CLASS_ITEM = null; } else { } } public void requeryClass(ClassContract.ClassEntry _class) { mClass = _class; } public void insertClassItem(ClassItemContract.ClassItemEntry classItem, long id, String fragmentTag) { if(id != -1) { classItem.setId(id); ClassItemSummary classItemSummary = new ClassItemSummary(classItem); mAdapter.add(classItemSummary); mRecyclerView.smoothScrollToPosition(mAdapter.getItemCount() - 1); new Handler().postDelayed(new Runnable() { @Override public void run() { Snackbar.make(getActivity().findViewById(R.id.coordinator_layout), getString(R.string.class_item_added), Snackbar.LENGTH_SHORT).show(); } }, MainActivity.SNACKBAR_POST_DELAYED_MSEC); } } public void updateClassItemSummary(ClassItemSummary classItemSummary, String fragmentTag) { mAdapter.update(classItemSummary); } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); switch(id) { case R.id.action_sort_item_description: mAdapter.sortBy(R.id.action_sort_item_description); return true; case R.id.action_sort_item_date: mAdapter.sortBy(R.id.action_sort_item_date); return true; case R.id.action_sort_item_type: mAdapter.sortBy(R.id.action_sort_item_type); return true; case R.id.action_sort_check_attendance: mAdapter.sortBy(R.id.action_sort_check_attendance); return true; case R.id.action_sort_perfect_score: mAdapter.sortBy(R.id.action_sort_perfect_score); return true; case R.id.action_sort_submission_due_date: mAdapter.sortBy(R.id.action_sort_submission_due_date); return true; } return super.onOptionsItemSelected(item); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.fragment_class_items, menu); } public void 
onGetClassItemsSummary(ArrayList<ClassItemSummary> arrayList, String fragmentTag) { mAdapter.setArrayList(arrayList); } // signature continues from the preceding line

/**
 * RecyclerView adapter that renders {@link ClassItemSummary} cards.
 *
 * <p>Supports toggling sort order: calling {@link #sortBy(int)} with the same action id
 * twice flips between ascending and descending (the sign of {@code mSortId} encodes the
 * direction). The comparator now honors the {@link Comparator} contract for equal and
 * both-null values — the previous implementation returned nonzero for equal pairs in the
 * perfect-score, check-attendance and both-null date/type branches, which can make
 * {@code Collections.sort} (TimSort) throw "Comparison method violates its general
 * contract!".</p>
 */
public static class Adapter extends RecyclerView.Adapter<Adapter.ViewHolder> {

    private final Fragment mFragment;
    private ArrayList<ClassItemSummary> mArrayList;
    // |mSortId| is the R.id of the active sort action; negative means descending.
    private int mSortId;
    private final Comparator<ClassItemSummary> mComparator;

    public Adapter(Fragment fragment) {
        mFragment = fragment;
        mArrayList = new ArrayList<>();
        mComparator = new Comparator<ClassItemSummary>() {
            @Override
            public int compare(ClassItemSummary lhs, ClassItemSummary rhs) {
                // |mSortId| selects the key; sortBy() negates the id to toggle direction.
                final int key = Math.abs(mSortId);
                final int sign = (mSortId < 0) ? -1 : 1;
                int result = 0;
                if (key == R.id.action_sort_item_description) {
                    result = lhs.mClassItem.getDescription()
                            .compareToIgnoreCase(rhs.mClassItem.getDescription());
                } else if (key == R.id.action_sort_item_date) {
                    result = compareDatesNullsLast(lhs.mClassItem.getItemDate(),
                            rhs.mClassItem.getItemDate());
                } else if (key == R.id.action_sort_item_type) {
                    result = compareTypesNullsLast(lhs.mClassItem.getItemType(),
                            rhs.mClassItem.getItemType());
                } else if (key == R.id.action_sort_check_attendance) {
                    // Items that check attendance sort first (ascending).
                    result = Boolean.compare(rhs.mClassItem.isCheckAttendance(),
                            lhs.mClassItem.isCheckAttendance());
                } else if (key == R.id.action_sort_perfect_score) {
                    result = compareScoresNullsLast(lhs.mClassItem.getPerfectScore(),
                            rhs.mClassItem.getPerfectScore());
                } else if (key == R.id.action_sort_submission_due_date) {
                    result = compareDatesNullsLast(lhs.mClassItem.getSubmissionDueDate(),
                            rhs.mClassItem.getSubmissionDueDate());
                }
                return sign * result;
            }
        };
    }

    /**
     * Orders non-null dates ascending; null dates sort last; two nulls compare equal.
     * Replaces the original Calendar round-trip — {@link Date#compareTo(Date)} yields
     * the same millisecond ordering.
     */
    private static int compareDatesNullsLast(Date lhs, Date rhs) {
        if (lhs == null && rhs == null) return 0;
        if (lhs == null) return 1;
        if (rhs == null) return -1;
        return lhs.compareTo(rhs);
    }

    /** Orders item types by description (case-insensitive); nulls last; two nulls equal. */
    private static int compareTypesNullsLast(ClassItemTypeContract.ClassItemTypeEntry lhs,
                                             ClassItemTypeContract.ClassItemTypeEntry rhs) {
        if (lhs == null && rhs == null) return 0;
        if (lhs == null) return 1;
        if (rhs == null) return -1;
        return lhs.getDescription().compareToIgnoreCase(rhs.getDescription());
    }

    /**
     * Orders scores ascending; nulls last; two nulls equal. Uses {@link Float#compare}
     * so equal scores return 0 (the old {@code lps < rps ? -1 : 1} never did).
     */
    private static int compareScoresNullsLast(Float lhs, Float rhs) {
        if (lhs == null && rhs == null) return 0;
        if (lhs == null) return 1;
        if (rhs == null) return -1;
        return Float.compare(lhs, rhs);
    }

    /** Replaces the backing list wholesale and refreshes the view. */
    public void setArrayList(ArrayList<ClassItemSummary> arrayList) {
        mArrayList = arrayList;
        notifyDataSetChanged();
    }

    /** Appends one entry and refreshes the view. */
    public void add(ClassItemSummary entry) {
        mArrayList.add(entry);
        notifyDataSetChanged();
    }

    /**
     * Removes the first summary whose class item equals {@code classItem}.
     *
     * @return the removed summary, or {@code null} if no entry matched.
     *         (Fix: the previous version returned the last inspected element
     *         even when nothing matched.)
     */
    public ClassItemSummary removeByClassItem(ClassItemContract.ClassItemEntry classItem) {
        ClassItemSummary removed = null;
        int size = mArrayList.size();
        for (int i = 0; i < size; ++i) {
            ClassItemSummary candidate = mArrayList.get(i);
            if (candidate.mClassItem.equals(classItem)) {
                removed = candidate;
                mArrayList.remove(i);
                break;
            }
        }
        notifyDataSetChanged();
        return removed;
    }

    /** Replaces the entry with a matching class item in place, or appends when absent. */
    public void update(ClassItemSummary classItemSummary) {
        int size = mArrayList.size();
        int pos = size; // default: append at the end when no existing entry matches
        for (int i = 0; i < size; ++i) {
            if (mArrayList.get(i).mClassItem.equals(classItemSummary.mClassItem)) {
                pos = i;
                break;
            }
        }
        if (pos < size) {
            mArrayList.remove(pos);
        }
        mArrayList.add(pos, classItemSummary);
        notifyDataSetChanged();
    }

    /**
     * Sorts by the given action id; selecting the same id again toggles the direction
     * (encoded as the sign of {@code mSortId}).
     */
    public void sortBy(int sortId) {
        mSortId = (mSortId == sortId) ? -sortId : sortId;
        Collections.sort(mArrayList, mComparator);
        notifyDataSetChanged();
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        return new ViewHolder(LayoutInflater.from(parent.getContext())
                .inflate(R.layout.view_holder_class_item_summary, parent, false));
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        final ClassItemSummary entry = mArrayList.get(position);
        ClassItemTypeContract.ClassItemTypeEntry itemType = entry.mClassItem.getItemType();

        // Tint the card by item-type color; null falls back to the default background.
        if (itemType != null) {
            holder.mBackgroundLayout.setBackgroundResource(
                    BackgroundRect.getBackgroundResource(itemType.getColor(), mFragment.getContext()));
        } else {
            holder.mBackgroundLayout.setBackgroundResource(
                    BackgroundRect.getBackgroundResource(null, mFragment.getContext()));
        }

        applyPositionMargins(holder, position);

        // Tapping the card opens the item's detail activity.
        holder.mClickableLayout.setTag(entry);
        holder.mClickableLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                ClassItemSummary classItemSummary = (ClassItemSummary) v.getTag();
                Intent intent = new Intent(mFragment.getContext(), ClassItemActivity.class);
                Bundle args = new Bundle();
                args.putSerializable(ClassItemActivity.CLASS_ARG,
                        ((ClassActivity) mFragment.getActivity()).getClassEntry());
                args.putSerializable(ClassItemActivity.CLASS_ITEM_ARG, classItemSummary.mClassItem);
                intent.putExtras(args);
                mFragment.startActivity(intent);
            }
        });

        holder.mTitleTextView.setText(entry.mClassItem.getDescription());

        Resources res = mFragment.getResources();
        // Japanese locales use a dedicated date template.
        final SimpleDateFormat sdf;
        if (StringUtils.equalsIgnoreCase(res.getConfiguration().locale.getLanguage(),
                ApolloDbAdapter.JA_LANGUAGE)) {
            sdf = new SimpleDateFormat(DateTimeFormat.DATE_DISPLAY_TEMPLATE_JA,
                    res.getConfiguration().locale);
        } else {
            sdf = new SimpleDateFormat(DateTimeFormat.DATE_DISPLAY_TEMPLATE,
                    res.getConfiguration().locale);
        }

        // Each optional detail row is hidden when it has no data.
        Date itemDate = entry.mClassItem.getItemDate();
        if (itemDate == null) {
            holder.mItemDateTextView.setVisibility(View.GONE);
        } else {
            holder.mItemDateTextView.setText(sdf.format(itemDate));
            holder.mItemDateTextView.setVisibility(View.VISIBLE);
        }

        if (itemType == null) {
            holder.mItemTypeTextView.setVisibility(View.GONE);
        } else {
            holder.mItemTypeTextView.setText(itemType.getDescription());
            holder.mItemTypeTextView.setVisibility(View.VISIBLE);
        }

        holder.mCheckAttendanceTextView.setVisibility(
                entry.mClassItem.isCheckAttendance() ? View.VISIBLE : View.GONE);

        // Score row is shown only when scores are recorded AND a perfect score is set.
        Float perfectScore = entry.mClassItem.isRecordScores()
                ? entry.mClassItem.getPerfectScore() : null;
        if (perfectScore == null) {
            holder.mPerfectScoreLinearLayout.setVisibility(View.GONE);
        } else {
            holder.mPerfectScoreTextView.setText(String.format("%.2f", perfectScore));
            holder.mPerfectScoreLinearLayout.setVisibility(View.VISIBLE);
        }

        // Due-date row is shown only when submissions are recorded AND a due date is set.
        Date submissionDueDate = entry.mClassItem.isRecordSubmissions()
                ? entry.mClassItem.getSubmissionDueDate() : null;
        if (submissionDueDate == null) {
            holder.mSubmissionDueDateLinearLayout.setVisibility(View.GONE);
        } else {
            holder.mSubmissionDueDateTextView.setText(sdf.format(submissionDueDate));
            holder.mSubmissionDueDateLinearLayout.setVisibility(View.VISIBLE);
        }

        adjustDetailPadding(holder);
    }

    /**
     * Gives the first card extra top margin and the last card extra bottom margin so the
     * list has symmetric outer spacing.
     */
    private void applyPositionMargins(ViewHolder holder, int position) {
        Resources res = mFragment.getResources();
        int topMargin = res.getDimensionPixelSize(R.dimen.recycler_view_item_margin_top);
        int rightMargin = res.getDimensionPixelSize(R.dimen.recycler_view_item_margin_right);
        int bottomMargin = res.getDimensionPixelSize(R.dimen.recycler_view_item_margin_bottom);
        int leftMargin = res.getDimensionPixelSize(R.dimen.recycler_view_item_margin_left);
        LinearLayout.LayoutParams layoutParams =
                (LinearLayout.LayoutParams) holder.mBackgroundLayout.getLayoutParams();
        if (position == 0) {
            layoutParams.setMargins(leftMargin, topMargin * 2, rightMargin, bottomMargin);
        } else if (position == (mArrayList.size() - 1)) {
            layoutParams.setMargins(leftMargin, topMargin, rightMargin, bottomMargin * 2);
        } else {
            layoutParams.setMargins(leftMargin, topMargin, rightMargin, bottomMargin);
        }
        holder.mBackgroundLayout.setLayoutParams(layoutParams);
    }

    /** Moves the card's bottom padding onto whichever detail row is the last one visible. */
    private static void adjustDetailPadding(ViewHolder holder) {
        int paddingTop = holder.mTitleTextView.getPaddingTop();
        int paddingRight = holder.mTitleTextView.getPaddingRight();
        int paddingLeft = holder.mTitleTextView.getPaddingLeft();
        if (holder.mSubmissionDueDateLinearLayout.getVisibility() == View.VISIBLE) {
            holder.mSubmissionDueDateLinearLayout.setPadding(0, 0, 0, paddingTop);
            holder.mPerfectScoreLinearLayout.setPadding(0, 0, 0, 0);
            holder.mCheckAttendanceTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mItemTypeTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mItemDateTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, 0);
        } else if (holder.mPerfectScoreLinearLayout.getVisibility() == View.VISIBLE) {
            holder.mPerfectScoreLinearLayout.setPadding(0, 0, 0, paddingTop);
            holder.mCheckAttendanceTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mItemTypeTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mItemDateTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, 0);
        } else if (holder.mCheckAttendanceTextView.getVisibility() == View.VISIBLE) {
            holder.mCheckAttendanceTextView.setPadding(paddingLeft, 0, paddingRight, paddingTop);
            holder.mItemTypeTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mItemDateTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, 0);
        } else if (holder.mItemTypeTextView.getVisibility() == View.VISIBLE) {
            holder.mItemTypeTextView.setPadding(paddingLeft, 0, paddingRight, paddingTop);
            holder.mItemDateTextView.setPadding(paddingLeft, 0, paddingRight, 0);
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, 0);
        } else if (holder.mItemDateTextView.getVisibility() == View.VISIBLE) {
            holder.mItemDateTextView.setPadding(paddingLeft, 0, paddingRight, paddingTop);
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, 0);
        } else {
            holder.mTitleTextView.setPadding(paddingLeft, paddingTop, paddingRight, paddingTop);
        }
    }

    @Override
    public int getItemCount() {
        return mArrayList.size();
    }

    /** Caches view references for one summary card. */
    public static class ViewHolder extends RecyclerView.ViewHolder {
        protected LinearLayout mBackgroundLayout;
        protected LinearLayout mClickableLayout;
        protected TextView mTitleTextView;
        protected TextView mItemDateTextView;
        protected TextView mItemTypeTextView;
        protected TextView mCheckAttendanceTextView;
        protected LinearLayout mPerfectScoreLinearLayout;
        protected TextView mPerfectScoreTextView;
        protected LinearLayout mSubmissionDueDateLinearLayout;
        protected TextView mSubmissionDueDateTextView;

        public ViewHolder(View itemView) {
            super(itemView);
            mBackgroundLayout = (LinearLayout) itemView.findViewById(R.id.background_layout);
            mClickableLayout = (LinearLayout) itemView.findViewById(R.id.clickable_layout);
            mTitleTextView = (TextView) itemView.findViewById(R.id.title_text_view);
            mItemDateTextView = (TextView) itemView.findViewById(R.id.item_date_text_view);
            mItemTypeTextView = (TextView) itemView.findViewById(R.id.item_type_text_view);
            mCheckAttendanceTextView = (TextView) itemView.findViewById(R.id.check_attendance_text_view);
            mPerfectScoreLinearLayout = (LinearLayout) itemView.findViewById(R.id.perfect_score_linear_layout);
            mPerfectScoreTextView = (TextView) itemView.findViewById(R.id.perfect_score_text_view);
            mSubmissionDueDateLinearLayout = (LinearLayout) itemView.findViewById(R.id.submission_due_date_linear_layout);
            mSubmissionDueDateTextView = (TextView) itemView.findViewById(R.id.submission_due_date_text_view);
        }
    }
}

/** Row model: wraps a single class item entry for display in the list. */
public static class ClassItemSummary {
    public ClassItemContract.ClassItemEntry mClassItem;

    public ClassItemSummary(ClassItemContract.ClassItemEntry classItem) {
        mClassItem = classItem;
    }
}
}
package org.robolectric.bytecode;

import javassist.CannotCompileException;
import javassist.ClassMap;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtMethod;
import javassist.CtNewConstructor;
import javassist.CtNewMethod;
import javassist.Modifier;
import javassist.NotFoundException;
import javassist.expr.ConstructorCall;
import javassist.expr.ExprEditor;
import javassist.expr.MethodCall;

import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Rewrites a single {@link CtClass} (via Javassist) so that its constructors and methods
 * route through Robolectric's shadow-dispatch machinery ({@link RobolectricInternals}).
 * Constructors are split into a {@code __constructor__} method plus a thin delegating
 * constructor; each method gains an interception preamble and a "direct" copy under a
 * mangled name so the original body stays callable.
 */
public class MethodGenerator {
    // Name of the synthetic method each constructor body is moved into.
    public static final String CONSTRUCTOR_METHOD_NAME = "__constructor__";
    // Matches names like "Foo$1" — javac's naming scheme for anonymous inner classes.
    private static final Pattern ANONYMOUS_INNER_CLASS_NAME = Pattern.compile("\\$\\d+$");

    private final CtClass ctClass;
    private final Set<Setup.MethodRef> methodsToIntercept;
    private CtClass objectCtClass;
    // name+signature strings of methods already instrumented, to avoid double work.
    private Set<String> instrumentedMethods = new HashSet<String>();

    // ClassMap that maps every class to itself; passed to CtNewMethod.copy so the copied
    // body keeps referring to the original class names.
    public static final ClassMap IDENTITY_CLASS_MAP = new ClassMap() {
        @Override
        public Object get(Object jvmClassName) {
            return jvmClassName;
        }
    };

    public MethodGenerator(CtClass ctClass, Setup setup) {
        this.ctClass = ctClass;
        this.methodsToIntercept = setup.methodsToIntercept();

        try {
            objectCtClass = ctClass.getClassPool().get(Object.class.getName());
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Instruments every declared constructor: first adds a {@code __constructor__}
     * placeholder per constructor (and a public default constructor when the class has
     * none and is not anonymous), then rewrites each constructor to delegate.
     */
    public void fixConstructors() throws CannotCompileException, NotFoundException {
        if (ctClass.isEnum()) {
            // skip enum constructors because they are not stubs in android.jar
            return;
        }

        boolean hasDefault = false;

        for (CtConstructor ctConstructor : ctClass.getDeclaredConstructors()) {
            try {
                createPlaceholderConstructorMethod(ctConstructor);
                if (ctConstructor.getParameterTypes().length == 0) {
                    hasDefault = true;
                    // Widen the no-arg constructor so shadows can instantiate the class.
                    ctConstructor.setModifiers(Modifier.setPublic(ctConstructor.getModifiers()));
                }
            } catch (Exception e) {
                throw new RuntimeException("problem instrumenting " + ctConstructor, e);
            }
        }

        if (!hasDefault && !isAnonymousInnerClass()) {
            // Synthesize a no-arg __constructor__ and a default constructor calling it.
            ctClass.addMethod(CtNewMethod.make(CtClass.voidType, CONSTRUCTOR_METHOD_NAME, new CtClass[0], new CtClass[0], "{}", ctClass));
            ctClass.addConstructor(CtNewConstructor.make(new CtClass[0], new CtClass[0], "{\n" + CONSTRUCTOR_METHOD_NAME + "();\n}\n", ctClass));
        }

        for (CtConstructor ctConstructor : ctClass.getDeclaredConstructors()) {
            try {
                fixConstructor(ctConstructor);
            } catch (Exception e) {
                throw new RuntimeException("problem instrumenting " + ctConstructor, e);
            }
        }
    }

    private boolean isAnonymousInnerClass() {
        return ANONYMOUS_INNER_CLASS_NAME.matcher(ctClass.getName()).find();
    }

    /** Adds an empty {@code __constructor__} with this constructor's exact signature. */
    public void createPlaceholderConstructorMethod(CtConstructor ctConstructor) throws NotFoundException, CannotCompileException {
        ctClass.addMethod(CtNewMethod.make(CtClass.voidType, CONSTRUCTOR_METHOD_NAME, ctConstructor.getParameterTypes(), ctConstructor.getExceptionTypes(), "{}", ctClass));
    }

    /**
     * Moves the constructor's real body into {@code __constructor__} (replacing the
     * placeholder) and rewrites the constructor to a single delegating call. Chained
     * super()/this() calls inside the body are redirected to the corresponding
     * {@code __constructor__} when the target class has one.
     */
    public void fixConstructor(CtConstructor ctConstructor) throws NotFoundException, CannotCompileException {
        ctConstructor.instrument(new NastyMethodInterceptor() {
            @Override
            public void edit(ConstructorCall c) throws CannotCompileException {
                try {
                    CtConstructor constructor = c.getConstructor();
                    if (c.isSuper() && !hasConstructorMethod(constructor.getDeclaringClass())) {
                        // Superclass was not instrumented — leave the super() call alone.
                        return;
                    }
                    c.replace("{\n" + (c.isSuper() ? "super" : "this") + "." + CONSTRUCTOR_METHOD_NAME + "(" + makeParameterList(constructor.getParameterTypes().length) + ");\n" + "}");
                } catch (NotFoundException e) {
                    throw new RuntimeException(e);
                }
            }
        });

        // Swap the empty placeholder for a __constructor__ holding the real body.
        ctClass.removeMethod(ctClass.getDeclaredMethod(CONSTRUCTOR_METHOD_NAME, ctConstructor.getParameterTypes()));
        CtMethod ctorMethod = ctConstructor.toMethod(CONSTRUCTOR_METHOD_NAME, ctClass);
        ctClass.addMethod(ctorMethod);

        String methodBody = generateConstructorBody(ctConstructor.getParameterTypes());
        ctConstructor.setBody("{\n" + methodBody + "}\n");
    }

    /** True when {@code declaringClass} declares a {@code __constructor__} method. */
    private boolean hasConstructorMethod(CtClass declaringClass) throws NotFoundException {
        try {
            declaringClass.getDeclaredMethod(CONSTRUCTOR_METHOD_NAME);
            return true;
        } catch (NotFoundException e) {
            return false;
        }
    }

    /** Builds the delegating body: {@code { __constructor__($1, ...); } }. */
    public String generateConstructorBody(CtClass[] parameterTypes) throws NotFoundException {
        return "{\n" + CONSTRUCTOR_METHOD_NAME + "(" + makeParameterList(parameterTypes.length) + ");\n" + "}\n";
    }

    /**
     * Instruments every declared method, then equals/hashCode/toString if the class did
     * not declare them itself (so shadows can intercept the inherited versions too).
     */
    public void fixMethods() throws NotFoundException, CannotCompileException {
        for (CtMethod ctMethod : ctClass.getDeclaredMethods()) {
            fixMethod(ctMethod, true);
            instrumentedMethods.add(ctMethod.getName() + ctMethod.getSignature());
        }
        fixMethodIfNotAlreadyFixed("equals", "(Ljava/lang/Object;)Z");
        fixMethodIfNotAlreadyFixed("hashCode", "()I");
        fixMethodIfNotAlreadyFixed("toString", "()Ljava/lang/String;");
    }

    /** Instruments an inherited method unless a declared override was already handled. */
    public void fixMethodIfNotAlreadyFixed(String methodName, String signature) throws NotFoundException {
        // Set.add returns true only when the key was not present yet.
        if (instrumentedMethods.add(methodName + signature)) {
            CtMethod equalsMethod = ctClass.getMethod(methodName, signature);
            fixMethod(equalsMethod, false);
        }
    }

    /**
     * Instruments one method: clears native/final, prepends the shadow-dispatch preamble,
     * and (except for {@code __constructor__}) adds a "direct" copy of the original body
     * under a mangled name so uninstrumented calls remain possible.
     *
     * @param ctMethod          the method to rewrite
     * @param isDeclaredOnClass true when the method is declared on {@link #ctClass} itself
     *                          (as opposed to being inherited, e.g. equals/hashCode)
     */
    public void fixMethod(final CtMethod ctMethod, boolean isDeclaredOnClass) throws NotFoundException {
        // Describe the method BEFORE mutating it so error messages show the original form.
        String describeBefore;
        try {
            describeBefore = describe(ctMethod);
        } catch (NotFoundException e) {
            // Deliberate best-effort: log and skip methods we cannot even describe.
            new RuntimeException("Unable to instrument " + ctClass.getName() + "." + ctMethod.getName() + "()", e).printStackTrace();
            return;
        }
        try {
            CtClass declaringClass = ctMethod.getDeclaringClass();
            int originalModifiers = ctMethod.getModifiers();
            if (isDeclaredOnClass) {
                fixCallsToSameMethodOnSuper(ctMethod);
            }
            boolean wasNative = Modifier.isNative(originalModifiers);
            boolean wasFinal = Modifier.isFinal(originalModifiers);
            boolean wasAbstract = Modifier.isAbstract(originalModifiers);
            boolean wasDeclaredInClass = ctClass == declaringClass;
            if (wasFinal && ctClass.isEnum()) {
                // Enum values() etc. — leave final enum methods untouched.
                return;
            }
            int newModifiers = originalModifiers;
            if (wasNative) {
                newModifiers = Modifier.clear(newModifiers, Modifier.NATIVE);
            }
            if (wasFinal) {
                newModifiers = Modifier.clear(newModifiers, Modifier.FINAL);
            }
            if (isDeclaredOnClass) {
                ctMethod.setModifiers(newModifiers);
            }
            CtClass returnCtClass = ctMethod.getReturnType();
            RoboType returnType = RoboType.find(returnCtClass);
            String methodName = ctMethod.getName();
            CtClass[] paramTypes = ctMethod.getParameterTypes();
            boolean isStatic = Modifier.isStatic(originalModifiers);
            String methodBody = generateMethodBody(ctMethod, wasNative, wasAbstract, returnCtClass, returnType, isStatic, !isDeclaredOnClass);
            String directMethodName = RobolectricInternals.directMethodName(ctClass.getName(), methodName);
            if (!CONSTRUCTOR_METHOD_NAME.equals(methodName)) {
                if (!wasNative && !wasAbstract) {
                    // Preserve the original body under the mangled "direct" name.
                    CtMethod copy = CtNewMethod.copy(ctMethod, directMethodName, ctClass, IDENTITY_CLASS_MAP);
                    ctClass.addMethod(copy);
                } else {
                    // Native/abstract methods have no body to copy — add an empty stub.
                    ctClass.addMethod(CtNewMethod.make(returnCtClass, directMethodName, paramTypes, new CtClass[0], null, ctClass));
                }
            }
            if (!isDeclaredOnClass) {
                // Inherited method: declare an override whose fallback calls super.
                CtMethod newMethod = makeNewMethod(ctMethod, returnCtClass, methodName, paramTypes, "{\n" + methodBody + generateCallToSuper(ctMethod) + "\n}");
                newMethod.setModifiers(newModifiers);
                if (wasDeclaredInClass) {
                    ctMethod.insertBefore("{\n" + methodBody + "}\n");
                } else {
                    ctClass.addMethod(newMethod);
                }
            } else if (wasAbstract || wasNative) {
                // No original body — replace wholesale with the dispatch body.
                CtMethod newMethod = makeNewMethod(ctMethod, returnCtClass, methodName, paramTypes, "{\n" + methodBody + "\n}");
                ctMethod.setBody(newMethod, null);
            } else {
                // Normal case: prepend dispatch preamble, keep original body as fallback.
                ctMethod.insertBefore("{\n" + methodBody + "}\n");
            }
        } catch (Exception e) {
            throw new RuntimeException("problem instrumenting " + describeBefore, e);
        }
    }

    /**
     * Rewrites {@code super.sameMethod(...)} calls inside {@code ctMethod} so the
     * superclass implementation is invoked "directly" (bypassing shadow dispatch).
     */
    public void fixCallsToSameMethodOnSuper(final CtMethod ctMethod) throws CannotCompileException {
        ctMethod.instrument(new ExprEditor() {
            @Override
            public void edit(MethodCall call) throws CannotCompileException {
                if (call.isSuper() && call.getMethodName().equals(ctMethod.getName())) {
                    try {
                        boolean returnsVoid = ctMethod.getReturnType().equals(CtClass.voidType);
                        try {
                            String callParams = makeParameterList(call.getMethod().getParameterTypes().length);
                            call.replace(RobolectricInternals.class.getName() + ".directlyOn($0);\n" + (returnsVoid ? "" : "$_ = ") + "super." + call.getMethodName() + "(" + callParams + ");");
                        } catch (CannotCompileException e) {
                            throw new RuntimeException(e);
                        }
                    } catch (NotFoundException e) {
                        throw new RuntimeException(e);
                    }
                }
                super.edit(call);
            }
        });
    }

    /** Human-readable signature, e.g. "public final int android.view.View.getId()". */
    public static String describe(CtMethod ctMethod) throws NotFoundException {
        return Modifier.toString(ctMethod.getModifiers()) + " " + ctMethod.getReturnType().getSimpleName() + " " + ctMethod.getLongName();
    }

    /** Compiles a new method on {@link #ctClass} mirroring {@code ctMethod}'s signature. */
    public CtMethod makeNewMethod(CtMethod ctMethod, CtClass returnCtClass, String methodName, CtClass[] paramTypes, String methodBody) throws CannotCompileException, NotFoundException {
        return CtNewMethod.make(ctMethod.getModifiers(), returnCtClass, methodName, paramTypes, ctMethod.getExceptionTypes(), methodBody, ctClass);
    }

    /**
     * Builds the fallback "call super" source snippet; when the direct superclass is not
     * java.lang.Object it is assumed instrumented, so dispatch is first disabled via
     * directlyOn($0).
     */
    public String generateCallToSuper(CtMethod ctMethod) throws NotFoundException {
        boolean superMethodIsInstrumented = !ctClass.getSuperclass().equals(objectCtClass);
        return (superMethodIsInstrumented ? RobolectricInternals.class.getName() + ".directlyOn($0);\n" : "") + "return super." + ctMethod.getName() + "(" + makeParameterList(ctMethod.getParameterTypes().length) + ");";
    }

    // Appends "$1, $2, ..." (Javassist positional parameter placeholders) to buf.
    private void makeParameterList(StringBuilder buf, int length) {
        for (int i = 0; i < length; i++) {
            if (buf.length() > 0) {
                buf.append(", ");
            }
            buf.append("$");
            buf.append(i + 1);
        }
    }

    /** Returns "$1, $2, ..., $length" — the argument list for generated source. */
    public String makeParameterList(int length) {
        StringBuilder buf = new StringBuilder();
        makeParameterList(buf, length);
        return buf.toString();
    }

    /**
     * Builds the dispatch preamble, special-casing abstract methods (no original body to
     * fall through to) and native methods (append a default return so the cleared-native
     * body is well-formed).
     */
    public String generateMethodBody(CtMethod ctMethod, boolean wasNative, boolean wasAbstract, CtClass returnCtClass, RoboType returnType, boolean aStatic, boolean shouldGenerateCallToSuper) throws NotFoundException {
        String methodBody;
        if (wasAbstract) {
            methodBody = returnType.isVoid() ? "" : "return " + returnType.defaultReturnString() + ";";
        } else {
            methodBody = generateMethodBody(ctMethod, returnCtClass, returnType, aStatic, shouldGenerateCallToSuper);
        }
        if (wasNative && !shouldGenerateCallToSuper) {
            methodBody += returnType.isVoid() ? "" : "return " + returnType.defaultReturnString() + ";";
        }
        return methodBody;
    }

    /** Builds the core shadow-dispatch preamble (see template comment below). */
    public String generateMethodBody(CtMethod ctMethod, CtClass returnCtClass, RoboType returnType, boolean isStatic, boolean shouldGenerateCallToSuper) throws NotFoundException {
        boolean returnsVoid = returnType.isVoid();
        String className = ctClass.getName();
        /* METHOD BODY TEMPLATE:
             if (!RobolectricInternals.shouldCallDirectly(isStatic ? class : this)) {
               Object x = RobolectricInternals.methodInvoked(
                 <className>.class, "<methodName>", isStatic ? null : this,
                 <paramTypes>, <params>
               );
               if (x != null) {
                 return ((<returnClass>)x)<unboxing>;
               }
               <optional super call or return default (null/0)>;
             }
        */
        String methodBody;
        StringBuilder buf = new StringBuilder();
        buf.append("if (!");
        generateCallToShouldCallDirectory(isStatic, className, buf);
        buf.append(") {\n");
        if (!returnsVoid) {
            buf.append("Object x = ");
        }
        generateCallToMethodInvoked(ctMethod, isStatic, className, buf);
        if (!returnsVoid) {
            buf.append("if (x != null) return ((");
            buf.append(returnType.nonPrimitiveClassName(returnCtClass));
            buf.append(") x)");
            buf.append(returnType.unboxString());
            buf.append(";\n");
            if (shouldGenerateCallToSuper) {
                buf.append(generateCallToSuper(ctMethod));
            } else {
                buf.append("return ");
                buf.append(returnType.defaultReturnString());
                buf.append(";\n");
            }
        } else {
            buf.append("return;\n");
        }
        buf.append("}\n");
        methodBody = buf.toString();
        return methodBody;
    }

    // NOTE(review): name looks like a typo for "...CallDirectly", but it is public API
    // within this package — renaming would break callers; left as-is.
    /** Appends "RobolectricInternals.shouldCallDirectly(<receiver>)" to buf. */
    public void generateCallToShouldCallDirectory(boolean isStatic, String className, StringBuilder buf) {
        buf.append(RobolectricInternals.class.getName());
        buf.append(".shouldCallDirectly(");
        buf.append(isStatic ? className + ".class" : "this");
        buf.append(")");
    }

    /** Appends the full "RobolectricInternals.methodInvoked(...)" call to buf. */
    public void generateCallToMethodInvoked(CtMethod ctMethod, boolean isStatic, String className, StringBuilder buf) throws NotFoundException {
        buf.append(RobolectricInternals.class.getName());
        buf.append(".methodInvoked(\n ");
        buf.append(className);
        buf.append(".class, \"");
        buf.append(ctMethod.getName());
        buf.append("\", ");
        if (!isStatic) {
            buf.append("this");
        } else {
            buf.append("null");
        }
        buf.append(", ");
        appendParamTypeArray(buf, ctMethod);
        buf.append(", ");
        appendParamArray(buf, ctMethod);
        buf.append(")");
        buf.append(";\n");
    }

    /** Appends a String[] literal of the method's parameter type names. */
    public void appendParamTypeArray(StringBuilder buf, CtMethod ctMethod) throws NotFoundException {
        CtClass[] parameterTypes = ctMethod.getParameterTypes();
        if (parameterTypes.length == 0) {
            buf.append("new String[0]");
        } else {
            buf.append("new String[] {");
            for (int i = 0; i < parameterTypes.length; i++) {
                if (i > 0) buf.append(", ");
                buf.append("\"");
                CtClass parameterType = parameterTypes[i];
                buf.append(parameterType.getName());
                buf.append("\"");
            }
            buf.append("}");
        }
    }

    /** Appends an Object[] literal of the autoboxed argument values ($1, $2, ...). */
    public void appendParamArray(StringBuilder buf, CtMethod ctMethod) throws NotFoundException {
        int parameterCount = ctMethod.getParameterTypes().length;
        if (parameterCount == 0) {
            buf.append("new Object[0]");
        } else {
            buf.append("new Object[] {");
            for (int i = 0; i < parameterCount; i++) {
                if (i > 0) buf.append(", ");
                buf.append(RobolectricInternals.class.getName());
                buf.append(".autobox(");
                buf.append("$").append(i + 1);
                buf.append(")");
            }
            buf.append("}");
        }
    }

    /**
     * Moves the class's static initializer into a named public static method and replaces
     * &lt;clinit&gt; with a callback, so Robolectric controls when static init runs.
     */
    public void deferClassInitialization() throws CannotCompileException {
        CtConstructor classInitializer = ctClass.getClassInitializer();
        CtMethod staticInitializerMethod;
        if (classInitializer == null) {
            staticInitializerMethod = CtNewMethod.make(CtClass.voidType, InstrumentingClassLoader.STATIC_INITIALIZER_METHOD_NAME, new CtClass[0], new CtClass[0], "{}", ctClass);
        } else {
            staticInitializerMethod = classInitializer.toMethod(InstrumentingClassLoader.STATIC_INITIALIZER_METHOD_NAME, ctClass);
        }
        staticInitializerMethod.setModifiers(Modifier.STATIC | Modifier.PUBLIC);
        if (!methodsToIntercept.isEmpty()) {
            staticInitializerMethod.instrument(new NastyMethodInterceptor());
        }
        ctClass.addMethod(staticInitializerMethod);
        ctClass.makeClassInitializer().setBody("{\n" + RobolectricInternals.class.getName() + ".classInitializing(" + ctClass.getName() + ".class);" + "}");
    }

    /**
     * ExprEditor that reroutes calls to configured "methods to intercept" (e.g.
     * problematic JDK calls) through RobolectricInternals.intercept(...).
     */
    private class NastyMethodInterceptor extends ExprEditor {
        @Override
        public void edit(MethodCall m) throws CannotCompileException {
            String methodName = m.getMethodName();
            Setup.MethodRef methodRef = new Setup.MethodRef(m.getClassName(), methodName);
            if (methodsToIntercept.contains(methodRef)) {
                try {
                    CtMethod targetMethod = m.getMethod();
                    StringBuilder buf = new StringBuilder();
                    buf.append("$_ = ");
                    buf.append(RobolectricInternals.class.getName());
                    buf.append(".intercept(\"");
                    buf.append(m.getClassName());
                    buf.append("\", \"");
                    buf.append(methodName);
                    buf.append("\", (Object) $0, $args, ");
                    appendParamArray(buf, targetMethod);
                    buf.append(");");
                    // Pass `this` so replacement source is itself scanned recursively.
                    m.replace(buf.toString(), this);
                } catch (NotFoundException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }
}
package de.cinovo.cloudconductor.server.rest.shared;

/*
 * #%L
 * cloudconductor-server
 * %%
 * Copyright (C) 2013 - 2014 Cinovo AG
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 * #L%
 */

import de.cinovo.cloudconductor.api.interfaces.IConfigValue;
import de.cinovo.cloudconductor.api.model.ConfigDiff;
import de.cinovo.cloudconductor.api.model.ConfigValue;
import de.cinovo.cloudconductor.api.model.ConfigValues;
import de.cinovo.cloudconductor.server.dao.IConfigValueDAO;
import de.cinovo.cloudconductor.server.dao.hibernate.ConfigValueDAOHib;
import de.cinovo.cloudconductor.server.handler.ConfigValueHandler;
import de.cinovo.cloudconductor.server.handler.GlobalConfigMigrator;
import de.cinovo.cloudconductor.server.model.EConfigValue;
import de.cinovo.cloudconductor.server.util.ReservedConfigKeyStore;
import de.cinovo.cloudconductor.server.util.comparators.ConfigValueDiffer;
import de.taimos.dvalin.jaxrs.JaxRsComponent;
import de.taimos.restutils.RESTAssert;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;

import javax.ws.rs.NotAcceptableException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.Response.Status;
import java.util.ArrayList;
import java.util.List;

/**
 * REST endpoint implementation of {@link IConfigValue}: reads, writes and deletes
 * configuration key/value pairs scoped by template and service. Reads are mostly
 * delegated to {@link ConfigValueHandler}; writes go straight to the DAO after
 * validation in {@link #prepareConfigValue(ConfigValue)}.
 * <p>
 * Copyright 2013 Cinovo AG<br>
 * <br>
 *
 * @author psigloch
 */
@JaxRsComponent
public class ConfigValueImpl implements IConfigValue {

	@Autowired
	private IConfigValueDAO configValueDAO;
	@Autowired
	private ConfigValueHandler handler;
	@Autowired
	private ConfigValueDiffer differ;
	@Autowired
	private GlobalConfigMigrator globalConfigMigrator;

	/** Lists the names of all templates that currently have config values. */
	@Override
	public String[] getAvailableTemplates() {
		return this.configValueDAO.findTemplates().toArray(new String[0]);
	}

	/** Returns all config values for the given template (stacked with global defaults — see handler). */
	@Override
	@Transactional
	public ConfigValue[] get(String template) {
		RESTAssert.assertNotEmpty(template);
		return this.handler.get(template);
	}

	/** Same as {@link #get(String)} but with the "clean" view provided by the handler. */
	@Override
	@Transactional
	public ConfigValue[] getClean(String template) {
		RESTAssert.assertNotEmpty(template);
		return this.handler.getClean(template);
	}

	/** Clean view of the template's own values only, without stacking. */
	@Override
	@Transactional
	public ConfigValue[] getCleanUnstacked(String template) {
		RESTAssert.assertNotEmpty(template);
		return this.handler.getCleanUnstacked(template);
	}

	/** Returns config values for a specific service within a template. */
	@Override
	@Transactional
	public ConfigValue[] get(String template, String service) {
		RESTAssert.assertNotEmpty(template);
		RESTAssert.assertNotEmpty(service);
		return this.handler.get(template, service);
	}

	/** Clean view of the per-service values within a template. */
	@Override
	@Transactional
	public ConfigValue[] getClean(String template, String service) {
		RESTAssert.assertNotEmpty(template);
		RESTAssert.assertNotEmpty(service);
		return this.handler.getClean(template, service);
	}

	/** Resolves a single key; no validation here — the handler performs the lookup. */
	@Override
	@Transactional
	public String get(String template, String service, String key) {
		return this.handler.get(template, service, key);
	}

	/** Clean single-key lookup; only the template is validated. */
	@Override
	@Transactional
	public String getClean(String template, String service, String key) {
		RESTAssert.assertNotEmpty(template);
		return this.handler.getClean(template, service, key);
	}

	/**
	 * Returns the reserved "variable" entries of a template.
	 * The literal string "null" is treated like an absent template because JAX-RS
	 * path parameters may arrive as that literal.
	 */
	@Override
	@Transactional
	public ConfigValue[] getCleanVars(String template) {
		if ((template == null) || template.isEmpty() || template.equals("null")) {
			return this.getCleanUnstacked(ConfigValueDAOHib.RESERVED_VARIABLE);
		}
		List<ConfigValue> result = new ArrayList<>();
		for (EConfigValue ecv : this.configValueDAO.findBy(template, ConfigValueDAOHib.RESERVED_VARIABLE)) {
			result.add(ecv.toApi());
		}
		return result.toArray(new ConfigValue[0]);
	}

	/** Computes the config diff between two templates via the injected differ. */
	@Override
	@Transactional
	public ConfigDiff[] diffTemplates(String templateA, String templateB) {
		return this.differ.compare(templateA, templateB);
	}

	/**
	 * Exact (non-stacked) lookup of a single key.
	 * Reserved keys short-circuit to the {@link ReservedConfigKeyStore};
	 * a missing entry maps to HTTP 404 via {@link NotFoundException}.
	 */
	@Override
	@Transactional
	public String getExact(String template, String service, String key) {
		RESTAssert.assertNotEmpty(template);
		RESTAssert.assertNotEmpty(key);
		EConfigValue result;
		if (ReservedConfigKeyStore.instance.isReserved(key)) {
			return ReservedConfigKeyStore.instance.getValue(key);
		}
		// "null" literal means: no service scope (global within the template).
		if ((service == null) || service.isEmpty() || service.equals("null")) {
			service = null;
		}
		result = this.configValueDAO.findBy(template, service, key);
		if (result == null) {
			throw new NotFoundException();
		}
		return result.getValue();
	}

	/**
	 * Creates or updates a single config value (upsert keyed by template/service/key).
	 */
	@Override
	@Transactional
	public void save(ConfigValue apiObject) {
		this.prepareConfigValue(apiObject);
		EConfigValue ecv = this.configValueDAO.findBy(apiObject.getTemplate(), apiObject.getService(), apiObject.getKey());
		if (ecv == null) {
			ecv = new EConfigValue();
			ecv.setTemplate(apiObject.getTemplate());
			ecv.setService(apiObject.getService());
			ecv.setConfigkey(apiObject.getKey());
		}
		// NOTE(review): getValue() == null would NPE here; presumably upstream
		// validation guarantees a non-null value — confirm against the API model.
		ecv.setValue(apiObject.getValue().toString());
		this.configValueDAO.save(ecv);
	}

	/** Bulk variant of {@link #save(ConfigValue)}; each element is upserted individually. */
	@Override
	@Transactional
	public void save(ConfigValues cvs) {
		RESTAssert.assertNotNull(cvs);
		for (ConfigValue cv : cvs.getElements()) {
			this.prepareConfigValue(cv);
			EConfigValue existingCV = this.configValueDAO.findBy(cv.getTemplate(), cv.getService(), cv.getKey());
			if (existingCV == null) {
				EConfigValue newConfigValue = new EConfigValue(cv.getTemplate(), cv.getService(), cv.getKey(), cv.getValue().toString());
				this.configValueDAO.save(newConfigValue);
			} else {
				existingCV.setValue(cv.getValue().toString());
				this.configValueDAO.save(existingCV);
			}
		}
	}

	/**
	 * Validates and normalizes an incoming config value before persisting:
	 * checks the key against the allowed pattern, rejects reserved keys with
	 * HTTP 406, and maps absent template/service to their canonical defaults.
	 */
	private void prepareConfigValue(ConfigValue configValue) {
		String keyPattern;
		// Variable-scope entries must look like ${name}; everything else is a plain key.
		if (ConfigValueDAOHib.RESERVED_VARIABLE.equals(configValue.getService()) || ConfigValueDAOHib.RESERVED_VARIABLE.equals(configValue.getTemplate())) {
			keyPattern = "^\\$\\{[\\w\\.-]+\\}$";
		} else {
			keyPattern = "^[\\w\\.-]+$";
		}
		RESTAssert.assertPattern(configValue.getKey(), keyPattern);
		if (ReservedConfigKeyStore.instance.isReserved(configValue.getKey())) {
			throw new NotAcceptableException();
		}
		if ((configValue.getTemplate() == null) || configValue.getTemplate().isEmpty()) {
			configValue.setTemplate(ConfigValueDAOHib.RESERVED_GLOBAL);
		}
		if ((configValue.getService() == null) || configValue.getService().isEmpty()) {
			configValue.setService(null);
		}
	}

	/**
	 * Deletes a single config value; missing entries map to HTTP 404.
	 */
	@Override
	@Transactional
	public void delete(String template, String service, String key) {
		RESTAssert.assertNotNull(template);
		RESTAssert.assertNotEmpty(key);
		// NOTE(review): assertNotNull above makes the null half of this condition
		// unreachable; possibly the assert was meant to be dropped so that a missing
		// template falls back to the global scope — confirm the intended contract.
		if ((template == null) || template.isEmpty()) {
			template = ConfigValueDAOHib.RESERVED_GLOBAL;
		}
		// "null" literal means: no service scope.
		if ((service == null) || service.isEmpty() || service.equals("null")) {
			service = null;
		}
		EConfigValue ecv = this.configValueDAO.findBy(template, service, key);
		RESTAssert.assertNotNull(ecv, Status.NOT_FOUND);
		this.configValueDAO.deleteById(ecv.getId());
	}

	/** Deletes every config value belonging to the given template. */
	@Override
	@Transactional
	public void deleteForTemplate(String templateName) {
		RESTAssert.assertNotEmpty(templateName);
		List<EConfigValue> configs = this.configValueDAO.findAll(templateName);
		configs.forEach(cv -> this.configValueDAO.delete(cv));
	}

	/** Alias for {@link #getCleanUnstacked(String)}. */
	@Override
	@Transactional
	public ConfigValue[] getUnstacked(String template) {
		return this.getCleanUnstacked(template);
	}

	/**
	 * Deletes all values of one service inside a template; the reserved GLOBAL
	 * service name selects the template-global (service-less) entries instead.
	 */
	@Override
	@Transactional
	public void deleteForService(String template, String service) {
		RESTAssert.assertNotEmpty(template);
		RESTAssert.assertNotEmpty(service);
		List<EConfigValue> configsToDelete;
		if (ConfigValueDAOHib.RESERVED_GLOBAL.equals(service)) {
			configsToDelete = this.configValueDAO.findForGlobalService(template);
		} else {
			configsToDelete = this.configValueDAO.findBy(template, service);
		}
		configsToDelete.forEach(cv -> this.configValueDAO.delete(cv));
	}

	/** Triggers the one-off migration of legacy global config entries. */
	@Override
	@Transactional
	public void migrateGlobalConfig() {
		this.globalConfigMigrator.migrateGlobalConfig();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.datastructures; import java.io.Externalizable; import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.ObjectStreamException; import java.util.concurrent.Callable; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import static org.apache.ignite.internal.processors.cache.GridCacheUtils.retryTopologySafe; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; /** * Cache atomic reference 
implementation. */ public final class GridCacheAtomicReferenceImpl<T> implements GridCacheAtomicReferenceEx<T>, Externalizable { /** */ private static final long serialVersionUID = 0L; /** Deserialization stash. */ private static final ThreadLocal<IgniteBiTuple<GridKernalContext, String>> stash = new ThreadLocal<IgniteBiTuple<GridKernalContext, String>>() { @Override protected IgniteBiTuple<GridKernalContext, String> initialValue() { return new IgniteBiTuple<>(); } }; /** Logger. */ private IgniteLogger log; /** Atomic reference name. */ private String name; /** Status.*/ private volatile boolean rmvd; /** Check removed flag. */ private boolean rmvCheck; /** Atomic reference key. */ private GridCacheInternalKey key; /** Atomic reference projection. */ private IgniteInternalCache<GridCacheInternalKey, GridCacheAtomicReferenceValue<T>> atomicView; /** Cache context. */ private GridCacheContext ctx; /** Callable for {@link #get} operation */ private final Callable<T> getCall = new Callable<T>() { @Override public T call() throws Exception { GridCacheAtomicReferenceValue<T> ref = atomicView.get(key); if (ref == null) throw new IgniteCheckedException("Failed to find atomic reference with given name: " + name); return ref.get(); } }; /** * Empty constructor required by {@link Externalizable}. */ public GridCacheAtomicReferenceImpl() { // No-op. } /** * Default constructor. * * @param name Atomic reference name. * @param key Atomic reference key. * @param atomicView Atomic projection. * @param ctx Cache context. 
*/ public GridCacheAtomicReferenceImpl(String name, GridCacheInternalKey key, IgniteInternalCache<GridCacheInternalKey, GridCacheAtomicReferenceValue<T>> atomicView, GridCacheContext ctx) { assert key != null; assert atomicView != null; assert ctx != null; assert name != null; this.ctx = ctx; this.key = key; this.atomicView = atomicView; this.name = name; log = ctx.logger(getClass()); } /** {@inheritDoc} */ @Override public String name() { return name; } /** {@inheritDoc} */ @Override public T get() { checkRemoved(); try { return CU.outTx(getCall, ctx); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public void set(T val) { checkRemoved(); try { CU.outTx(internalSet(val), ctx); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean compareAndSet(T expVal, T newVal) { return compareAndSetAndGet(newVal, expVal) == expVal; } /** * Compares current value with specified value for equality and, if they are equal, replaces current value. * * @param newVal New value to set. * @return Original value. */ public T compareAndSetAndGet(T newVal, T expVal) { checkRemoved(); try { return CU.outTx(internalCompareAndSetAndGet(expVal, newVal), ctx); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean onRemoved() { return rmvd = true; } /** {@inheritDoc} */ @Override public void needCheckNotRemoved() { rmvCheck = true; } /** {@inheritDoc} */ @Override public GridCacheInternalKey key() { return key; } /** {@inheritDoc} */ @Override public boolean removed() { return rmvd; } /** {@inheritDoc} */ @Override public void close() { if (rmvd) return; try { ctx.kernalContext().dataStructures().removeAtomicReference(name); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** * Method returns callable for execution {@link #set(Object)} operation in async and sync mode. 
* * @param val Value will be set in reference . * @return Callable for execution in async and sync mode. */ private Callable<Boolean> internalSet(final T val) { return retryTopologySafe(new Callable<Boolean>() { @Override public Boolean call() throws Exception { try (GridNearTxLocal tx = CU.txStartInternal(ctx, atomicView, PESSIMISTIC, REPEATABLE_READ)) { GridCacheAtomicReferenceValue<T> ref = atomicView.get(key); if (ref == null) throw new IgniteCheckedException("Failed to find atomic reference with given name: " + name); ref.set(val); atomicView.put(key, ref); tx.commit(); return true; } catch (Error | Exception e) { U.error(log, "Failed to set value [val=" + val + ", atomicReference=" + this + ']', e); throw e; } } }); } /** * Conditionally sets the new value. It will be set if {@code expValPred} is * evaluate to {@code true}. * * @param expVal Expected value. * @param newVal New value. * @return Callable for execution in async and sync mode. */ private Callable<T> internalCompareAndSetAndGet(final T expVal, final T newVal) { return retryTopologySafe(new Callable<T>() { @Override public T call() throws Exception { try (GridNearTxLocal tx = CU.txStartInternal(ctx, atomicView, PESSIMISTIC, REPEATABLE_READ)) { GridCacheAtomicReferenceValue<T> ref = atomicView.get(key); if (ref == null) throw new IgniteCheckedException("Failed to find atomic reference with given name: " + name); T origVal = ref.get(); if (!F.eq(expVal, origVal)) { tx.setRollbackOnly(); return origVal; } else { ref.set(newVal); atomicView.getAndPut(key, ref); tx.commit(); return expVal; } } catch (Error | Exception e) { U.error(log, "Failed to compare and value [expVal=" + expVal + ", newVal" + newVal + ", atomicReference" + this + ']', e); throw e; } } }); } /** * Check removed status. * * @throws IllegalStateException If removed. 
*/ private void checkRemoved() throws IllegalStateException { if (rmvd) throw removedError(); if (rmvCheck) { try { rmvd = atomicView.get(key) == null; } catch (IgniteCheckedException e) { throw U.convertException(e); } rmvCheck = false; if (rmvd) { ctx.kernalContext().dataStructures().onRemoved(key, this); throw removedError(); } } } /** * @return Error. */ private IllegalStateException removedError() { return new IllegalStateException("Atomic reference was removed from cache: " + name); } /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { out.writeObject(ctx.kernalContext()); out.writeUTF(name); } /** {@inheritDoc} */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { IgniteBiTuple<GridKernalContext, String> t = stash.get(); t.set1((GridKernalContext)in.readObject()); t.set2(in.readUTF()); } /** * Reconstructs object on unmarshalling. * * @return Reconstructed object. * @throws ObjectStreamException Thrown in case of unmarshalling error. */ @SuppressWarnings("unchecked") private Object readResolve() throws ObjectStreamException { try { IgniteBiTuple<GridKernalContext, String> t = stash.get(); return t.get1().dataStructures().atomicReference(t.get2(), null, false); } catch (IgniteCheckedException e) { throw U.withCause(new InvalidObjectException(e.getMessage()), e); } finally { stash.remove(); } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridCacheAtomicReferenceImpl.class, this); } }
// Copyright (c) 2012 Cloudera, Inc. All rights reserved.

package com.cloudera.impala.catalog;

import static com.cloudera.impala.thrift.ImpalaInternalServiceConstants.DEFAULT_PARTITION_ID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.junit.Test;

import com.cloudera.impala.analysis.FunctionName;
import com.cloudera.impala.analysis.HdfsUri;
import com.cloudera.impala.analysis.LiteralExpr;
import com.cloudera.impala.analysis.NumericLiteral;
import com.cloudera.impala.catalog.MetaStoreClientPool.MetaStoreClient;
import com.cloudera.impala.testutil.CatalogServiceTestCatalog;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

// Tests catalog metadata loading (schemas, partitions, HBase mappings) against the
// shared test catalog instance built from the "functional" test databases.
public class CatalogTest {
  // Shared across all tests in this class; tests only read/load, never mutate.
  private static CatalogServiceCatalog catalog_ = CatalogServiceTestCatalog.create();

  // Asserts that table 'tblName' in 'db' has exactly the given clustering-column
  // count and the given column names/types, in order.
  private void checkTableCols(Db db, String tblName, int numClusteringCols,
      String[] colNames, Type[] colTypes) throws TableLoadingException {
    Table tbl = db.getTable(tblName);
    assertEquals(tbl.getName(), tblName);
    assertEquals(tbl.getNumClusteringCols(), numClusteringCols);
    List<Column> cols = tbl.getColumns();
    assertEquals(colNames.length, colTypes.length);
    assertEquals(cols.size(), colNames.length);
    Iterator<Column> it = cols.iterator();
    int i = 0;
    while (it.hasNext()) {
      Column col = it.next();
      assertEquals(col.getName(), colNames[i]);
      assertTrue(col.getType().equals(colTypes[i]));
      ++i;
    }
  }

  // HBase variant of checkTableCols: additionally verifies the backing HBase table
  // name and the per-column family/qualifier mapping. A null qualifier corresponds
  // to the row-key column (":key" family).
  private void checkHBaseTableCols(Db db, String hiveTableName, String hbaseTableName,
      String[] hiveColNames, String[] colFamilies, String[] colQualifiers,
      Type[] colTypes) throws TableLoadingException {
    // HBase tables always have exactly one clustering column (the row key).
    checkTableCols(db, hiveTableName, 1, hiveColNames, colTypes);
    HBaseTable tbl = (HBaseTable) db.getTable(hiveTableName);
    assertEquals(tbl.getHBaseTableName(), hbaseTableName);
    List<Column> cols = tbl.getColumns();
    assertEquals(colFamilies.length, colTypes.length);
    assertEquals(colQualifiers.length, colTypes.length);
    Iterator<Column> it = cols.iterator();
    int i = 0;
    while (it.hasNext()) {
      HBaseColumn col = (HBaseColumn) it.next();
      assertEquals(col.getColumnFamily(), colFamilies[i]);
      assertEquals(col.getColumnQualifier(), colQualifiers[i]);
      ++i;
    }
  }

  // Loads a broad set of functional/HBase tables and verifies their column schemas.
  @Test
  public void TestColSchema() throws CatalogException {
    Db functionalDb = catalog_.getDb("functional");
    assertNotNull(functionalDb);
    assertEquals(functionalDb.getName(), "functional");
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypes"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypes_view"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypes_view_sub"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypessmall"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypeserror"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypeserrornonulls"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypesagg"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypesaggnonulls"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypesnopart"));
    assertNotNull(catalog_.getOrLoadTable("functional", "alltypesinsert"));
    assertNotNull(catalog_.getOrLoadTable("functional", "complex_view"));
    assertNotNull(catalog_.getOrLoadTable("functional", "testtbl"));
    assertNotNull(catalog_.getOrLoadTable("functional", "dimtbl"));
    assertNotNull(catalog_.getOrLoadTable("functional", "jointbl"));
    assertNotNull(catalog_.getOrLoadTable("functional", "liketbl"));
    assertNotNull(catalog_.getOrLoadTable("functional", "greptiny"));
    assertNotNull(catalog_.getOrLoadTable("functional", "rankingssmall"));
    assertNotNull(catalog_.getOrLoadTable("functional", "uservisitssmall"));
    assertNotNull(catalog_.getOrLoadTable("functional", "view_view"));
    // IMP-163 - table with string partition column does not load if there are partitions
    assertNotNull(catalog_.getOrLoadTable("functional", "StringPartitionKey"));
    // Test non-existent table
    assertNull(catalog_.getOrLoadTable("functional", "nonexistenttable"));

    // functional_seq contains the same tables as functional
    Db testDb = catalog_.getDb("functional_seq");
    assertNotNull(testDb);
    assertEquals(testDb.getName(), "functional_seq");
    assertNotNull(catalog_.getOrLoadTable("functional_seq", "alltypes"));
    assertNotNull(catalog_.getOrLoadTable("functional_seq", "testtbl"));

    Db hbaseDb = catalog_.getDb("functional_hbase");
    assertNotNull(hbaseDb);
    assertEquals(hbaseDb.getName(), "functional_hbase");
    // Loading succeeds for an HBase table that has binary columns and an implicit key
    // column mapping
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "alltypessmallbinary"));
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "alltypessmall"));
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "hbasealltypeserror"));
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "hbasealltypeserrornonulls"));
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "alltypesagg"));
    assertNotNull(catalog_.getOrLoadTable(hbaseDb.getName(), "stringids"));

    // Schema checks: alltypes has two clustering (partition) columns, year/month.
    checkTableCols(functionalDb, "alltypes", 2,
        new String[] {"year", "month", "id", "bool_col", "tinyint_col", "smallint_col",
            "int_col", "bigint_col", "float_col", "double_col", "date_string_col",
            "string_col", "timestamp_col"},
        new Type[] {Type.INT, Type.INT, Type.INT, Type.BOOLEAN, Type.TINYINT,
            Type.SMALLINT, Type.INT, Type.BIGINT, Type.FLOAT, Type.DOUBLE, Type.STRING,
            Type.STRING, Type.TIMESTAMP});
    checkTableCols(functionalDb, "testtbl", 0,
        new String[] {"id", "name", "zip"},
        new Type[] {Type.BIGINT, Type.STRING, Type.INT});
    checkTableCols(testDb, "testtbl", 0,
        new String[] {"id", "name", "zip"},
        new Type[] {Type.BIGINT, Type.STRING, Type.INT});
    checkTableCols(functionalDb, "liketbl", 0,
        new String[] {"str_col", "match_like_col", "no_match_like_col", "match_regex_col",
            "no_match_regex_col"},
        new Type[] {Type.STRING, Type.STRING, Type.STRING, Type.STRING, Type.STRING});
    checkTableCols(functionalDb, "dimtbl", 0,
        new String[] {"id", "name", "zip"},
        new Type[] {Type.BIGINT, Type.STRING, Type.INT});
    checkTableCols(functionalDb, "jointbl", 0,
        new String[] {"test_id", "test_name", "test_zip", "alltypes_id"},
        new Type[] {Type.BIGINT, Type.STRING, Type.INT, Type.INT});

    // HBase schema checks; the row key (":key" family, null qualifier) maps to "id".
    checkHBaseTableCols(hbaseDb, "alltypessmall", "functional_hbase.alltypessmall",
        new String[] {"id", "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new String[] {":key", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d"},
        new String[] {null, "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new Type[] {Type.INT, Type.BIGINT, Type.BOOLEAN, Type.STRING, Type.DOUBLE,
            Type.FLOAT, Type.INT, Type.INT, Type.SMALLINT, Type.STRING, Type.TIMESTAMP,
            Type.TINYINT, Type.INT});
    checkHBaseTableCols(hbaseDb, "hbasealltypeserror",
        "functional_hbase.hbasealltypeserror",
        new String[] {"id", "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "smallint_col", "string_col", "timestamp_col",
            "tinyint_col"},
        new String[] {":key", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d"},
        new String[] {null, "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "smallint_col", "string_col", "timestamp_col",
            "tinyint_col"},
        new Type[] {Type.INT, Type.BIGINT, Type.BOOLEAN, Type.STRING, Type.DOUBLE,
            Type.FLOAT, Type.INT, Type.SMALLINT, Type.STRING, Type.TIMESTAMP,
            Type.TINYINT});
    checkHBaseTableCols(hbaseDb, "hbasealltypeserrornonulls",
        "functional_hbase.hbasealltypeserrornonulls",
        new String[] {"id", "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "smallint_col", "string_col", "timestamp_col",
            "tinyint_col"},
        new String[] {":key", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d"},
        new String[] {null, "bigint_col", "bool_col", "date_string_col", "double_col",
            "float_col", "int_col", "smallint_col", "string_col", "timestamp_col",
            "tinyint_col"},
        new Type[] {Type.INT, Type.BIGINT, Type.BOOLEAN, Type.STRING, Type.DOUBLE,
            Type.FLOAT, Type.INT, Type.SMALLINT, Type.STRING, Type.TIMESTAMP,
            Type.TINYINT});
    checkHBaseTableCols(hbaseDb, "alltypesagg", "functional_hbase.alltypesagg",
        new String[] {"id", "bigint_col", "bool_col", "date_string_col", "day",
            "double_col", "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new String[] {":key", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d",
            "d", "d"},
        new String[] {null, "bigint_col", "bool_col", "date_string_col", "day",
            "double_col", "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new Type[] {Type.INT, Type.BIGINT, Type.BOOLEAN, Type.STRING, Type.INT,
            Type.DOUBLE, Type.FLOAT, Type.INT, Type.INT, Type.SMALLINT, Type.STRING,
            Type.TIMESTAMP, Type.TINYINT, Type.INT});
    // "stringids" maps the same HBase table but with a STRING row key.
    checkHBaseTableCols(hbaseDb, "stringids", "functional_hbase.alltypesagg",
        new String[] {"id", "bigint_col", "bool_col", "date_string_col", "day",
            "double_col", "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new String[] {":key", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d", "d",
            "d", "d"},
        new String[] {null, "bigint_col", "bool_col", "date_string_col", "day",
            "double_col", "float_col", "int_col", "month", "smallint_col", "string_col",
            "timestamp_col", "tinyint_col", "year"},
        new Type[] {Type.STRING, Type.BIGINT, Type.BOOLEAN, Type.STRING, Type.INT,
            Type.DOUBLE, Type.FLOAT, Type.INT, Type.INT, Type.SMALLINT, Type.STRING,
            Type.TIMESTAMP, Type.TINYINT, Type.INT});
    checkTableCols(functionalDb, "greptiny", 0,
        new String[] {"field"},
        new Type[] {Type.STRING});
    checkTableCols(functionalDb, "rankingssmall", 0,
        new String[] {"pagerank", "pageurl", "avgduration"},
        new Type[] {Type.INT, Type.STRING, Type.INT});
    checkTableCols(functionalDb, "uservisitssmall", 0,
        new String[] {"sourceip", "desturl", "visitdate",  "adrevenue", "useragent",
            "ccode", "lcode", "skeyword", "avgtimeonsite"},
        new Type[] {Type.STRING, Type.STRING, Type.STRING, Type.FLOAT, Type.STRING,
            Type.STRING, Type.STRING, Type.STRING, Type.INT});
    // case-insensitive lookup
    assertEquals(catalog_.getOrLoadTable("functional", "alltypes"),
        catalog_.getOrLoadTable("functional", "AllTypes"));
  }

  // Verifies the partition key space and file counts of functional.alltypes.
  @Test
  public void TestPartitions() throws CatalogException {
    HdfsTable table = (HdfsTable) catalog_.getOrLoadTable("functional", "AllTypes");
    List<HdfsPartition> partitions = table.getPartitions();

    // check that partition keys cover the date range 1/1/2009-12/31/2010
    // and that we have one file per partition, plus the default partition
    assertEquals(25, partitions.size());
    Set<Long> months = Sets.newHashSet();
    for (HdfsPartition p: partitions) {
      if (p.getId() == DEFAULT_PARTITION_ID) {
        continue;
      }

      assertEquals(2, p.getPartitionValues().size());

      LiteralExpr key1Expr = p.getPartitionValues().get(0);
      assertTrue(key1Expr instanceof NumericLiteral);
      long key1 = ((NumericLiteral) key1Expr).getLongValue();
      assertTrue(key1 == 2009 || key1 == 2010);
      LiteralExpr key2Expr = p.getPartitionValues().get(1);
      assertTrue(key2Expr instanceof NumericLiteral);
      long key2 = ((NumericLiteral) key2Expr).getLongValue();
      assertTrue(key2 >= 1 && key2 <= 12);

      // Encode year/month into a single value so the set counts distinct months.
      months.add(key1 * 100 + key2);

      assertEquals(p.getFileDescriptors().size(), 1);
    }
    assertEquals(months.size(), 24);
  }

  // TODO: All Hive-stats related tests are temporarily disabled because of an unknown,
  // sporadic issue causing stats of some columns to be absent in Jenkins runs.
  // Investigate this issue further.
  // Disabled (see TODO above); checks Hive column stats loaded for
  // functional.alltypesagg: avg/max serialized sizes match the slot sizes for
  // fixed-length types, and the expected null-presence flags per column.
  //@Test
  public void testStats() throws TableLoadingException {
    // make sure the stats for functional.alltypesagg look correct
    HdfsTable table = (HdfsTable) catalog_.getDb("functional").getTable("AllTypesAgg");

    Column idCol = table.getColumn("id");
    assertEquals(idCol.getStats().getAvgSerializedSize() -
        PrimitiveType.INT.getSlotSize(),
        PrimitiveType.INT.getSlotSize(), 0.0001);
    assertEquals(idCol.getStats().getMaxSize(), PrimitiveType.INT.getSlotSize());
    assertTrue(!idCol.getStats().hasNulls());

    Column boolCol = table.getColumn("bool_col");
    assertEquals(boolCol.getStats().getAvgSerializedSize() -
        PrimitiveType.BOOLEAN.getSlotSize(),
        PrimitiveType.BOOLEAN.getSlotSize(), 0.0001);
    assertEquals(boolCol.getStats().getMaxSize(), PrimitiveType.BOOLEAN.getSlotSize());
    assertTrue(!boolCol.getStats().hasNulls());

    Column tinyintCol = table.getColumn("tinyint_col");
    assertEquals(tinyintCol.getStats().getAvgSerializedSize() -
        PrimitiveType.TINYINT.getSlotSize(),
        PrimitiveType.TINYINT.getSlotSize(), 0.0001);
    assertEquals(tinyintCol.getStats().getMaxSize(),
        PrimitiveType.TINYINT.getSlotSize());
    assertTrue(tinyintCol.getStats().hasNulls());

    Column smallintCol = table.getColumn("smallint_col");
    assertEquals(smallintCol.getStats().getAvgSerializedSize() -
        PrimitiveType.SMALLINT.getSlotSize(),
        PrimitiveType.SMALLINT.getSlotSize(), 0.0001);
    assertEquals(smallintCol.getStats().getMaxSize(),
        PrimitiveType.SMALLINT.getSlotSize());
    assertTrue(smallintCol.getStats().hasNulls());

    Column intCol = table.getColumn("int_col");
    assertEquals(intCol.getStats().getAvgSerializedSize() -
        PrimitiveType.INT.getSlotSize(),
        PrimitiveType.INT.getSlotSize(), 0.0001);
    assertEquals(intCol.getStats().getMaxSize(), PrimitiveType.INT.getSlotSize());
    assertTrue(intCol.getStats().hasNulls());

    Column bigintCol = table.getColumn("bigint_col");
    assertEquals(bigintCol.getStats().getAvgSerializedSize() -
        PrimitiveType.BIGINT.getSlotSize(),
        PrimitiveType.BIGINT.getSlotSize(), 0.0001);
    assertEquals(bigintCol.getStats().getMaxSize(), PrimitiveType.BIGINT.getSlotSize());
    assertTrue(bigintCol.getStats().hasNulls());

    Column floatCol = table.getColumn("float_col");
    assertEquals(floatCol.getStats().getAvgSerializedSize() -
        PrimitiveType.FLOAT.getSlotSize(),
        PrimitiveType.FLOAT.getSlotSize(), 0.0001);
    assertEquals(floatCol.getStats().getMaxSize(), PrimitiveType.FLOAT.getSlotSize());
    assertTrue(floatCol.getStats().hasNulls());

    Column doubleCol = table.getColumn("double_col");
    assertEquals(doubleCol.getStats().getAvgSerializedSize() -
        PrimitiveType.DOUBLE.getSlotSize(),
        PrimitiveType.DOUBLE.getSlotSize(), 0.0001);
    assertEquals(doubleCol.getStats().getMaxSize(), PrimitiveType.DOUBLE.getSlotSize());
    assertTrue(doubleCol.getStats().hasNulls());

    Column timestampCol = table.getColumn("timestamp_col");
    assertEquals(timestampCol.getStats().getAvgSerializedSize() -
        PrimitiveType.TIMESTAMP.getSlotSize(),
        PrimitiveType.TIMESTAMP.getSlotSize(), 0.0001);
    assertEquals(timestampCol.getStats().getMaxSize(),
        PrimitiveType.TIMESTAMP.getSlotSize());
    // this does not have nulls, it's not clear why this passes
    // TODO: investigate and re-enable
    //assertTrue(timestampCol.getStats().hasNulls());

    Column stringCol = table.getColumn("string_col");
    // Variable-length type: only lower bounds can be asserted.
    assertTrue(stringCol.getStats().getAvgSerializedSize() >=
        PrimitiveType.STRING.getSlotSize());
    assertTrue(stringCol.getStats().getAvgSerializedSize() > 0);
    assertTrue(stringCol.getStats().getMaxSize() > 0);
    assertTrue(!stringCol.getStats().hasNulls());
  }

  /**
   * Verifies that updating column stats data for a type that isn't compatible with
   * the column type results in the stats being set to "unknown". This is a regression
   * test for IMPALA-588, where this used to result in a Preconditions failure.
   */
  // TODO: All Hive-stats related tests are temporarily disabled because of an unknown,
  // sporadic issue causing stats of some columns to be absent in Jenkins runs.
  // Investigate this issue further.
//@Test
public void testColStatsColTypeMismatch() throws Exception {
  // First load a table that has column stats.
  //catalog_.refreshTable("functional", "alltypesagg", false);
  HdfsTable table = (HdfsTable) catalog_.getOrLoadTable("functional", "alltypesagg");

  // Now attempt to update a column's stats with mismatched stats data and ensure
  // we get the expected results.
  MetaStoreClient client = catalog_.getMetaStoreClient();
  try {
    // Load some string stats data and use it to update the stats of different
    // typed columns.
    ColumnStatisticsData stringColStatsData = client.getHiveClient()
        .getTableColumnStatistics("functional", "alltypesagg",
            Lists.newArrayList("string_col")).get(0).getStatsData();

    // String stats applied to non-string columns: updateStats() must return false
    // and the column stats must be reset to the "unknown" sentinel values.
    assertTrue(!table.getColumn("int_col").updateStats(stringColStatsData));
    assertStatsUnknown(table.getColumn("int_col"));

    assertTrue(!table.getColumn("double_col").updateStats(stringColStatsData));
    assertStatsUnknown(table.getColumn("double_col"));

    assertTrue(!table.getColumn("bool_col").updateStats(stringColStatsData));
    assertStatsUnknown(table.getColumn("bool_col"));

    // Do the same thing, but apply bigint stats to a string column.
    ColumnStatisticsData bigIntCol = client.getHiveClient()
        .getTableColumnStatistics("functional", "alltypes",
            Lists.newArrayList("bigint_col")).get(0).getStatsData();
    assertTrue(!table.getColumn("string_col").updateStats(bigIntCol));
    assertStatsUnknown(table.getColumn("string_col"));

    // Now try to apply a matching column stats data and ensure it succeeds.
    assertTrue(table.getColumn("string_col").updateStats(stringColStatsData));
    // NOTE(review): 1178 is the NDV Hive recorded for this fixture; the value is
    // tied to the test data load — confirm if the fixture changes.
    assertEquals(1178, table.getColumn("string_col").getStats().getNumDistinctValues());
  } finally {
    // Make sure to invalidate the metadata so the next test isn't using bad col stats
    //catalog_.refreshTable("functional", "alltypesagg", false);
    client.release();
  }
}

/**
 * Asserts that the given column's stats carry the "unknown" sentinel values:
 * -1 for NDV and null count, and -1 for sizes unless the type is fixed-length,
 * in which case sizes fall back to the slot size.
 */
private void assertStatsUnknown(Column column) {
  assertEquals(-1, column.getStats().getNumDistinctValues());
  assertEquals(-1, column.getStats().getNumNulls());
  double expectedSize = column.getType().isFixedLengthType() ?
      column.getType().getSlotSize() : -1;
  assertEquals(expectedSize, column.getStats().getAvgSerializedSize(), 0.0001);
  assertEquals(expectedSize, column.getStats().getMaxSize(), 0.0001);
}

/** Loading an HBase-backed table must yield an HBaseTable instance. */
@Test
public void testInternalHBaseTable() throws CatalogException {
  // Cast will fail if table not an HBaseTable
  HBaseTable table = (HBaseTable)
      catalog_.getOrLoadTable("functional_hbase", "internal_hbase_table");
  assertNotNull("functional_hbase.internal_hbase_table was not found", table);
}

/** Looking up a non-existent database must return null (not throw). */
@Test
public void testDatabaseDoesNotExist() {
  Db nonExistentDb = catalog_.getDb("doesnotexist");
  assertNull(nonExistentDb);
}

/** Owner and table type must round-trip through the metastore for Impala-created tables. */
@Test
public void testCreateTableMetadata() throws CatalogException {
  Table table = catalog_.getOrLoadTable("functional", "alltypes");
  // Tables are created via Impala so the metadata should have been populated properly.
  // alltypes is an external table.
  assertEquals(System.getProperty("user.name"), table.getMetaStoreTable().getOwner());
  assertEquals(TableType.EXTERNAL_TABLE.toString(),
      table.getMetaStoreTable().getTableType());
  // alltypesinsert is created using CREATE TABLE LIKE and is a MANAGED table
  table = catalog_.getOrLoadTable("functional", "alltypesinsert");
  assertEquals(System.getProperty("user.name"), table.getMetaStoreTable().getOwner());
  assertEquals(TableType.MANAGED_TABLE.toString(),
      table.getMetaStoreTable().getTableType());
}

/**
 * Unsupported table types must load as IncompleteTable wrapping a
 * TableLoadingException with a descriptive message, rather than failing outright.
 */
@Test
public void testLoadingUnsupportedTableTypes() throws CatalogException {
  Table table = catalog_.getOrLoadTable("functional", "hive_index_tbl");
  assertTrue(table instanceof IncompleteTable);
  IncompleteTable incompleteTable = (IncompleteTable) table;
  assertTrue(incompleteTable.getCause() instanceof TableLoadingException);
  assertEquals("Unsupported table type 'INDEX_TABLE' for: functional.hive_index_tbl",
      incompleteTable.getCause().getMessage());

  // Table with unsupported SerDe library.
  table = catalog_.getOrLoadTable("functional", "bad_serde");
  assertTrue(table instanceof IncompleteTable);
  incompleteTable = (IncompleteTable) table;
  assertTrue(incompleteTable.getCause() instanceof TableLoadingException);
  assertEquals("Impala does not support tables of this type. REASON: SerDe" +
      " library 'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe' " +
      "is not supported.", incompleteTable.getCause().getCause().getMessage());

  // Impala does not yet support Hive's LazyBinaryColumnarSerDe which can be
  // used for RCFILE tables.
  table = catalog_.getOrLoadTable("functional_rc", "rcfile_lazy_binary_serde");
  assertTrue(table instanceof IncompleteTable);
  incompleteTable = (IncompleteTable) table;
  assertTrue(incompleteTable.getCause() instanceof TableLoadingException);
  assertEquals("Impala does not support tables of this type. REASON: SerDe" +
      " library 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' " +
      "is not supported.", incompleteTable.getCause().getCause().getMessage());
}

// This table has metadata set so the escape is \n, which is also the tuple delim. This
// test validates that our representation of the catalog fixes this and removes the
// escape char.
@Test
public void TestTableWithBadEscapeChar() throws CatalogException {
  HdfsTable table =
      (HdfsTable) catalog_.getOrLoadTable("functional", "escapechartesttable");
  List<HdfsPartition> partitions = table.getPartitions();
  for (HdfsPartition p: partitions) {
    HdfsStorageDescriptor desc = p.getInputFormatDescriptor();
    assertEquals(desc.getEscapeChar(), HdfsStorageDescriptor.DEFAULT_ESCAPE_CHAR);
  }
}

/** Returns the signature strings of all functions registered in the given database. */
private List<String> getFunctionSignatures(String db) throws DatabaseNotFoundException {
  List<Function> fns = catalog_.getFunctions(db);
  List<String> names = Lists.newArrayList();
  for (Function fn: fns) {
    names.add(fn.signatureString());
  }
  return names;
}

/**
 * Exercises add/remove of scalar UDFs: case-insensitive names, overload
 * resolution by argument types, and that removing a non-existent signature
 * is a no-op.
 */
@Test
public void TestUdf() throws CatalogException {
  List<String> fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 0);

  ArrayList<Type> args1 = Lists.newArrayList();
  ArrayList<Type> args2 = Lists.<Type>newArrayList(Type.INT);
  ArrayList<Type> args3 = Lists.<Type>newArrayList(Type.TINYINT);

  // Removing a function that was never added is a no-op.
  catalog_.removeFunction(
      new Function(new FunctionName("default", "Foo"), args1, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 0);

  ScalarFunction udf1 = new ScalarFunction(new FunctionName("default", "Foo"),
      args1, Type.INVALID, new HdfsUri("/Foo"), "Foo.class", null, null);
  catalog_.addFunction(udf1);
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 1);
  // Function names are normalized to lower case in signatures.
  assertTrue(fnNames.contains("foo()"));

  // Same function name, overloaded arguments
  ScalarFunction udf2 = new ScalarFunction(new FunctionName("default", "Foo"),
      args2, Type.INVALID, new HdfsUri("/Foo"), "Foo.class", null, null);
  catalog_.addFunction(udf2);
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 2);
  assertTrue(fnNames.contains("foo()"));
  assertTrue(fnNames.contains("foo(INT)"));

  // Add a function with a new name
  ScalarFunction udf3 = new ScalarFunction(new FunctionName("default", "Bar"),
      args2, Type.INVALID, new HdfsUri("/Foo"), "Foo.class", null, null);
  catalog_.addFunction(udf3);
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 3);
  assertTrue(fnNames.contains("foo()"));
  assertTrue(fnNames.contains("foo(INT)"));
  assertTrue(fnNames.contains("bar(INT)"));

  // Drop Foo()
  catalog_.removeFunction(new Function(
      new FunctionName("default", "Foo"), args1, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 2);
  assertTrue(fnNames.contains("foo(INT)"));
  assertTrue(fnNames.contains("bar(INT)"));

  // Drop it again, no-op
  catalog_.removeFunction(new Function(
      new FunctionName("default", "Foo"), args1, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 2);
  assertTrue(fnNames.contains("foo(INT)"));
  assertTrue(fnNames.contains("bar(INT)"));

  // Drop bar(), no-op
  catalog_.removeFunction(new Function(
      new FunctionName("default", "Bar"), args1, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 2);
  assertTrue(fnNames.contains("foo(INT)"));
  assertTrue(fnNames.contains("bar(INT)"));

  // Drop bar(tinyint), no-op
  catalog_.removeFunction(new Function(
      new FunctionName("default", "Bar"), args3, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 2);
  assertTrue(fnNames.contains("foo(INT)"));
  assertTrue(fnNames.contains("bar(INT)"));

  // Drop bar(int)
  catalog_.removeFunction(new Function(
      new FunctionName("default", "Bar"), args2, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 1);
  assertTrue(fnNames.contains("foo(INT)"));

  // Drop foo(int)
  catalog_.removeFunction(new Function(
      new FunctionName("default", "foo"), args2, Type.INVALID, false));
  fnNames = getFunctionSignatures("default");
  assertEquals(fnNames.size(), 0);
}
}
/* * Copyright (c) 2012-2016, b3log.org & hacpai.com & fangstar.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.b3log.symphony.service; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import javax.inject.Inject; import org.apache.commons.lang.StringUtils; import org.b3log.latke.Keys; import org.b3log.latke.logging.Level; import org.b3log.latke.logging.Logger; import org.b3log.latke.model.Pagination; import org.b3log.latke.model.User; import org.b3log.latke.repository.CompositeFilter; import org.b3log.latke.repository.CompositeFilterOperator; import org.b3log.latke.repository.Filter; import org.b3log.latke.repository.FilterOperator; import org.b3log.latke.repository.PropertyFilter; import org.b3log.latke.repository.Query; import org.b3log.latke.repository.RepositoryException; import org.b3log.latke.repository.SortDirection; import org.b3log.latke.service.ServiceException; import org.b3log.latke.service.annotation.Service; import org.b3log.latke.util.CollectionUtils; import org.b3log.latke.util.Paginator; import org.b3log.symphony.model.Common; import org.b3log.symphony.model.Order; import org.b3log.symphony.model.UserExt; import org.b3log.symphony.repository.OrderRepository; import org.b3log.symphony.repository.UserRepository; import org.json.JSONArray; import org.json.JSONObject; /** * Order query service. 
* * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.0.0.1, Mar 1, 2016 * @since 1.4.0 */ @Service public class OrderQueryService { /** * Logger. */ private static final Logger LOGGER = Logger.getLogger(OrderQueryService.class); /** * Order repository. */ @Inject private OrderRepository orderRepository; /** * User repository. */ @Inject private UserRepository userRepository; /** * Gets orders by the specified request json object. * * @param requestJSONObject the specified request json object, for example, <pre> * { * "paginationCurrentPageNum": 1, * "paginationPageSize": 20, * "paginationWindowSize": 10 * }, see {@link Pagination} for more details * </pre> * * @param orderFields the specified order fields to return * @return for example, <pre> * { * "pagination": { * "paginationPageCount": 100, * "paginationPageNums": [1, 2, 3, 4, 5] * }, * "orders": [{ * "oId": "", * "orderUserName": "", * "orderRealUserName": "", * * .... * }, ....] * } * </pre> * * @throws ServiceException service exception * @see Pagination */ public JSONObject getOrders(final JSONObject requestJSONObject, final Map<String, Class<?>> orderFields) throws ServiceException { final JSONObject ret = new JSONObject(); final int currentPageNum = requestJSONObject.optInt(Pagination.PAGINATION_CURRENT_PAGE_NUM); final int pageSize = requestJSONObject.optInt(Pagination.PAGINATION_PAGE_SIZE); final int windowSize = requestJSONObject.optInt(Pagination.PAGINATION_WINDOW_SIZE); final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize) .addSort(Order.ORDER_STATUS, SortDirection.ASCENDING) .addSort(Keys.OBJECT_ID, SortDirection.DESCENDING); for (final Map.Entry<String, Class<?>> field : orderFields.entrySet()) { query.addProjection(field.getKey(), field.getValue()); } final List<Filter> filters = new ArrayList<Filter>(); final int status = requestJSONObject.optInt(Order.ORDER_STATUS); filters.add(new PropertyFilter(Order.ORDER_STATUS, FilterOperator.EQUAL, 
status)); if (requestJSONObject.has(Order.ORDER_PRODUCT_CATEGORY)) { filters.add(new PropertyFilter(Order.ORDER_PRODUCT_CATEGORY, FilterOperator.EQUAL, requestJSONObject.optString(Order.ORDER_PRODUCT_CATEGORY))); } else { filters.add(new PropertyFilter(Order.ORDER_PRODUCT_CATEGORY, FilterOperator.NOT_EQUAL, "")); } filters.add(new PropertyFilter(Order.ORDER_CREATE_TIME, FilterOperator.GREATER_THAN_OR_EQUAL, requestJSONObject.optLong(Common.FROM))); filters.add(new PropertyFilter(Order.ORDER_CREATE_TIME, FilterOperator.LESS_THAN_OR_EQUAL, requestJSONObject.optLong(Common.TO))); query.setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters)); JSONObject result = null; try { result = orderRepository.get(query); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets order failed", e); throw new ServiceException(e); } final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT); final JSONObject pagination = new JSONObject(); ret.put(Pagination.PAGINATION, pagination); final List<Integer> pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize); pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount); pagination.put(Pagination.PAGINATION_PAGE_NUMS, pageNums); final JSONArray data = result.optJSONArray(Keys.RESULTS); final List<JSONObject> orders = CollectionUtils.<JSONObject>jsonArrayToList(data); for (final JSONObject order : orders) { final String buyerId = order.optString(Order.ORDER_BUYER_ID); final String handlerId = order.optString(Order.ORDER_HANDLER_ID); try { final JSONObject buyer = userRepository.get(buyerId); final String userName = buyer.optString(User.USER_NAME); final String userRealName = buyer.optString(UserExt.USER_REAL_NAME); order.put(Order.ORDER_T_BUYER_NAME, userName); order.put(Order.ORDER_T_BUYER_REAL_NAME, userRealName); order.put(Order.ORDER_T_HANDLER_NAME, ""); order.put(Order.ORDER_T_HANDLER_REAL_NAME, ""); if (StringUtils.isNotBlank(handlerId)) 
{ final JSONObject handler = userRepository.get(handlerId); final String handlerName = handler.optString(User.USER_NAME); final String handlerRealName = handler.optString(UserExt.USER_REAL_NAME); order.put(Order.ORDER_T_HANDLER_NAME, handlerName); order.put(Order.ORDER_T_HANDLER_REAL_NAME, handlerRealName); } } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Query user fialed", e); } final long createTime = order.optLong(Order.ORDER_CREATE_TIME); order.put(Order.ORDER_CREATE_TIME, new Date(createTime)); final long confirmTime = order.optLong(Order.ORDER_CONFIRM_TIME); order.put(Order.ORDER_CONFIRM_TIME, new Date(confirmTime)); } ret.put(Order.ORDERS, orders); return ret; } /** * Gets a order by the specified id. * * @param orderId the specified id * @return order, return {@code null} if not found * @throws ServiceException service exception */ public JSONObject getOrder(final String orderId) throws ServiceException { try { return orderRepository.get(orderId); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, "Gets a order [orderId=" + orderId + "] failed", e); throw new ServiceException(e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.avro; import org.apache.avro.Conversions; import org.apache.avro.JsonProperties; import org.apache.avro.LogicalType; import org.apache.avro.LogicalTypes; import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.Schema.Type; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Array; import org.apache.avro.generic.GenericFixed; import org.apache.avro.generic.GenericRecord; import org.apache.avro.specific.SpecificRecord; import org.apache.avro.util.Utf8; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.apache.nifi.serialization.SimpleRecordSchema; import org.apache.nifi.serialization.record.DataType; import org.apache.nifi.serialization.record.MapRecord; import org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.RecordField; import org.apache.nifi.serialization.record.RecordFieldType; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.serialization.record.SchemaIdentifier; import org.apache.nifi.serialization.record.StandardSchemaIdentifier; import 
org.apache.nifi.serialization.record.type.ArrayDataType;
import org.apache.nifi.serialization.record.type.ChoiceDataType;
import org.apache.nifi.serialization.record.type.MapDataType;
import org.apache.nifi.serialization.record.type.RecordDataType;
import org.apache.nifi.serialization.record.util.DataTypeUtils;
import org.apache.nifi.serialization.record.util.IllegalTypeConversionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

// Static utility methods for converting between the NiFi record model
// (RecordSchema/DataType) and the Avro type system (Schema/LogicalType).
public class AvroTypeUtil {
    private static final Logger logger = LoggerFactory.getLogger(AvroTypeUtil.class);
    public static final String AVRO_SCHEMA_FORMAT = "avro";

    // Avro logical type names (see Avro spec "Logical Types").
    private static final String LOGICAL_TYPE_DATE = "date";
    private static final String LOGICAL_TYPE_TIME_MILLIS = "time-millis";
    private static final String LOGICAL_TYPE_TIME_MICROS = "time-micros";
    private static final String LOGICAL_TYPE_TIMESTAMP_MILLIS = "timestamp-millis";
    private static final String LOGICAL_TYPE_TIMESTAMP_MICROS = "timestamp-micros";
    private static final String LOGICAL_TYPE_DECIMAL = "decimal";

    /**
     * Returns the Avro Schema for the given RecordSchema. If the record schema
     * carries its original Avro schema text, that text is parsed and returned;
     * otherwise an Avro schema is synthesized field by field.
     */
    public static Schema extractAvroSchema(final RecordSchema recordSchema) {
        if (recordSchema == null) {
            throw new IllegalArgumentException("RecordSchema cannot be null");
        }

        final Optional<String> schemaFormatOption = recordSchema.getSchemaFormat();
        if (!schemaFormatOption.isPresent()) {
            return buildAvroSchema(recordSchema);
        }

        final String schemaFormat = schemaFormatOption.get();
        if (!schemaFormat.equals(AVRO_SCHEMA_FORMAT)) {
            return buildAvroSchema(recordSchema);
        }

        final Optional<String> textOption = recordSchema.getSchemaText();
        if (!textOption.isPresent()) {
            return buildAvroSchema(recordSchema);
        }

        final String text = textOption.get();
        return new Schema.Parser().parse(text);
    }

    // Synthesizes a top-level Avro record schema named "nifiRecord" from the record fields.
    private static Schema buildAvroSchema(final RecordSchema recordSchema) {
        final List<Field> avroFields = new ArrayList<>(recordSchema.getFieldCount());
        for (final RecordField recordField : recordSchema.getFields()) {
            avroFields.add(buildAvroField(recordField));
        }

        final Schema avroSchema = Schema.createRecord("nifiRecord", null, "org.apache.nifi", false, avroFields);
        return avroSchema;
    }

    // Builds a single Avro Field (schema + default + aliases) from a RecordField.
    private static Field buildAvroField(final RecordField recordField) {
        final Schema schema = buildAvroSchema(recordField.getDataType(), recordField.getFieldName(), recordField.isNullable());
        final Field field = new Field(recordField.getFieldName(), schema, null, recordField.getDefaultValue());
        for (final String alias : recordField.getAliases()) {
            field.addAlias(alias);
        }

        return field;
    }

    /**
     * Maps a NiFi DataType to an Avro Schema. Temporal types get the matching
     * Avro logical type; nullable fields become a union with NULL.
     * NOTE(review): BIGINT is mapped to Avro STRING here (not LONG) — presumably
    * because it backs BigInteger values that may exceed long range; confirm.
     */
    private static Schema buildAvroSchema(final DataType dataType, final String fieldName, final boolean nullable) {
        final Schema schema;

        switch (dataType.getFieldType()) {
            case ARRAY:
                final ArrayDataType arrayDataType = (ArrayDataType) dataType;
                final DataType elementDataType = arrayDataType.getElementType();
                // byte[] is represented as Avro BYTES rather than an array of ints
                if (RecordFieldType.BYTE.equals(elementDataType.getFieldType())) {
                    schema = Schema.create(Type.BYTES);
                } else {
                    final Schema elementType = buildAvroSchema(elementDataType, fieldName, false);
                    schema = Schema.createArray(elementType);
                }
                break;
            case BIGINT:
                schema = Schema.create(Type.STRING);
                break;
            case BOOLEAN:
                schema = Schema.create(Type.BOOLEAN);
                break;
            case BYTE:
                schema = Schema.create(Type.INT);
                break;
            case CHAR:
                schema = Schema.create(Type.STRING);
                break;
            case CHOICE:
                final ChoiceDataType choiceDataType = (ChoiceDataType) dataType;
                final List<DataType> options = choiceDataType.getPossibleSubTypes();

                // We need to keep track of which types have been added to the union, because if we have
                // two elements in the UNION with the same type, it will fail - even if the logical type is
                // different. So if we have an int and a logical type date (which also has a 'concrete type' of int)
                // then an Exception will be thrown when we try to create the union. To avoid this, we just keep track
                // of the Types and avoid adding it in such a case.
                final List<Schema> unionTypes = new ArrayList<>(options.size());
                final Set<Type> typesAdded = new HashSet<>();

                for (final DataType option : options) {
                    final Schema optionSchema = buildAvroSchema(option, fieldName, false);
                    if (!typesAdded.contains(optionSchema.getType())) {
                        unionTypes.add(optionSchema);
                        typesAdded.add(optionSchema.getType());
                    }
                }

                schema = Schema.createUnion(unionTypes);
                break;
            case DATE:
                schema = Schema.create(Type.INT);
                LogicalTypes.date().addToSchema(schema);
                break;
            case DOUBLE:
                schema = Schema.create(Type.DOUBLE);
                break;
            case FLOAT:
                schema = Schema.create(Type.FLOAT);
                break;
            case INT:
                schema = Schema.create(Type.INT);
                break;
            case LONG:
                schema = Schema.create(Type.LONG);
                break;
            case MAP:
                schema = Schema.createMap(buildAvroSchema(((MapDataType) dataType).getValueType(), fieldName, false));
                break;
            case RECORD:
                final RecordDataType recordDataType = (RecordDataType) dataType;
                final RecordSchema childSchema = recordDataType.getChildSchema();

                final List<Field> childFields = new ArrayList<>(childSchema.getFieldCount());
                for (final RecordField field : childSchema.getFields()) {
                    childFields.add(buildAvroField(field));
                }

                // Child record types are named "<fieldName>Type" to keep names unique
                schema = Schema.createRecord(fieldName + "Type", null, "org.apache.nifi", false, childFields);
                break;
            case SHORT:
                schema = Schema.create(Type.INT);
                break;
            case STRING:
                schema = Schema.create(Type.STRING);
                break;
            case TIME:
                schema = Schema.create(Type.INT);
                LogicalTypes.timeMillis().addToSchema(schema);
                break;
            case TIMESTAMP:
                schema = Schema.create(Type.LONG);
                LogicalTypes.timestampMillis().addToSchema(schema);
                break;
            default:
                return null;
        }

        if (nullable) {
            return nullable(schema);
        } else {
            return schema;
        }
    }

    // Wraps the given schema in a union with NULL, reusing an existing union
    // (and leaving it untouched if it already contains NULL).
    private static Schema nullable(final Schema schema) {
        if (schema.getType() == Type.UNION) {
            final List<Schema> unionTypes = new ArrayList<>(schema.getTypes());
            final Schema nullSchema = Schema.create(Type.NULL);
            if (unionTypes.contains(nullSchema)) {
                return schema;
            }

            unionTypes.add(nullSchema);
            return Schema.createUnion(unionTypes);
        }

        return Schema.createUnion(Schema.create(Type.NULL), schema);
    }

    /**
     * Returns a DataType for the given Avro Schema
     *
     * @param avroSchema the Avro Schema to convert
     * @return a Data Type that corresponds to the given Avro Schema
     */
    public static DataType determineDataType(final Schema avroSchema) {
        return determineDataType(avroSchema, new HashMap<>());
    }

    /**
     * Returns a DataType for the given Avro Schema. knownRecordTypes acts as a
     * memo of fully-qualified record names already converted, which both caches
     * results and breaks infinite recursion on self-referencing record schemas.
     */
    public static DataType determineDataType(final Schema avroSchema, Map<String, DataType> knownRecordTypes) {

        if (knownRecordTypes == null) {
            throw new IllegalArgumentException("'knownRecordTypes' cannot be null.");
        }

        final Type avroType = avroSchema.getType();

        // Logical types take precedence over the underlying primitive type.
        final LogicalType logicalType = avroSchema.getLogicalType();
        if (logicalType != null) {
            final String logicalTypeName = logicalType.getName();
            switch (logicalTypeName) {
                case LOGICAL_TYPE_DATE:
                    return RecordFieldType.DATE.getDataType();
                case LOGICAL_TYPE_TIME_MILLIS:
                case LOGICAL_TYPE_TIME_MICROS:
                    return RecordFieldType.TIME.getDataType();
                case LOGICAL_TYPE_TIMESTAMP_MILLIS:
                case LOGICAL_TYPE_TIMESTAMP_MICROS:
                    return RecordFieldType.TIMESTAMP.getDataType();
                case LOGICAL_TYPE_DECIMAL:
                    // We convert Decimal to Double.
                    // Alternatively we could convert it to String, but numeric type is generally more preferable by users.
                    return RecordFieldType.DOUBLE.getDataType();
            }
        }

        switch (avroType) {
            case ARRAY:
                return RecordFieldType.ARRAY.getArrayDataType(determineDataType(avroSchema.getElementType(), knownRecordTypes));
            case BYTES:
            case FIXED:
                return RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType());
            case BOOLEAN:
                return RecordFieldType.BOOLEAN.getDataType();
            case DOUBLE:
                return RecordFieldType.DOUBLE.getDataType();
            case ENUM:
            case STRING:
                return RecordFieldType.STRING.getDataType();
            case FLOAT:
                return RecordFieldType.FLOAT.getDataType();
            case INT:
                return RecordFieldType.INT.getDataType();
            case LONG:
                return RecordFieldType.LONG.getDataType();
            case RECORD: {
                // NOTE(review): a null namespace yields "null.<name>" as the key;
                // consistent with createSchema() below, so lookups still match.
                String schemaFullName = avroSchema.getNamespace() + "." + avroSchema.getName();
                if (knownRecordTypes.containsKey(schemaFullName)) {
                    return knownRecordTypes.get(schemaFullName);
                } else {
                    // Register the (empty) record type BEFORE recursing into the
                    // fields so self-references resolve to this same instance.
                    SimpleRecordSchema recordSchema = new SimpleRecordSchema(SchemaIdentifier.EMPTY);
                    DataType recordSchemaType = RecordFieldType.RECORD.getRecordDataType(recordSchema);
                    knownRecordTypes.put(schemaFullName, recordSchemaType);

                    final List<Field> avroFields = avroSchema.getFields();
                    final List<RecordField> recordFields = new ArrayList<>(avroFields.size());

                    for (final Field field : avroFields) {
                        final String fieldName = field.name();
                        final Schema fieldSchema = field.schema();
                        final DataType fieldType = determineDataType(fieldSchema, knownRecordTypes);
                        final boolean nullable = isNullable(fieldSchema);
                        addFieldToList(recordFields, field, fieldName, fieldSchema, fieldType, nullable);
                    }

                    recordSchema.setFields(recordFields);
                    return recordSchemaType;
                }
            }
            case NULL:
                return RecordFieldType.STRING.getDataType();
            case MAP:
                final Schema valueSchema = avroSchema.getValueType();
                final DataType valueType = determineDataType(valueSchema, knownRecordTypes);
                return RecordFieldType.MAP.getMapDataType(valueType);
            case UNION: {
                final List<Schema> nonNullSubSchemas = getNonNullSubSchemas(avroSchema);

                // A union of [null, X] degrades to just X's data type.
                if (nonNullSubSchemas.size() == 1) {
                    return determineDataType(nonNullSubSchemas.get(0), knownRecordTypes);
                }

                final List<DataType> possibleChildTypes = new ArrayList<>(nonNullSubSchemas.size());
                for (final Schema subSchema : nonNullSubSchemas) {
                    final DataType childDataType = determineDataType(subSchema, knownRecordTypes);
                    possibleChildTypes.add(childDataType);
                }

                return RecordFieldType.CHOICE.getChoiceDataType(possibleChildTypes);
            }
        }

        return null;
    }

    // Returns the union's member schemas with NULL filtered out.
    private static List<Schema> getNonNullSubSchemas(final Schema avroSchema) {
        final List<Schema> unionFieldSchemas = avroSchema.getTypes();
        if (unionFieldSchemas == null) {
            return Collections.emptyList();
        }

        final List<Schema> nonNullTypes = new ArrayList<>(unionFieldSchemas.size());
        for (final Schema fieldSchema : unionFieldSchemas) {
            if (fieldSchema.getType() != Type.NULL) {
                nonNullTypes.add(fieldSchema);
            }
        }

        return nonNullTypes;
    }

    // Converts an Avro Schema to a RecordSchema, including the schema text.
    public static RecordSchema createSchema(final Schema avroSchema) {
        return createSchema(avroSchema, true);
    }

    // Converts an Avro Schema to a RecordSchema, optionally embedding the schema text.
    public static RecordSchema createSchema(final Schema avroSchema, final boolean includeText) {
        if (avroSchema == null) {
            throw new IllegalArgumentException("Avro Schema cannot be null");
        }

        SchemaIdentifier identifier = new StandardSchemaIdentifier.Builder().name(avroSchema.getName()).build();
        return createSchema(avroSchema, includeText ? avroSchema.toString() : null, identifier);
    }

    /**
     * Converts an Avro Schema to a RecordSchema
     *
     * @param avroSchema the Avro Schema to convert
     * @param schemaText the textual representation of the schema
     * @param schemaId the identifier of the schema
     * @return the Corresponding Record Schema
     */
    public static RecordSchema createSchema(final Schema avroSchema, final String schemaText, final SchemaIdentifier schemaId) {
        if (avroSchema == null) {
            throw new IllegalArgumentException("Avro Schema cannot be null");
        }

        // Seed the known-records map with this schema's own name so recursive
        // field references back to the top-level record resolve correctly.
        final String schemaFullName = avroSchema.getNamespace() + "." + avroSchema.getName();
        final SimpleRecordSchema recordSchema = schemaText == null ? new SimpleRecordSchema(schemaId) : new SimpleRecordSchema(schemaText, AVRO_SCHEMA_FORMAT, schemaId);
        recordSchema.setSchemaName(avroSchema.getName());
        recordSchema.setSchemaNamespace(avroSchema.getNamespace());
        final DataType recordSchemaType = RecordFieldType.RECORD.getRecordDataType(recordSchema);
        final Map<String, DataType> knownRecords = new HashMap<>();
        knownRecords.put(schemaFullName, recordSchemaType);

        final List<RecordField> recordFields = new ArrayList<>(avroSchema.getFields().size());
        for (final Field field : avroSchema.getFields()) {
            final String fieldName = field.name();
            final Schema fieldSchema = field.schema();
            final DataType dataType = AvroTypeUtil.determineDataType(fieldSchema, knownRecords);
            final boolean nullable = isNullable(fieldSchema);
            addFieldToList(recordFields, field, fieldName, fieldSchema, dataType, nullable);
        }

        recordSchema.setFields(recordFields);
        return recordSchema;
    }

    // True if the schema is NULL or a union that (recursively) contains NULL.
    public static boolean isNullable(final Schema schema) {
        final Type schemaType = schema.getType();
        if (schemaType == Type.UNION) {
            for (final Schema unionSchema : schema.getTypes()) {
                if (isNullable(unionSchema)) {
                    return true;
                }
            }
        }

        return schemaType == Type.NULL;
    }

    // Boxes a byte[] into an Object[] of Byte (the record model's array representation).
    public static Object[] convertByteArray(final byte[] bytes) {
        final Object[] array = new Object[bytes.length];
        for (int i = 0; i < bytes.length; i++) {
            array[i] = Byte.valueOf(bytes[i]);
        }
        return array;
    }

    // Packs an Object[] of Byte back into a ByteBuffer; rejects non-Byte elements.
    public static ByteBuffer convertByteArray(final Object[] bytes) {
        final ByteBuffer bb = ByteBuffer.allocate(bytes.length);
        for (final Object o : bytes) {
            if (o instanceof Byte) {
                bb.put(((Byte) o).byteValue());
            } else {
                throw new IllegalTypeConversionException("Cannot convert value " + bytes + " of type " + bytes.getClass() + " to ByteBuffer");
            }
        }

        bb.flip();
        return bb;
    }

    /**
     * Method that attempts to map a record field into a provided schema
     * @param avroSchema - Schema to map into
     * @param recordField - The field of the record to be mapped
     * @return Pair with the LHS being the field name and RHS being the
mapped field from the schema */ protected static Pair<String, Field> lookupField(final Schema avroSchema, final RecordField recordField) { String fieldName = recordField.getFieldName(); // Attempt to locate the field as is in a true 1:1 mapping with the same name Field field = avroSchema.getField(fieldName); if (field == null) { // No straight mapping was found, so check the aliases to see if it can be mapped for(final String alias: recordField.getAliases()) { field = avroSchema.getField(alias); if (field != null) { fieldName = alias; break; } } } return new ImmutablePair<>(fieldName, field); } public static GenericRecord createAvroRecord(final Record record, final Schema avroSchema) throws IOException { return createAvroRecord(record, avroSchema, StandardCharsets.UTF_8); } public static GenericRecord createAvroRecord(final Record record, final Schema avroSchema, final Charset charset) throws IOException { final GenericRecord rec = new GenericData.Record(avroSchema); final RecordSchema recordSchema = record.getSchema(); for (final RecordField recordField : recordSchema.getFields()) { final Object rawValue = record.getValue(recordField); Pair<String, Field> fieldPair = lookupField(avroSchema, recordField); final String fieldName = fieldPair.getLeft(); final Field field = fieldPair.getRight(); if (field == null) { continue; } final Object converted = convertToAvroObject(rawValue, field.schema(), fieldName, charset); rec.put(fieldName, converted); } // see if the Avro schema has any fields that aren't in the RecordSchema, and if those fields have a default // value then we want to populate it in the GenericRecord being produced for (final Field field : avroSchema.getFields()) { final Optional<RecordField> recordField = recordSchema.getField(field.name()); if (!recordField.isPresent() && rec.get(field.name()) == null && field.defaultVal() != null) { rec.put(field.name(), field.defaultVal()); } } return rec; } /** * Convert a raw value to an Avro object to serialize in 
Avro type system, using the provided character set when necessary. * The counter-part method which reads an Avro object back to a raw value is {@link #normalizeValue(Object, Schema, String)}. */ public static Object convertToAvroObject(final Object rawValue, final Schema fieldSchema) { return convertToAvroObject(rawValue, fieldSchema, StandardCharsets.UTF_8); } /** * Convert a raw value to an Avro object to serialize in Avro type system, using the provided character set when necessary. * The counter-part method which reads an Avro object back to a raw value is {@link #normalizeValue(Object, Schema, String)}. */ public static Object convertToAvroObject(final Object rawValue, final Schema fieldSchema, final Charset charset) { return convertToAvroObject(rawValue, fieldSchema, fieldSchema.getName(), charset); } /** * Adds fields to <tt>recordFields</tt> list. * @param recordFields - record fields are added to this list. * @param field - the field * @param fieldName - field name * @param fieldSchema - field schema * @param dataType - data type * @param nullable - is nullable? */ private static void addFieldToList(final List<RecordField> recordFields, final Field field, final String fieldName, final Schema fieldSchema, final DataType dataType, final boolean nullable) { if (field.defaultVal() == JsonProperties.NULL_VALUE) { recordFields.add(new RecordField(fieldName, dataType, field.aliases(), nullable)); } else { Object defaultValue = field.defaultVal(); if (fieldSchema.getType() == Schema.Type.ARRAY && !DataTypeUtils.isArrayTypeCompatible(defaultValue, ((ArrayDataType) dataType).getElementType())) { defaultValue = defaultValue instanceof List ? 
((List<?>) defaultValue).toArray() : new Object[0]; } recordFields.add(new RecordField(fieldName, dataType, defaultValue, field.aliases(), nullable)); } } private static Long getLongFromTimestamp(final Object rawValue, final Schema fieldSchema, final String fieldName) { final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat(); Timestamp t = DataTypeUtils.toTimestamp(rawValue, () -> DataTypeUtils.getDateFormat(format), fieldName); return t.getTime(); } @SuppressWarnings("unchecked") private static Object convertToAvroObject(final Object rawValue, final Schema fieldSchema, final String fieldName, final Charset charset) { if (rawValue == null) { return null; } switch (fieldSchema.getType()) { case INT: { final LogicalType logicalType = fieldSchema.getLogicalType(); if (logicalType == null) { return DataTypeUtils.toInteger(rawValue, fieldName); } if (LOGICAL_TYPE_DATE.equals(logicalType.getName())) { final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat(); final Date date = DataTypeUtils.toDate(rawValue, () -> DataTypeUtils.getDateFormat(format), fieldName); final Duration duration = Duration.between(new Date(0L).toInstant(), new Date(date.getTime()).toInstant()); final long days = duration.toDays(); return (int) days; } else if (LOGICAL_TYPE_TIME_MILLIS.equals(logicalType.getName())) { final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat(); final Time time = DataTypeUtils.toTime(rawValue, () -> DataTypeUtils.getDateFormat(format), fieldName); final Date date = new Date(time.getTime()); final Duration duration = Duration.between(date.toInstant().truncatedTo(ChronoUnit.DAYS), date.toInstant()); final long millisSinceMidnight = duration.toMillis(); return (int) millisSinceMidnight; } return DataTypeUtils.toInteger(rawValue, fieldName); } case LONG: { final LogicalType logicalType = fieldSchema.getLogicalType(); if (logicalType == null) { return DataTypeUtils.toLong(rawValue, fieldName); } if 
(LOGICAL_TYPE_TIME_MICROS.equals(logicalType.getName())) { final long longValue = getLongFromTimestamp(rawValue, fieldSchema, fieldName); final Date date = new Date(longValue); final Duration duration = Duration.between(date.toInstant().truncatedTo(ChronoUnit.DAYS), date.toInstant()); return duration.toMillis() * 1000L; } else if (LOGICAL_TYPE_TIMESTAMP_MILLIS.equals(logicalType.getName())) { final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat(); Timestamp t = DataTypeUtils.toTimestamp(rawValue, () -> DataTypeUtils.getDateFormat(format), fieldName); return getLongFromTimestamp(rawValue, fieldSchema, fieldName); } else if (LOGICAL_TYPE_TIMESTAMP_MICROS.equals(logicalType.getName())) { return getLongFromTimestamp(rawValue, fieldSchema, fieldName) * 1000L; } return DataTypeUtils.toLong(rawValue, fieldName); } case BYTES: case FIXED: final LogicalType logicalType = fieldSchema.getLogicalType(); if (logicalType != null && LOGICAL_TYPE_DECIMAL.equals(logicalType.getName())) { final LogicalTypes.Decimal decimalType = (LogicalTypes.Decimal) logicalType; final BigDecimal rawDecimal; if (rawValue instanceof BigDecimal) { rawDecimal = (BigDecimal) rawValue; } else if (rawValue instanceof Double) { rawDecimal = BigDecimal.valueOf((Double) rawValue); } else if (rawValue instanceof String) { rawDecimal = new BigDecimal((String) rawValue); } else if (rawValue instanceof Integer) { rawDecimal = new BigDecimal((Integer) rawValue); } else if (rawValue instanceof Long) { rawDecimal = new BigDecimal((Long) rawValue); } else { throw new IllegalTypeConversionException("Cannot convert value " + rawValue + " of type " + rawValue.getClass() + " to a logical decimal"); } // If the desired scale is different than this value's coerce scale. final int desiredScale = decimalType.getScale(); final BigDecimal decimal = rawDecimal.scale() == desiredScale ? rawDecimal : rawDecimal.setScale(desiredScale, BigDecimal.ROUND_HALF_UP); return fieldSchema.getType() == Type.BYTES ? 
new Conversions.DecimalConversion().toBytes(decimal, fieldSchema, logicalType) //return GenericByte : new Conversions.DecimalConversion().toFixed(decimal, fieldSchema, logicalType); //return GenericFixed } if (rawValue instanceof byte[]) { return ByteBuffer.wrap((byte[]) rawValue); } if (rawValue instanceof String) { return ByteBuffer.wrap(((String) rawValue).getBytes(charset)); } if (rawValue instanceof Object[]) { return AvroTypeUtil.convertByteArray((Object[]) rawValue); } else { throw new IllegalTypeConversionException("Cannot convert value " + rawValue + " of type " + rawValue.getClass() + " to a ByteBuffer"); } case MAP: if (rawValue instanceof Record) { final Record recordValue = (Record) rawValue; final Map<String, Object> map = new HashMap<>(); for (final RecordField recordField : recordValue.getSchema().getFields()) { final Object v = recordValue.getValue(recordField); if (v != null) { map.put(recordField.getFieldName(), v); } } return map; } else if (rawValue instanceof Map) { final Map<String, Object> objectMap = (Map<String, Object>) rawValue; final Map<String, Object> map = new HashMap<>(objectMap.size()); for (final String s : objectMap.keySet()) { final Object converted = convertToAvroObject(objectMap.get(s), fieldSchema.getValueType(), fieldName + "[" + s + "]", charset); map.put(s, converted); } return map; } else { throw new IllegalTypeConversionException("Cannot convert value " + rawValue + " of type " + rawValue.getClass() + " to a Map"); } case RECORD: final GenericData.Record avroRecord = new GenericData.Record(fieldSchema); final Record record = (Record) rawValue; for (final RecordField recordField : record.getSchema().getFields()) { final Object recordFieldValue = record.getValue(recordField); final String recordFieldName = recordField.getFieldName(); final Field field = fieldSchema.getField(recordFieldName); if (field == null) { continue; } final Object converted = convertToAvroObject(recordFieldValue, field.schema(), fieldName + "/" + 
recordFieldName, charset); avroRecord.put(recordFieldName, converted); } return avroRecord; case UNION: return convertUnionFieldValue(rawValue, fieldSchema, schema -> convertToAvroObject(rawValue, schema, fieldName, charset), fieldName); case ARRAY: final Object[] objectArray = (Object[]) rawValue; final List<Object> list = new ArrayList<>(objectArray.length); int i = 0; for (final Object o : objectArray) { final Object converted = convertToAvroObject(o, fieldSchema.getElementType(), fieldName + "[" + i + "]", charset); list.add(converted); i++; } return list; case BOOLEAN: return DataTypeUtils.toBoolean(rawValue, fieldName); case DOUBLE: return DataTypeUtils.toDouble(rawValue, fieldName); case FLOAT: return DataTypeUtils.toFloat(rawValue, fieldName); case NULL: return null; case ENUM: return new GenericData.EnumSymbol(fieldSchema, rawValue); case STRING: return DataTypeUtils.toString(rawValue, (String) null, charset); } return rawValue; } public static Map<String, Object> convertAvroRecordToMap(final GenericRecord avroRecord, final RecordSchema recordSchema) { return convertAvroRecordToMap(avroRecord, recordSchema, StandardCharsets.UTF_8); } public static Map<String, Object> convertAvroRecordToMap(final GenericRecord avroRecord, final RecordSchema recordSchema, final Charset charset) { final Map<String, Object> values = new HashMap<>(recordSchema.getFieldCount()); for (final RecordField recordField : recordSchema.getFields()) { Object value = avroRecord.get(recordField.getFieldName()); if (value == null) { for (final String alias : recordField.getAliases()) { value = avroRecord.get(alias); if (value != null) { break; } } } final String fieldName = recordField.getFieldName(); try { final Field avroField = avroRecord.getSchema().getField(fieldName); if (avroField == null) { values.put(fieldName, null); continue; } final Schema fieldSchema = avroField.schema(); final Object rawValue = normalizeValue(value, fieldSchema, fieldName); final DataType desiredType = 
recordField.getDataType(); final Object coercedValue = DataTypeUtils.convertType(rawValue, desiredType, fieldName, charset); values.put(fieldName, coercedValue); } catch (Exception ex) { logger.debug("fail to convert field " + fieldName, ex ); throw ex; } } return values; } /** * Convert value of a nullable union field. * @param originalValue original value * @param fieldSchema the union field schema * @param conversion the conversion function which takes a non-null field schema within the union field and returns a converted value * @return a converted value */ private static Object convertUnionFieldValue(final Object originalValue, final Schema fieldSchema, final Function<Schema, Object> conversion, final String fieldName) { boolean foundNonNull = false; for (final Schema subSchema : fieldSchema.getTypes()) { if (subSchema.getType() == Type.NULL) { continue; } foundNonNull = true; final DataType desiredDataType = AvroTypeUtil.determineDataType(subSchema); try { final Object convertedValue = conversion.apply(subSchema); if (isCompatibleDataType(convertedValue, desiredDataType)) { return convertedValue; } // For logical types those store with different type (e.g. BigDecimal as ByteBuffer), check compatibility using the original rawValue if (subSchema.getLogicalType() != null && DataTypeUtils.isCompatibleDataType(originalValue, desiredDataType)) { return convertedValue; } } catch (Exception e) { // If failed with one of possible types, continue with the next available option. 
if (logger.isDebugEnabled()) { logger.debug("Cannot convert value {} to type {}", originalValue, desiredDataType, e); } } } if (foundNonNull) { throw new IllegalTypeConversionException("Cannot convert value " + originalValue + " of type " + originalValue.getClass() + " because no compatible types exist in the UNION for field " + fieldName); } return null; } private static boolean isCompatibleDataType(final Object value, final DataType dataType) { if (value == null) { return false; } switch (dataType.getFieldType()) { case RECORD: if (value instanceof GenericRecord || value instanceof SpecificRecord) { return true; } break; case STRING: if (value instanceof Utf8) { return true; } break; case ARRAY: if (value instanceof Array || value instanceof List || value instanceof ByteBuffer) { return true; } break; case MAP: if (value instanceof Map) { return true; } } return DataTypeUtils.isCompatibleDataType(value, dataType); } /** * Convert an Avro object to a normal Java objects for further processing. * The counter-part method which convert a raw value to an Avro object is {@link #convertToAvroObject(Object, Schema, String, Charset)} */ private static Object normalizeValue(final Object value, final Schema avroSchema, final String fieldName) { if (value == null) { return null; } switch (avroSchema.getType()) { case INT: { final LogicalType logicalType = avroSchema.getLogicalType(); if (logicalType == null) { return value; } final String logicalName = logicalType.getName(); if (LOGICAL_TYPE_DATE.equals(logicalName)) { // date logical name means that the value is number of days since Jan 1, 1970 return new java.sql.Date(TimeUnit.DAYS.toMillis((int) value)); } else if (LOGICAL_TYPE_TIME_MILLIS.equals(logicalName)) { // time-millis logical name means that the value is number of milliseconds since midnight. 
return new java.sql.Time((int) value); } break; } case LONG: { final LogicalType logicalType = avroSchema.getLogicalType(); if (logicalType == null) { return value; } final String logicalName = logicalType.getName(); if (LOGICAL_TYPE_TIME_MICROS.equals(logicalName)) { return new java.sql.Time(TimeUnit.MICROSECONDS.toMillis((long) value)); } else if (LOGICAL_TYPE_TIMESTAMP_MILLIS.equals(logicalName)) { return new java.sql.Timestamp((long) value); } else if (LOGICAL_TYPE_TIMESTAMP_MICROS.equals(logicalName)) { return new java.sql.Timestamp(TimeUnit.MICROSECONDS.toMillis((long) value)); } break; } case UNION: if (value instanceof GenericData.Record) { final GenericData.Record avroRecord = (GenericData.Record) value; return normalizeValue(value, avroRecord.getSchema(), fieldName); } return convertUnionFieldValue(value, avroSchema, schema -> normalizeValue(value, schema, fieldName), fieldName); case RECORD: final GenericData.Record record = (GenericData.Record) value; final Schema recordSchema = record.getSchema(); final List<Field> recordFields = recordSchema.getFields(); final Map<String, Object> values = new HashMap<>(recordFields.size()); for (final Field field : recordFields) { final Object avroFieldValue = record.get(field.name()); final Object fieldValue = normalizeValue(avroFieldValue, field.schema(), fieldName + "/" + field.name()); values.put(field.name(), fieldValue); } final RecordSchema childSchema = AvroTypeUtil.createSchema(recordSchema, false); return new MapRecord(childSchema, values); case BYTES: final ByteBuffer bb = (ByteBuffer) value; final LogicalType logicalType = avroSchema.getLogicalType(); if (logicalType != null && LOGICAL_TYPE_DECIMAL.equals(logicalType.getName())) { return new Conversions.DecimalConversion().fromBytes(bb, avroSchema, logicalType); } return AvroTypeUtil.convertByteArray(bb.array()); case FIXED: final GenericFixed fixed = (GenericFixed) value; return AvroTypeUtil.convertByteArray(fixed.bytes()); case ENUM: return 
value.toString(); case NULL: return null; case STRING: return value.toString(); case ARRAY: if (value instanceof List) { final List<?> list = (List<?>) value; final Object[] valueArray = new Object[list.size()]; for (int i = 0; i < list.size(); i++) { final Schema elementSchema = avroSchema.getElementType(); valueArray[i] = normalizeValue(list.get(i), elementSchema, fieldName + "[" + i + "]"); } return valueArray; } else { final GenericData.Array<?> array = (GenericData.Array<?>) value; final Object[] valueArray = new Object[array.size()]; for (int i = 0; i < array.size(); i++) { final Schema elementSchema = avroSchema.getElementType(); valueArray[i] = normalizeValue(array.get(i), elementSchema, fieldName + "[" + i + "]"); } return valueArray; } case MAP: final Map<?, ?> avroMap = (Map<?, ?>) value; final Map<String, Object> map = new HashMap<>(avroMap.size()); for (final Map.Entry<?, ?> entry : avroMap.entrySet()) { Object obj = entry.getValue(); if (obj instanceof Utf8 || obj instanceof CharSequence) { obj = obj.toString(); } final String key = entry.getKey().toString(); obj = normalizeValue(obj, avroSchema.getValueType(), fieldName + "[" + key + "]"); map.put(key, obj); } return map; } return value; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.parquet.format;

import static org.apache.parquet.format.FileMetaData._Fields.CREATED_BY;
import static org.apache.parquet.format.FileMetaData._Fields.KEY_VALUE_METADATA;
import static org.apache.parquet.format.FileMetaData._Fields.NUM_ROWS;
import static org.apache.parquet.format.FileMetaData._Fields.ROW_GROUPS;
import static org.apache.parquet.format.FileMetaData._Fields.SCHEMA;
import static org.apache.parquet.format.FileMetaData._Fields.VERSION;
import static org.apache.parquet.format.event.Consumers.fieldConsumer;
import static org.apache.parquet.format.event.Consumers.listElementsOf;
import static org.apache.parquet.format.event.Consumers.listOf;
import static org.apache.parquet.format.event.Consumers.struct;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;

import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TIOStreamTransport;

import org.apache.parquet.format.event.Consumers.Consumer;
import org.apache.parquet.format.event.Consumers.DelegatingFieldConsumer;
import org.apache.parquet.format.event.EventBasedThriftReader;
import org.apache.parquet.format.event.TypedConsumer.I32Consumer;
import org.apache.parquet.format.event.TypedConsumer.I64Consumer;
import org.apache.parquet.format.event.TypedConsumer.StringConsumer;

/**
 * Utility to read/write metadata
 * We use the TCompactProtocol to serialize metadata
 *
 * @author Julien Le Dem
 *
 */
public class Util {

  /**
   * Serializes the given page header to the stream using TCompactProtocol.
   */
  public static void writePageHeader(PageHeader pageHeader, OutputStream to) throws IOException {
    write(pageHeader, to);
  }

  /**
   * Deserializes a page header from the stream using TCompactProtocol.
   */
  public static PageHeader readPageHeader(InputStream from) throws IOException {
    return read(from, new PageHeader());
  }

  /**
   * Serializes the file metadata to the stream using TCompactProtocol.
   */
  public static void writeFileMetaData(org.apache.parquet.format.FileMetaData fileMetadata, OutputStream to) throws IOException {
    write(fileMetadata, to);
  }

  /**
   * Deserializes the file metadata (including row groups) from the stream.
   */
  public static FileMetaData readFileMetaData(InputStream from) throws IOException {
    return read(from, new FileMetaData());
  }

  /**
   * reads the meta data from the stream
   * @param from the stream to read the metadata from
   * @param skipRowGroups whether row groups should be skipped
   * @return the resulting metadata
   * @throws IOException
   */
  public static FileMetaData readFileMetaData(InputStream from, boolean skipRowGroups) throws IOException {
    FileMetaData md = new FileMetaData();
    if (skipRowGroups) {
      // Use the event-based reader so the (potentially large) row-group list is never materialized.
      readFileMetaData(from, new DefaultFileMetaDataConsumer(md), skipRowGroups);
    } else {
      read(from, md);
    }
    return md;
  }

  /**
   * To read metadata in a streaming fashion.
   *
   * @author Julien Le Dem
   *
   */
  public static abstract class FileMetaDataConsumer {
    abstract public void setVersion(int version);
    abstract public void setSchema(List<SchemaElement> schema);
    abstract public void setNumRows(long numRows);
    abstract public void addRowGroup(RowGroup rowGroup);
    abstract public void addKeyValueMetaData(KeyValue kv);
    abstract public void setCreatedBy(String createdBy);
  }

  /**
   * Simple default consumer that sets the fields
   *
   * @author Julien Le Dem
   *
   */
  public static final class DefaultFileMetaDataConsumer extends FileMetaDataConsumer {
    // Target object that receives every consumed field.
    private final FileMetaData md;

    public DefaultFileMetaDataConsumer(FileMetaData md) {
      this.md = md;
    }

    @Override
    public void setVersion(int version) {
      md.setVersion(version);
    }

    @Override
    public void setSchema(List<SchemaElement> schema) {
      md.setSchema(schema);
    }

    @Override
    public void setNumRows(long numRows) {
      md.setNum_rows(numRows);
    }

    @Override
    public void setCreatedBy(String createdBy) {
      md.setCreated_by(createdBy);
    }

    @Override
    public void addRowGroup(RowGroup rowGroup) {
      md.addToRow_groups(rowGroup);
    }

    @Override
    public void addKeyValueMetaData(KeyValue kv) {
      md.addToKey_value_metadata(kv);
    }
  }

  /**
   * Streams the file metadata from the stream into the given consumer, including row groups.
   */
  public static void readFileMetaData(InputStream from, FileMetaDataConsumer consumer) throws IOException {
    readFileMetaData(from, consumer, false);
  }

  /**
   * Streams the file metadata from the stream into the given consumer, optionally
   * skipping the row-group list entirely so it is never deserialized.
   */
  public static void readFileMetaData(InputStream from, final FileMetaDataConsumer consumer, boolean skipRowGroups) throws IOException {
    try {
      // Register a typed callback per Thrift field id; unregistered fields are skipped by the reader.
      DelegatingFieldConsumer eventConsumer = fieldConsumer()
          .onField(VERSION, new I32Consumer() {
            @Override
            public void consume(int value) {
              consumer.setVersion(value);
            }
          }).onField(SCHEMA, listOf(SchemaElement.class, new Consumer<List<SchemaElement>>() {
            @Override
            public void consume(List<SchemaElement> schema) {
              consumer.setSchema(schema);
            }
          })).onField(NUM_ROWS, new I64Consumer() {
            @Override
            public void consume(long value) {
              consumer.setNumRows(value);
            }
          }).onField(KEY_VALUE_METADATA, listElementsOf(struct(KeyValue.class, new Consumer<KeyValue>() {
            @Override
            public void consume(KeyValue kv) {
              consumer.addKeyValueMetaData(kv);
            }
          }))).onField(CREATED_BY, new StringConsumer() {
            @Override
            public void consume(String value) {
              consumer.setCreatedBy(value);
            }
          });
      if (!skipRowGroups) {
        // Row groups are consumed one element at a time rather than as a whole list.
        eventConsumer = eventConsumer.onField(ROW_GROUPS, listElementsOf(struct(RowGroup.class, new Consumer<RowGroup>() {
          @Override
          public void consume(RowGroup rowGroup) {
            consumer.addRowGroup(rowGroup);
          }
        })));
      }
      new EventBasedThriftReader(protocol(from)).readStruct(eventConsumer);
    } catch (TException e) {
      throw new IOException("can not read FileMetaData: " + e.getMessage(), e);
    }
  }

  private static TProtocol protocol(OutputStream to) {
    return protocol(new TIOStreamTransport(to));
  }

  private static TProtocol protocol(InputStream from) {
    return protocol(new TIOStreamTransport(from));
  }

  // InterningProtocol deduplicates repeated strings read through the compact protocol.
  private static InterningProtocol protocol(TIOStreamTransport t) {
    return new InterningProtocol(new TCompactProtocol(t));
  }

  // Reads the given Thrift struct from the stream, wrapping Thrift errors in IOException.
  private static <T extends TBase<?,?>> T read(InputStream from, T tbase) throws IOException {
    try {
      tbase.read(protocol(from));
      return tbase;
    } catch (TException e) {
      throw new IOException("can not read " + tbase.getClass() + ": " + e.getMessage(), e);
    }
  }

  // Writes the given Thrift struct to the stream, wrapping Thrift errors in IOException.
  private static void write(TBase<?, ?> tbase, OutputStream to) throws IOException {
    try {
      tbase.write(protocol(to));
    } catch (TException e) {
      throw new IOException("can not write " + tbase, e);
    }
  }
}
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.http;

import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.junit.After;
import org.junit.Test;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.boot.autoconfigure.gson.GsonAutoConfiguration;
import org.springframework.boot.autoconfigure.http.JacksonHttpMessageConvertersConfiguration.MappingJackson2HttpMessageConverterConfiguration;
import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration;
import org.springframework.hateoas.ResourceSupport;
import org.springframework.hateoas.mvc.TypeConstrainedMappingJackson2HttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.http.converter.json.GsonHttpMessageConverter;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.http.converter.xml.MappingJackson2XmlHttpMessageConverter;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link HttpMessageConvertersAutoConfiguration}.
 *
 * @author Dave Syer
 * @author Oliver Gierke
 * @author David Liu
 * @author Andy Wilkinson
 * @author Sebastien Deleuze
 */
public class HttpMessageConvertersAutoConfigurationTests {

	private AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();

	@After
	public void close() {
		if (this.context != null) {
			this.context.close();
		}
	}

	@Test
	public void noObjectMapperMeansNoConverter() throws Exception {
		this.context.register(HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertThat(this.context.getBeansOfType(ObjectMapper.class)).isEmpty();
		assertThat(this.context.getBeansOfType(MappingJackson2HttpMessageConverter.class))
				.isEmpty();
		assertThat(
				this.context.getBeansOfType(MappingJackson2XmlHttpMessageConverter.class))
						.isEmpty();
	}

	@Test
	public void defaultJacksonConverter() throws Exception {
		this.context.register(JacksonObjectMapperConfig.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(MappingJackson2HttpMessageConverter.class,
				"mappingJackson2HttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				MappingJackson2HttpMessageConverter.class);
	}

	@Test
	public void defaultJacksonConvertersWithBuilder() throws Exception {
		this.context.register(JacksonObjectMapperBuilderConfig.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(MappingJackson2HttpMessageConverter.class,
				"mappingJackson2HttpMessageConverter");
		assertConverterBeanExists(MappingJackson2XmlHttpMessageConverter.class,
				"mappingJackson2XmlHttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				MappingJackson2HttpMessageConverter.class);
		assertConverterBeanRegisteredWithHttpMessageConverters(
				MappingJackson2XmlHttpMessageConverter.class);
	}

	@Test
	public void customJacksonConverter() throws Exception {
		this.context.register(JacksonObjectMapperConfig.class,
				JacksonConverterConfig.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(MappingJackson2HttpMessageConverter.class,
				"customJacksonMessageConverter");
	}

	@Test
	public void noGson() throws Exception {
		this.context.register(HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		// Use collection assertions (consistent with the other tests) so a failure
		// reports the unexpected beans rather than just "expected true".
		assertThat(this.context.getBeansOfType(Gson.class)).isEmpty();
		assertThat(this.context.getBeansOfType(GsonHttpMessageConverter.class))
				.isEmpty();
	}

	@Test
	public void defaultGsonConverter() throws Exception {
		this.context.register(GsonAutoConfiguration.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(GsonHttpMessageConverter.class,
				"gsonHttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				GsonHttpMessageConverter.class);
	}

	@Test
	public void jacksonIsPreferredByDefaultWhenBothGsonAndJacksonAreAvailable() {
		this.context.register(GsonAutoConfiguration.class, JacksonAutoConfiguration.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(MappingJackson2HttpMessageConverter.class,
				"mappingJackson2HttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				MappingJackson2HttpMessageConverter.class);
		assertThat(this.context.getBeansOfType(GsonHttpMessageConverter.class)).isEmpty();
	}

	@Test
	public void gsonCanBePreferredWhenBothGsonAndJacksonAreAvailable() {
		this.context.register(GsonAutoConfiguration.class, JacksonAutoConfiguration.class,
				HttpMessageConvertersAutoConfiguration.class);
		TestPropertyValues.of("spring.http.converters.preferred-json-mapper:gson")
				.applyTo(this.context);
		this.context.refresh();
		assertConverterBeanExists(GsonHttpMessageConverter.class,
				"gsonHttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				GsonHttpMessageConverter.class);
		assertThat(this.context.getBeansOfType(MappingJackson2HttpMessageConverter.class))
				.isEmpty();
	}

	@Test
	public void customGsonConverter() throws Exception {
		this.context.register(GsonAutoConfiguration.class, GsonConverterConfig.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(GsonHttpMessageConverter.class,
				"customGsonMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				GsonHttpMessageConverter.class);
	}

	@Test
	public void defaultStringConverter() throws Exception {
		this.context.register(HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(StringHttpMessageConverter.class,
				"stringHttpMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				StringHttpMessageConverter.class);
	}

	@Test
	public void customStringConverter() throws Exception {
		this.context.register(StringConverterConfig.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		assertConverterBeanExists(StringHttpMessageConverter.class,
				"customStringMessageConverter");
		assertConverterBeanRegisteredWithHttpMessageConverters(
				StringHttpMessageConverter.class);
	}

	@Test
	public void typeConstrainedConverterDoesNotPreventAutoConfigurationOfJacksonConverter()
			throws Exception {
		this.context.register(JacksonObjectMapperBuilderConfig.class,
				TypeConstrainedConverterConfiguration.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		BeanDefinition beanDefinition = this.context
				.getBeanDefinition("mappingJackson2HttpMessageConverter");
		assertThat(beanDefinition.getFactoryBeanName()).isEqualTo(
				MappingJackson2HttpMessageConverterConfiguration.class.getName());
	}

	@Test
	public void typeConstrainedConverterFromSpringDataDoesNotPreventAutoConfigurationOfJacksonConverter()
			throws Exception {
		this.context.register(JacksonObjectMapperBuilderConfig.class,
				RepositoryRestMvcConfiguration.class,
				HttpMessageConvertersAutoConfiguration.class);
		this.context.refresh();
		BeanDefinition beanDefinition = this.context
				.getBeanDefinition("mappingJackson2HttpMessageConverter");
		assertThat(beanDefinition.getFactoryBeanName()).isEqualTo(
				MappingJackson2HttpMessageConverterConfiguration.class.getName());
	}

	// Asserts that exactly one bean of the given type exists and that it is registered
	// under the expected bean name.
	private void assertConverterBeanExists(Class<?> type, String beanName) {
		assertThat(this.context.getBeansOfType(type)).hasSize(1);
		List<String> beanNames = Arrays.asList(this.context.getBeanDefinitionNames());
		assertThat(beanNames).contains(beanName);
	}

	// Asserts that the converter bean of the given type was picked up by the
	// HttpMessageConverters aggregate.
	private void assertConverterBeanRegisteredWithHttpMessageConverters(Class<?> type) {
		Object converter = this.context.getBean(type);
		HttpMessageConverters converters = this.context
				.getBean(HttpMessageConverters.class);
		// Collection assertion reports the registered converters on failure.
		assertThat(converters.getConverters()).contains(converter);
	}

	@Configuration
	protected static class JacksonObjectMapperConfig {

		@Bean
		public ObjectMapper objectMapper() {
			return new ObjectMapper();
		}

	}

	@Configuration
	protected static class JacksonObjectMapperBuilderConfig {

		@Bean
		public ObjectMapper objectMapper() {
			return new ObjectMapper();
		}

		@Bean
		public Jackson2ObjectMapperBuilder builder() {
			return new Jackson2ObjectMapperBuilder();
		}

	}

	@Configuration
	protected static class JacksonConverterConfig {

		@Bean
		public MappingJackson2HttpMessageConverter customJacksonMessageConverter(
				ObjectMapper objectMapper) {
			MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
			converter.setObjectMapper(objectMapper);
			return converter;
		}

	}

	@Configuration
	protected static class GsonConverterConfig {

		@Bean
		public GsonHttpMessageConverter customGsonMessageConverter(Gson gson) {
			GsonHttpMessageConverter converter = new GsonHttpMessageConverter();
			converter.setGson(gson);
			return converter;
		}

	}

	@Configuration
	protected static class StringConverterConfig {

		@Bean
		public StringHttpMessageConverter customStringMessageConverter() {
			return new StringHttpMessageConverter();
		}

	}

	@Configuration
	protected static class TypeConstrainedConverterConfiguration {

		@Bean
		public TypeConstrainedMappingJackson2HttpMessageConverter typeConstrainedConverter() {
			return new TypeConstrainedMappingJackson2HttpMessageConverter(
					ResourceSupport.class);
		}

	}

}
/*
 * Copyright (c) 2014- MHISoft LLC and/or its affiliates. All rights reserved.
 * Licensed to MHISoft LLC under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. MHISoft LLC licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.mhisoft.rdpro.ui;

import java.util.ArrayList;
import java.util.List;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.mhisoft.rdpro.FileRemoveStatistics;
import org.mhisoft.rdpro.RdProRunTimeProperties;

/**
 * Description: Console UI
 *
 * <p>Console implementation of the rdpro user interface. Writes to
 * {@code System.out}/{@code System.err}, reads confirmations from
 * {@code System.in}, and parses the command-line arguments into a
 * {@link RdProRunTimeProperties} holder.
 *
 * @author Tony Xue
 * @since Nov, 2014
 */
public class ConsoleRdProUIImpl extends AbstractRdProUIImpl {

    /** Prints {@code msg} to stdout without a trailing newline. */
    @Override
    public void print(final String msg) {
        System.out.print(msg);
    }

    /** Prints {@code msg} to stdout followed by a newline. */
    @Override
    public void println(final String msg) {
        System.out.println(msg);
    }

    /** Prints {@code msg} as a format string with the single argument {@code args}. */
    @Override
    public void printf(final String msg, Object args) {
        System.out.printf(msg, args);
    }

    /**
     * Asks {@code question} and returns true only for an explicit YES answer.
     * HELP prints the usage text and counts as "no"; QUIT exits the JVM from
     * inside {@link #getConfirmation}.
     *
     * @param question prompt shown to the user
     * @return true if the user answered YES, false for NO/HELP
     */
    @Override
    public boolean isAnswerY(String question) {
        Confirmation a = getConfirmation(question, Confirmation.YES, Confirmation.NO, Confirmation.HELP, Confirmation.QUIT);
        if (a == Confirmation.HELP) {
            help();
            return false;
        } else if (Confirmation.YES != a) {
            return false;
        }
        return true;
    }

    /**
     * Prompts on stdout and blocks reading stdin until the user types one of
     * the given options (matched against the option's all-lowercase or
     * all-uppercase spelling; mixed case is re-asked).
     *
     * <p>NOTE(review): answering QUIT terminates the whole JVM via
     * {@code System.exit(-2)} — callers never see that value returned.
     *
     * @param question prompt shown to the user
     * @param options  the accepted answers
     * @return the parsed confirmation (never QUIT, see above)
     */
    @Override
    public Confirmation getConfirmation(String question, Confirmation... options) {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        print(question);
        String a = null;
        // Accept each option in either all-lower or all-upper case form.
        List<String> optionsList = new ArrayList<String>();
        for (Confirmation option : options) {
            optionsList.add(option.toString().toLowerCase());
            optionsList.add(option.toString().toUpperCase());
        }
        try {
            // Loop until a non-blank, recognized answer is read.
            while (a == null || a.trim().length() == 0) {
                a = br.readLine();
                if (a != null && !optionsList.contains(a)) {
                    print("\tresponse \"" + a + "\" not recognized. input again:");
                    a = null; //keep asking
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        Confirmation ret = Confirmation.fromString(a);
        if (ret != null) {
            if (ret == Confirmation.QUIT)
                System.exit(-2); // hard exit on QUIT; no cleanup beyond JVM shutdown
            return ret;
        } else {
            // Input already passed the optionsList check, so this indicates a
            // mismatch between the option spellings and Confirmation.fromString.
            throw new RuntimeException("can't parse it, should not happen. input: " + a);
        }
    }

    /** Prints the build banner, disclaimer and the command-line usage text. */
    public void help() {
        printBuildAndDisclaimer();
        println("Usages (see https://github.com/mhisoft/rdpro/wiki):");
        println("\t rdpro [option] path-to-search -d [target-dir] ");
        println("\t path-to-search: The root path to search, default to the current dir.");
        println("\t -d or -dir specify the target dir. only dir names matched this name will be deleted. "
                + "if target file pattern is also specified, only matched files under these matched dirs will be deleted.");
        println("\t -tf file match patterns. Use comma to delimit multiple file match patterns. ex: *.repositories,*.log");
        println("\t -f force delete. Use it only when you are absolutely sure. Default:false ");
        println("\t -i interactive, Default:true");
        println("\t -unlink Unlink the hard linked directory first. Files in the linked directory won't be removed. Default:false.");
        println("\t -v verbose mode. Default:false.");
        /*println("\t -w number of worker threads, default 5");*/
        println("Examples:");
        println("\tRemove everything under a dir (purge a directory and everything under it): ");
        println("\t\t>rdpro c:\\mytempfiles");
        println("\tRemove all directories that matches a specified name recursively: ");
        println("\t\t>rdpro s:\\projects -d target ");
        println("\tRemove files matches a pattern recursively on Mac or Linux:");
        println("\t\t$rdpro.sh /Users/home/projects -d target -tf *.zip");
    }

    /**
     * Parses the command-line arguments into runtime properties.
     *
     * <p>Flags are matched case-insensitively; value-taking flags ({@code -w},
     * {@code -tf}, {@code -d}/{@code -dir}) consume the following argument by
     * advancing the loop index. Non-flag arguments are collected and assigned
     * to root dir / target dir by position afterwards.
     *
     * <p>NOTE(review): {@code -w} and {@code -tf} read {@code args[i + 1]}
     * without a bounds check — a trailing {@code -tf} with no value would throw
     * ArrayIndexOutOfBoundsException; for {@code -w} only NumberFormatException
     * is caught. Confirm whether that is acceptable for a CLI entry point.
     *
     * @param args raw command-line arguments
     * @return populated properties; {@code isSuccess()} is false when help was
     *         shown or an unknown flag was encountered
     */
    public RdProRunTimeProperties parseCommandLineArguments(String[] args) {
        RdProRunTimeProperties props = new RdProRunTimeProperties();
        List<String> noneHyfenArgs = new ArrayList<String>();
        props.setRootDir(System.getProperty("user.dir")); //default
        props.setUnLinkDirFirst(false); //default false

        for (int i = 0; i < args.length; i++) {
            String arg = args[i];

            // Skip blanks and the main-class token passed in by the launch script.
            if (arg.trim().length() == 0 || arg.startsWith("org.mhisoft.rdpro")) //launched from sh script, the jar is the first argument.
                continue;

            if (arg.equalsIgnoreCase("-h") || arg.equalsIgnoreCase("-help")) {
                help();
                props.setSuccess(false);
                return props;
            } else if (arg.equalsIgnoreCase("-v")) {
                props.setVerbose(true);
            } else if (arg.equalsIgnoreCase("-yes")) {
                //silent mode. !dangerous
                props.setAnswerYforAll(true);
            } else if (arg.equalsIgnoreCase("-debug")) {
                props.setDebug(true);
            } else if (arg.equalsIgnoreCase("-w")) {
                try {
                    props.setNumberOfWorkers(Integer.parseInt(args[i + 1]));
                    i++; //skip the next arg, it is the target.
                } catch (NumberFormatException e) {
                    // Bad worker count falls back to the default of 5 workers.
                    props.setNumberOfWorkers(5);
                }
            } else if (arg.equalsIgnoreCase("-f")) {
                // Force delete implies non-interactive mode.
                props.setForceDelete(true);
                props.setInteractive(false);
            } else if (arg.equalsIgnoreCase("-unlink")) {
                props.setUnLinkDirFirst(true);
            } else if (arg.equalsIgnoreCase("-tf")) {
                props.setTargetFilePatterns(args[i + 1]);
                i++;
            } else if (arg.equalsIgnoreCase("-i")) {
                // Interactive mode implies no force delete.
                props.setInteractive(true);
                props.setForceDelete(false);
            } else if (arg.equalsIgnoreCase("-d") || arg.equalsIgnoreCase("-dir")) {
                if (i + 1 < args.length)
                    props.setTargetDir(args[i + 1]);
                else
                    props.setTargetDir(null);
                i++; //skip the next arg, it is the target.
            } else {
                if (arg.startsWith("-")) {
                    System.err.println("The argument is not recognized:" + arg);
                    props.setSuccess(false);
                    return props;
                } else
                    //not start with "-"
                    if (arg != null && arg.trim().length() > 0)
                        noneHyfenArgs.add(arg);
            }
        }

        // Assign positional (non-flag) arguments.
        if (noneHyfenArgs.size() == 0) {
            props.setRootDir(System.getProperty("user.dir"));
        } else if (noneHyfenArgs.size() == 1) {
            //rdpro d:\temp -d classes
            if (props.getTargetDir() != null)
                props.setRootDir(noneHyfenArgs.get(0));

            //rdpro d:\temp (this is WIN) not applying for MAC
            // else if (noneHyfenArgs.get(0).contains(":") || noneHyfenArgs.get(0).startsWith("\\")
            // || noneHyfenArgs.get(0).startsWith("/")) {
            // // props.setRootDir(noneHyfenArgs.get(0));
            // }
            // else {
            props.setRootDir(noneHyfenArgs.get(0));
            // }
        } else if (noneHyfenArgs.size() >= 2) {
            // Two positional args: root dir then target dir.
            props.setRootDir(noneHyfenArgs.get(0));
            props.setTargetDir(noneHyfenArgs.get(1));
        }

        if (props.getRootDir() == null) {
            println("The root directory is not set. Using the current dir.");
            props.setRootDir(System.getProperty("user.dir"));
        }

        println("");
        props.setSuccess(true);
        return props;
    }

    /** Reports removal progress/statistics. Not implemented for the console UI. */
    public void reportStatus(FileRemoveStatistics frs) {
        //todo
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.river.jeri.internal.http;

import java.io.UnsupportedEncodingException;
import org.apache.river.jeri.internal.runtime.BASE64Encoder;
import java.net.Authenticator;
import java.net.InetAddress;
import java.net.PasswordAuthentication;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Remote HTTP server version/authentication information.
 *
 * <p>Tracks the HTTP version of a remote server plus the state needed to
 * answer Basic and Digest authentication challenges (RFC 2617 style):
 * scheme, realm, nonce, algorithm, opaque token and cached credentials.
 * Instances are mutable and not documented as thread-safe.
 *
 * REMIND: need manage/null out password more strictly?
 *
 * @author Sun Microsystems, Inc.
 */
class ServerInfo implements Cloneable {

    /** blank timestamp value */
    static final long NO_TIMESTAMP = -1L;

    /** hexadecimal char conversion table */
    private static final char[] hexChars = {
        '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
    };

    /** server host name */
    final String host;
    /** server port */
    final int port;
    /** HTTP major version */
    int major = 1;
    /** HTTP minor version */
    int minor = 0;
    /** authentication scheme, if any ("Basic" or "Digest") */
    String authScheme;
    /** authentication realm */
    String authRealm;
    /** authentication algorithm */
    String authAlgorithm;
    /** authentication cookie */
    String authOpaque;
    /** authentication challenge */
    String authNonce;
    /** authentication username */
    String authUser;
    /** authentication password */
    String authPassword;
    /** time of last update */
    long timestamp = NO_TIMESTAMP;

    /**
     * Creates new ServerInfo for server at given host/port.
     */
    ServerInfo(String host, int port) {
        this.host = host;
        this.port = port;
    }

    /**
     * Sets authentication information based on contents of given challenge
     * string (which should be the value of either the "WWW-Authenticate" or
     * "Proxy-Authenticate" header fields). If given string is null or empty,
     * clears any previous authentication information.
     *
     * <p>Digest is preferred over Basic when both challenges are present.
     * For a Digest challenge with {@code stale=true} the cached user/password
     * are kept so the request can be retried with the new nonce.
     *
     * @throws HttpParseException if the challenge line cannot be tokenized
     */
    void setAuthInfo(String str) throws HttpParseException {
        if (str == null || str.length() == 0) {
            // Empty challenge: forget all authentication state.
            authScheme = null;
            authRealm = null;
            authAlgorithm = null;
            authOpaque = null;
            authNonce = null;
            authUser = null;
            authPassword = null;
            return;
        }

        LineParser lp = new LineParser(str);
        Map entries = lp.getEntries("Digest");
        if (entries != null) {
            String realm = (String) entries.get("realm");
            String nonce = (String) entries.get("nonce");
            // A usable Digest challenge must carry both realm and nonce.
            if (realm != null && nonce != null) {
                authScheme = "Digest";
                authRealm = realm;
                authNonce = nonce;
                authAlgorithm = (String) entries.get("algorithm");
                authOpaque = (String) entries.get("opaque");
                // Only drop cached credentials when the nonce was not merely stale.
                if (!"true".equalsIgnoreCase((String) entries.get("stale"))) {
                    authUser = null;
                    authPassword = null;
                }
                return;
            }
        }
        if ((entries = lp.getEntries("Basic")) != null) {
            String realm = (String) entries.get("realm");
            if (realm != null) {
                authScheme = "Basic";
                authRealm = realm;
                authAlgorithm = null;
                authOpaque = null;
                authNonce = null;
                authUser = null;
                authPassword = null;
                return;
            }
        }
        // REMIND: no supported schemes found; clear auth info?
    }

    /**
     * Updates authentication information based on contents of given string
     * (which should be the value of either the "Authorization-Info" or
     * "Proxy-Authorization-Info" header fields). If given string is null or
     * empty, current authentication settings are left unchanged.
     *
     * <p>Currently only honors the Digest {@code nextnonce} directive.
     *
     * @throws HttpParseException if the header value cannot be tokenized
     */
    void updateAuthInfo(String str) throws HttpParseException {
        if (str == null || str.length() == 0) {
            return;
        }
        if ("Digest".equals(authScheme)) {
            LineParser lp = new LineParser(str);
            Map entries = lp.getAllEntries();
            String nextNonce = (String) entries.get("nextnonce");
            if (nextNonce != null) {
                authNonce = nextNonce;
            }
        }
    }

    /**
     * Returns (possibly null) authorization string based on current
     * authentication information in conjunction with the given request
     * arguments.
     *
     * <p>Lazily obtains credentials from the installed
     * {@link Authenticator} on first use and caches them in this object.
     * Returns null when no scheme is set, no credentials are available, or
     * the digest cannot be computed.
     */
    String getAuthString(String protocol, String method, String uri) {
        if (authScheme == null) {
            return null;
        }
        if (authUser == null) {
            // No cached credentials yet; ask the installed Authenticator.
            PasswordAuthentication pa = getPassword(protocol);
            if (pa == null) {
                return null;
            }
            String user = pa.getUserName();
            char[] password = pa.getPassword();
            if (user == null || password == null) {
                return null;
            }
            authUser = user;
            authPassword = new String(password);
        }

        if (authScheme.equals("Basic")) {
            BASE64Encoder enc = new BASE64Encoder();
            try {
                // Basic = base64("user:password")
                return "Basic " +
                    enc.encode((authUser + ":" + authPassword).getBytes("UTF-8"));
            } catch (UnsupportedEncodingException ex) {
                return null;
            }
        } else if (authScheme.equals("Digest")) {
            String digest;
            try {
                digest = computeDigest(method, uri);
            } catch (NoSuchAlgorithmException ex) {
                return null;
            } catch (UnsupportedEncodingException ex) {
                return null;
            }
            // Assemble the Digest credentials; opaque/algorithm are optional.
            String response = "Digest " +
                "username=\"" + authUser + "\", " +
                "realm=\"" + authRealm + "\", " +
                "nonce=\"" + authNonce + "\", " +
                "uri=\"" + uri + "\", " +
                "response=\"" + digest + "\"";
            if (authOpaque != null) {
                response += ", opaque=\"" + authOpaque + "\"";
            }
            if (authAlgorithm != null) {
                response += ", algorithm=" + authAlgorithm;
            }
            return response;
        } else {
            // authScheme is only ever set to "Basic" or "Digest" above.
            throw new InternalError();
        }
    }

    /**
     * Computes digest authentication response for request using the given
     * method and uri. Throws NoSuchAlgorithmException if server-specified
     * digest algorithm not supported.
     */
    private String computeDigest(String method, String uri)
        throws NoSuchAlgorithmException, UnsupportedEncodingException
    {
        // REMIND: cache MessageDigest?
        MessageDigest md = MessageDigest.getInstance(
            (authAlgorithm != null) ? authAlgorithm : "MD5");
        // RFC 2617: A1 = user:realm:password, A2 = method:uri,
        // response = H(H(A1) ":" nonce ":" H(A2))
        String hashA1 =
            encode(md, authUser + ":" + authRealm + ":" + authPassword);
        String hashA2 = encode(md, method + ":" + uri);
        return encode(md, hashA1 + ":" + authNonce + ":" + hashA2);
    }

    /**
     * Returns digest of the given string, represented as string of hexadecimal
     * digits.
     */
    private String encode(MessageDigest md, String str)
        throws UnsupportedEncodingException
    {
        md.reset();
        byte[] digest = md.digest(str.getBytes("UTF-8"));
        StringBuffer sbuf = new StringBuffer(digest.length * 2);
        for (int i = 0; i < digest.length; i++) {
            // Emit high nibble then low nibble as lowercase hex.
            sbuf.append(hexChars[(digest[i] >>> 4) & 0xF]);
            sbuf.append(hexChars[digest[i] & 0xF]);
        }
        return sbuf.toString();
    }

    /** Shallow copy; supported because the class implements Cloneable. */
    public Object clone() {
        try {
            return super.clone();
        } catch (CloneNotSupportedException ex) {
            throw new InternalError();
        }
    }

    /**
     * Class for parsing multi-part HTTP header lines that may appear as the
     * values of the WWW-Authenticate, Proxy-Authenticate, Authorization-Info
     * or Proxy-Authorization-Info header lines.
     */
    private static class LineParser {

        /* token types */
        private static final int EOL = -1;
        private static final int WORD = 0;
        private static final int QUOTE = 1;
        private static final int COMMA = 2;
        private static final int EQUALS = 3;

        // Parsed (key, value) pairs in order of appearance; value is null for
        // bare words such as scheme identifiers.
        private final List entries = new ArrayList();
        private final char[] ca;
        private int pos = 0;
        private String tokenString = null;

        /**
         * Attempts to parse the given line into a series of key/optional value
         * definitions. Throws an HttpParseException if the line contains
         * syntax errors.
         */
        LineParser(String line) throws HttpParseException {
            ca = line.toCharArray();
            int tok = nextToken();
            while (tok != EOL) {
                String key;
                if (tok == COMMA) {
                    // Tolerate stray separators between entries.
                    tok = nextToken();
                    continue;
                } else if (tok == WORD) {
                    key = tokenString;
                } else {
                    throw new HttpParseException("illegal key");
                }

                tok = nextToken();
                if (tok == COMMA) {
                    // key with no value, comma-terminated
                    entries.add(new String[] { key, null });
                    tok = nextToken();
                    continue;
                } else if (tok != EQUALS) {
                    // key with no value, followed by the next key (no '=')
                    entries.add(new String[] { key, null });
                    continue;
                }

                tok = nextToken();
                if (tok != WORD && tok != QUOTE) {
                    throw new HttpParseException("illegal value");
                }
                entries.add(new String[] { key, tokenString });

                tok = nextToken();
                if (tok == COMMA) {
                    tok = nextToken();
                    continue;
                } else if (tok != EOL) {
                    throw new HttpParseException("illegal separator");
                }
            }
        }

        /**
         * Returns code indicating next token in line. If token type is WORD,
         * tokenString is set to the word text; if returned type is QUOTE,
         * tokenString is set to the quoted string's contents.
         */
        private int nextToken() throws HttpParseException {
            int mark;

            // Skip leading whitespace.
            while (pos < ca.length && Character.isWhitespace(ca[pos])) {
                pos++;
            }
            if (pos >= ca.length) {
                return EOL;
            }
            switch (ca[pos]) {
                case ',':
                    pos++;
                    return COMMA;

                case '=':
                    pos++;
                    return EQUALS;

                case '\"':
                    mark = ++pos;
                    while (pos < ca.length && ca[pos] != '\"') {
                        pos++;
                    }
                    if (pos >= ca.length) {
                        throw new HttpParseException(
                            "unterminated quote string");
                    }
                    tokenString = new String(ca, mark, pos++ - mark);
                    return QUOTE;

                default:
                    // Unquoted word: runs until a separator or whitespace.
                    mark = pos;
                    while (pos < ca.length) {
                        char c = ca[pos];
                        if (c == ',' || c == '=' || c == '\"' ||
                            Character.isWhitespace(c))
                        {
                            break;
                        }
                        pos++;
                    }
                    tokenString = new String(ca, mark, pos - mark);
                    return WORD;
            }
        }

        /**
         * Returns all key/value entries associated with the given
         * authorization scheme in the parsed line, or null if the given scheme
         * was not described in the parsed line.  Authorization scheme
         * identifiers are those which appear without a subsequent '=' or ','
         * character before the next word; key/value entries are associated
         * with the nearest preceding scheme identifier. All key strings are
         * converted to lower case.
         */
        Map getEntries(String scheme) {
            Map map = null;
            String[][] ea =
                (String[][]) entries.toArray(new String[entries.size()][]);
            int i;

            // Find the bare-word entry matching the scheme name.
            for (i = 0; i < ea.length; i++) {
                if (ea[i][1] == null && scheme.equalsIgnoreCase(ea[i][0])) {
                    map = new HashMap();
                    break;
                }
            }
            if (map != null) {
                // Collect following key=value pairs until the next bare word.
                for (i++; i < ea.length && ea[i][1] != null; i++) {
                    map.put(ea[i][0].toLowerCase(), ea[i][1]);
                }
            }
            return map;
        }

        /**
         * Returns the key/value entries encountered in the parsed line.  This
         * method should be used to obtain the parse results for
         * Authorization-Info and Proxy-Authorization-Info values, which do not
         * contain authorization scheme identifiers.  All key strings are
         * converted to lower case.
         */
        Map getAllEntries() {
            Map map = new HashMap();
            String[][] ea =
                (String[][]) entries.toArray(new String[entries.size()][]);
            for (int i = 0; i < ea.length; i++) {
                map.put(ea[i][0].toLowerCase(), ea[i][1]);
            }
            return map;
        }
    }

    /**
     * Obtains PasswordAuthentication from the currently installed
     * Authenticator.
     */
    private PasswordAuthentication getPassword(final String protocol) {
        return (PasswordAuthentication) AccessController.doPrivileged(
            new PrivilegedAction() {
                public Object run() {
                    InetAddress addr = null;
                    try {
                        addr = InetAddress.getByName(host);
                    } catch (UnknownHostException ex) {
                        // Fall through with a null address; the Authenticator
                        // may still resolve credentials from host/port alone.
                    }
                    return Authenticator.requestPasswordAuthentication(
                        addr, port, protocol, authRealm, authScheme);
                }
            }
        );
    }
}
/**
 * Copyright (C) 2014-2017 Xavier Witdouck
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.zavtech.morpheus.sink;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.function.Consumer;

import com.google.gson.stream.JsonWriter;

import com.zavtech.morpheus.array.ArrayType;
import com.zavtech.morpheus.frame.DataFrame;
import com.zavtech.morpheus.frame.DataFrameColumn;
import com.zavtech.morpheus.frame.DataFrameException;
import com.zavtech.morpheus.frame.DataFrameSink;
import com.zavtech.morpheus.util.Initialiser;
import com.zavtech.morpheus.util.text.Formats;
import com.zavtech.morpheus.util.text.printer.Printer;

/**
 * A DataFrameSink implementation that writes a DataFrame out in a Morpheus specific JSON format.
 *
 * <p>The emitted document has the shape
 * {@code {"DataFrame": {"rowCount", "colCount", "rowKeys": {...}, "columns": [...]}}};
 * the exact ordering of writer calls below defines that format, so it must
 * not be reordered.
 *
 * <p><strong>This is open source software released under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a></strong></p>
 *
 * @author Xavier Witdouck
 */
public class JsonSink<R,C> implements DataFrameSink<R,C,JsonSinkOptions> {

    /**
     * Constructor
     */
    public JsonSink() {
        super();
    }

    /**
     * Writes the frame as JSON to the output resource described by the
     * configured {@link JsonSinkOptions} (encoding, target resource, formats).
     *
     * @param frame        the frame to serialize
     * @param configurator configures a fresh JsonSinkOptions instance
     * @throws DataFrameException if serialization fails for any reason
     */
    @Override
    public void write(DataFrame<R,C> frame, Consumer<JsonSinkOptions> configurator) {
        JsonWriter writer = null;
        try {
            final JsonSinkOptions jsonOptions = Initialiser.apply(JsonSinkOptions.class, configurator);
            final String encoding = jsonOptions.getEncoding();
            final OutputStream os = jsonOptions.getResource().toOutputStream();
            writer = new JsonWriter(new OutputStreamWriter(os, encoding));
            writer.setIndent(" ");
            writer.beginObject();
            writer.name("DataFrame");
            writer.beginObject();
            writer.name("rowCount").value(frame.rowCount());
            writer.name("colCount").value(frame.colCount());
            writeRowKeys(frame, writer, jsonOptions);
            writerColumns(frame, writer, jsonOptions);
            writer.endObject();
            writer.endObject();
        } catch (Exception ex) {
            throw new DataFrameException("Failed to write DataFrame to JSON output", ex);
        } finally {
            // Flush/close best-effort; failures here are logged, not rethrown,
            // so they cannot mask an exception from the try block.
            try {
                if (writer != null) {
                    writer.flush();
                    writer.close();
                }
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }

    /**
     * Writes row keys to json writer as {"rowKeys": {"type", "values": [...]}}.
     * Keys are rendered as strings via the Printer registered for the key type.
     *
     * <p>NOTE(review): the method-level type parameters {@code <R,C>} shadow
     * the class-level ones; harmless, but confirm it is intentional.
     *
     * @param frame the frame reference
     */
    @SuppressWarnings("unchecked")
    private <R,C> void writeRowKeys(DataFrame<R,C> frame, JsonWriter writer, JsonSinkOptions options) {
        try {
            final Formats formats = options.getFormats();
            final Class keyType = frame.rows().keyType();
            writer.name("rowKeys");
            writer.beginObject();
            writer.name("type").value(keyType.getSimpleName());
            writer.name("values");
            writer.beginArray();
            final Printer<Object> format = formats.getPrinterOrFail(keyType, null);
            frame.rows().keys().forEach(rowKey -> {
                try {
                    writer.value(format.apply(rowKey));
                } catch (IOException ex) {
                    // Wrap so the checked IOException can escape the lambda.
                    throw new DataFrameException("Failed to write DataFrame row key: " + rowKey, ex);
                }
            });
            writer.endArray();
            writer.endObject();
        } catch (DataFrameException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new DataFrameException("Failed to write row keys for DataFrame", ex);
        }
    }

    /**
     * Writes column data to the json writer as {"columns": [ ... ]}, one
     * object per column via {@link #writeColumn}.
     *
     * @param frame the frame reference
     */
    private <R,C> void writerColumns(DataFrame<R,C> frame, JsonWriter writer, JsonSinkOptions options) {
        try {
            writer.name("columns");
            writer.beginArray();
            frame.cols().forEach(column -> {
                writeColumn(frame, column, writer, options);
            });
            writer.endArray();
        } catch (DataFrameException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new DataFrameException("Failed to write DataFrame columns to json", ex);
        }
    }

    /**
     * Writes data for a specific column to the json writer: key, keyType,
     * dataType, defaultValue and the values array. Primitive columns
     * (boolean/int/long/double) are written natively; everything else goes
     * through the Printer registered for the column type.
     *
     * @param frame the frame reference
     * @param column the column reference
     */
    @SuppressWarnings("unchecked")
    private <R,C> void writeColumn(DataFrame<R,C> frame, DataFrameColumn<R,C> column, JsonWriter writer, JsonSinkOptions options) {
        try {
            final C colKey = column.key();
            final Formats formats = options.getFormats();
            final Class<?> colType = frame.cols().type(colKey);
            final ArrayType type = ArrayType.of(colType);
            final Object defaultValue = ArrayType.defaultValue(colType);
            final Class<?> typeClass = frame.cols().type(colKey);
            final Printer<Object> keyPrinter = formats.getPrinterOrFail(colKey.getClass());
            final Printer<Object> valuePrinter = formats.getPrinterOrFail(colKey, typeClass);
            if (valuePrinter == null) {
                throw new IllegalStateException("No Formats printer for column: " + colKey);
            }
            writer.beginObject();
            writer.name("key").value(keyPrinter.apply(colKey));
            writer.name("keyType").value(colKey.getClass().getSimpleName());
            writer.name("dataType").value(typeClass.getSimpleName());
            if (type.isBoolean()) {
                final boolean defaultBoolean = defaultValue == null ? false : (Boolean)defaultValue;
                writer.name("defaultValue").value(defaultBoolean);
                writer.name("values").beginArray();
                column.forEachValue(v -> {
                    try {
                        writer.value(v.getBoolean());
                    } catch (IOException ex) {
                        throw new DataFrameException("Failed to write DataFrame values for column " + colKey, ex);
                    }
                });
            } else if (type.isInteger()) {
                final Number defaultNumber = defaultValue == null ? 0 : (Number) defaultValue;
                writer.name("defaultValue").value(defaultNumber);
                writer.name("values").beginArray();
                column.forEachValue(v -> {
                    try {
                        writer.value(v.getInt());
                    } catch (IOException ex) {
                        throw new DataFrameException("Failed to write DataFrame values for column " + colKey, ex);
                    }
                });
            } else if (type.isLong()) {
                final Number defaultNumber = defaultValue == null ? 0 : (Number)defaultValue;
                writer.name("defaultValue").value(defaultNumber);
                writer.name("values").beginArray();
                column.forEachValue(v -> {
                    try {
                        writer.value(v.getLong());
                    } catch (IOException ex) {
                        throw new DataFrameException("Failed to write DataFrame values for column " + colKey, ex);
                    }
                });
            } else if (type.isDouble()) {
                final Number defaultNumber = defaultValue == null ? 0 : (Number)defaultValue;
                // NaN cannot be represented in JSON, so it maps to null.
                writer.name("defaultValue").value(Double.isNaN(defaultNumber.doubleValue()) ? null : defaultNumber);
                writer.name("values").beginArray();
                column.forEachValue(v -> {
                    try {
                        writer.value(v.getDouble());
                    } catch (IOException ex) {
                        throw new DataFrameException("Failed to write DataFrame values for column " + colKey, ex);
                    }
                });
            } else {
                // Non-primitive values are rendered to strings via the printer.
                writer.name("defaultValue").value(defaultValue == null ? null : valuePrinter.apply(defaultValue));
                writer.name("values").beginArray();
                column.forEachValue(v -> {
                    try {
                        final Object rawValue = v.getValue();
                        final String stringValue = valuePrinter.apply(rawValue);
                        writer.value(stringValue);
                    } catch (IOException ex) {
                        throw new DataFrameException("Failed to write DataFrame values for column " + colKey, ex);
                    }
                });
            }
            writer.endArray();
            writer.endObject();
        } catch (DataFrameException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new DataFrameException("Failed to write DataFrame values for column " + column.key(), ex);
        }
    }
}
package demo.RealtimeStreaming;

import java.util.BitSet;

import javax.realtime.AbsoluteTime;
import javax.realtime.Affinity;
import javax.realtime.AsyncEventHandler;
import javax.realtime.Clock;
import javax.realtime.PriorityParameters;
import javax.realtime.ProcessingGroupParameters;
import javax.realtime.ProcessorAffinityException;
import javax.realtime.RealtimeThread;
import javax.realtime.RelativeTime;

import SPRY.DataAllocation.CustomisedDataAllocationPolicy;
import SPRY.ExecutionTimeServer.DeferrableServer;
import SPRY.Streaming.RealTime.BatchedStream;
import SPRY.Streaming.RealTime.Receiver.RealtimeReceiver;

/* **************************************************************************************************************
   Run using the following commands:
   sudo taskset -c 0,1,2,3,4,5 jamaicavmm_slim_bin -Xbootclasspath/p:. -Dsun.boot.library.path=. CaseStudyEvaluation
 * **************************************************************************************************************/
/**
 * Real-time streaming case-study benchmark (RTSJ / SPRY).
 *
 * <p>Sets up a set of hard real-time "GAP" tasks on processors 1-4, a batched
 * stream-processing pipeline served by four deferrable servers, and a sender
 * thread on processor 0 that feeds items into the stream at configured delays.
 * All timing constants (periods, capacities, priorities) are part of the
 * experiment and must not be altered.
 */
public class CaseStudyEvaluation {
    // Shared receiver that the sender thread stores items into.
    public static RealtimeReceiver<Integer> receiver;
    public static final int MaxBatchSize = 17;
    // Common release time for all tasks (now + 2s, set in main).
    public static AbsoluteTime startTime;
    // Inter-arrival delays (ms) between successive stream items.
    public static int[] delays = { 5, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
            25, 25, 25, 25 };//TODO
    // Workload scale factor; overridable via args[0].
    public static long load = 10; //for mac TODO
    public static int itemId = 0;

    /**
     * Entry point: warms up, computes the common start time, then creates the
     * hard real-time tasks and the stream-processing pipeline.
     */
    public static void main(String[] args) throws InterruptedException {
        Thread.currentThread().setPriority(38);
        if (args.length > 0) load = Long.parseLong(args[0]);

        // Warm-up/calibration: run the busy-work loop and print elapsed ms.
        double start = System.currentTimeMillis();
        for (int i = 0; i < 3 * load; i++) work();
        System.out.println(System.currentTimeMillis()-start);

        /* start time */
        startTime = Clock.getRealtimeClock().getTime();
        startTime = startTime.add(new RelativeTime(2000, 0));
        createHardRealtimeTasks(startTime);
        createStreamProcessingTask();
        // Sanity check: setup must finish before the common release time.
        if (Clock.getRealtimeClock().getTime().compareTo(startTime) > 0)
            System.out.println("start time should be delayed");
    }

    /**
     * Builds the batched stream pipeline: receiver, timeout, processor
     * affinities (1-4), data-allocation policy, deferrable servers, a latency
     * miss handler, and the sender thread pinned to processor 0.
     */
    private static void createStreamProcessingTask() {
        DeferrableServer[] servers = createServers(startTime);
        receiver = new RealtimeReceiver<Integer>(MaxBatchSize) {
            @Override
            public void onStart() {}
            @Override
            public void start() {}
            @Override
            public void onStop() {}
            @Override
            public void stop() {}
        };
        RelativeTime timeout = new RelativeTime(400, 0); /* timeout */

        /* Allocates processor 1, 2, 3, 4 */
        BitSet affinities = new BitSet();
        affinities.set(1);
        affinities.set(2);
        affinities.set(3);
        affinities.set(4);

        // Static mapping of batch item indices to processors.
        CustomisedDataAllocationPolicy DAP = new CustomisedDataAllocationPolicy();
        DAP.addPairs(1, 2, 4, 8, 12, 16);
        DAP.addPairs(2, 3, 7, 10, 14);
        DAP.addPairs(3, 1, 5, 9, 13);
        DAP.addPairs(4, 0, 6, 11, 15);
        int prologueProessor = 1;

        BatchedStream<Integer> streaming = new BatchedStream<Integer>(
            receiver, timeout, 11,
            p -> p.forEachDeferred(x->{
                /* simulate the image generation work load */
                for (int i = 0; i < 40 * load; i++) work();
                //System.out.println(x + "\t" + Affinity.get(Thread.currentThread()).getProcessors());
                return;
            }),
            affinities, DAP, prologueProessor, servers
        );
        streaming.start(startTime);

        AsyncEventHandler latencyMissHandler = new AsyncEventHandler(){
            @Override
            public void handleAsyncEvent() {
                super.handleAsyncEvent();
                System.out.println("Latency Miss");
            }
        };

        // Sender: warms up, arms the latency-miss handler, then feeds items
        // with the configured delays and finally terminates the JVM.
        RealtimeThread sender = new RealtimeThread(){
            @Override
            public void run(){
                try {
                    sleep(startTime); /* sleep until startTime */
                    /* warm up */
                    for (int i = 0; i < 25; i++) {
                        sleep(new RelativeTime(25, 0));
                        receiver.store(-1);
                    }
                    sleep(500);
                    streaming.setLatencyMissHandler(new RelativeTime(480, 0), latencyMissHandler);
                    sleep(startTime.add(1600, 0));
                    RTThreadGenerator.warmupFinished = true;
                    for (int i = 0; i < delays.length; i++) {
                        sleep(new RelativeTime(delays[i], 0));
                        receiver.store(itemId++);
                    }
                    sleep(1000);
                    System.exit(0);
                } catch (InterruptedException e) {}
            }
        };

        /* sender runs on processor 0 */
        BitSet senderAffinity = new BitSet();
        senderAffinity.set(0);
        try {
            Affinity.set(Affinity.generate(senderAffinity), sender);
        } catch (ProcessorAffinityException e) { }
        sender.start();
    }

    /**
     * Creates the four deferrable servers (one per stream processor) with the
     * experiment's period/capacity/priority parameters.
     */
    private static DeferrableServer[] createServers(AbsoluteTime startTime) {
        DeferrableServer S0 = createServer(startTime, 400, 314, 19); /* 55 --> 19 */
        DeferrableServer S1 = createServer(startTime, 400, 317, 18); /* 23 --> 18 */
        DeferrableServer S2 = createServer(startTime, 200, 156, 27); /* 71 --> 27 */
        DeferrableServer S3 = createServer(startTime, 100, 78, 22); /* 35 --> 22 */
        DeferrableServer servers[] = new DeferrableServer[4];
        servers[0]=S0;servers[1]=S1;servers[2]=S2;servers[3]=S3;
        return servers;
    }

    /**
     * Creates one deferrable server.
     *
     * @param startTime release time of the server
     * @param T         period (and deadline) in ms
     * @param C         capacity in ms
     * @param prioirty  server priority
     */
    private static DeferrableServer createServer(AbsoluteTime startTime, int T, int C, int prioirty) {
        RelativeTime period, deadline;
        period = deadline = new RelativeTime(T,0);
        RelativeTime capacity = new RelativeTime(C,0);
        ProcessingGroupParameters pgp = new ProcessingGroupParameters(startTime, period, capacity, deadline, null, null);
        DeferrableServer s = new DeferrableServer(pgp, new PriorityParameters(prioirty),
                new PriorityParameters(10), new PriorityParameters(37));
        return s;
    }

    /**
     * Creates and starts the hard real-time "GAP" workload tasks, statically
     * partitioned over processors 1-4 with fixed periods and priorities.
     */
    private static void createHardRealtimeTasks(AbsoluteTime start) {
        /* GAP tasks in processor 1 */
        int CPUID = 1;
        RealtimeThread WeaponRelease = /* 98 --> 28 */
            RTThreadGenerator.create(CPUID, 200, load * 3, 28, start, "Weapon Release");
        RealtimeThread WeaponAiming = /* 64 --> 24 */
            RTThreadGenerator.create(CPUID, 50, load * 3, 24, start, "Weapon Aiming");
        RealtimeThread NavUpdate = /* 56 --> 20 */
            RTThreadGenerator.create(CPUID, 59, load * 8, 20, start, "Nav Update");

        /* GAP tasks in processor 2 */
        CPUID = 2;
        RealtimeThread RaderTrackingFilter = /* 84 --> 28 */
            RTThreadGenerator.create(CPUID, 25, load * 2, 28, start, "Rader Tracking Filter");
        RealtimeThread DisplayGraphic = /* 40 --> 24 */
            RTThreadGenerator.create(CPUID, 80, load * 9, 24, start, "Display Graphic");
        RealtimeThread NavSteeringCmds = /* 24 --> 20 */
            RTThreadGenerator.create(CPUID, 200, load * 3, 20, start, "Nav Steering Cmds");

        /* GAP tasks in processor 3 */
        CPUID = 3;
        RealtimeThread RWRContactMgmt = /* 72 --> 28 */
            RTThreadGenerator.create(CPUID, 25, load * 5, 28, start, "RWR Contact Mgmt");
        RealtimeThread DisplayStoresUpdate = /* 20 --> 24 */
            RTThreadGenerator.create(CPUID, 200, load, 24, start, "Display Stores Update");
        RealtimeThread DisplayStatUpdate = /* 12 --> 20 */
            RTThreadGenerator.create(CPUID, 200, load * 3, 20, start, "Display Stat Update");

        /* GAP tasks in processor 4 */
        CPUID = 4;
        RealtimeThread DataBusPollDevice = /* 68 --> 28 */
            RTThreadGenerator.create(CPUID, 40, load, 28, start, "Data Bus Poll Device");
        RealtimeThread RadarTargetUpdate = /* 60 --> 26 */
            RTThreadGenerator.create(CPUID, 50, load * 5, 26, start, "Radar Target Update");
        RealtimeThread DisplayHookUpdate = /* 36 --> 24 */
            RTThreadGenerator.create(CPUID, 80, load * 2, 24, start, "Display Hook Update");
        RealtimeThread TrackingTargetUpdate = /* 32 --> 20 */
            RTThreadGenerator.create(CPUID, 100, load * 5, 20, start, "Tracking Target Update");
        RealtimeThread DisplayKeySet = /* 16 --> 18 */
            RTThreadGenerator.create(CPUID, 200, load, 18, start, "Display Key Set");
        RealtimeThread BETEStatusUpdate = /* 8 --> 16 */
            RTThreadGenerator.create(CPUID, 1000, load, 16, start, "BET E Status Update");
        RealtimeThread NavStatus = /* 4 --> 14 */
            RTThreadGenerator.create(CPUID, 1000, load, 14, start, "Nav Status");

        /*start all GAP tasks */
        WeaponRelease.start();
        WeaponAiming.start();
        NavUpdate.start();
        RaderTrackingFilter.start();
        DisplayGraphic.start();
        NavSteeringCmds.start();
        RWRContactMgmt.start();
        DisplayStoresUpdate.start();
        DisplayStatUpdate.start();
        DataBusPollDevice.start();
        RadarTargetUpdate.start();
        DisplayHookUpdate.start();
        TrackingTargetUpdate.start();
        DisplayKeySet.start();
        BETEStatusUpdate.start();
        NavStatus.start();
    }

    /**
     * Busy-work unit used to simulate CPU load; the always-true comparison
     * keeps the loop from being optimized away.
     */
    public static void work() {
        for (int i = 0; i < 5400; i++)
            if (999 * 88 + 938857361 / 32 + 876544 > 0) ;
    }
}
package org.cache2k.core.eviction; /*- * #%L * cache2k core implementation * %% * Copyright (C) 2000 - 2022 headissue GmbH, Munich * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.cache2k.operation.Weigher; import org.cache2k.core.Entry; import org.cache2k.core.IntegrityState; /** * Eviction algorithm inspired from CLOCK Pro with 3 clocks. * * <p>Uses a static allocation for hot and cold space sizes. No online or dynamic * optimization is done yet. However, the hit rate for all measured access traces is better * than LRU and resistant to scans. * * <p>From cache2k version 1.2 to version 1.4 the implementation was simplified and the * demotion of hot entries removed. The result achieves similar or better hit rates. * Version 2.4 limits the ghost size to 3000 elements. Version 2.6 stops inserting * evicted hot entries into ghosts. * * <p>The Clock-Pro algorithm is explained by the authors in * <a href="http://www.ece.eng.wayne.edu/~sjiang/pubs/papers/jiang05_CLOCK-Pro.pdf">CLOCK-Pro: * An Effective Improvement of the CLOCK Replacement</a> * and <a href="http://www.slideshare.net/huliang64/clockpro">Clock-Pro: An Effective * Replacement in OS Kernel</a>. 
* * @author Jens Wilke */ @SuppressWarnings({"WeakerAccess", "rawtypes"}) public class ClockProPlusEviction extends AbstractEviction { static final int MAX_GHOST_ENTRIES = 3_000; static final int GHOST_LOAD_PERCENT = 63; static final int HOT_MAX_PERCENTAGE = 97; static final int HIT_COUNTER_DECREASE_SHIFT = 6; static final int GHOST_MAX_PERCENTAGE = 50; private long hotHits; private long coldHits; private long ghostHits; private long hotRunCnt; private long hotScanCnt; private long coldRunCnt; private long coldScanCnt; private int coldSize; private int hotSize; private Entry handCold; private Entry handHot; private Ghost[] ghosts; private final Ghost ghostHead = new Ghost().shortCircuit(); private int ghostSize = 0; private long hotMax = Long.MAX_VALUE; private long ghostMax = MAX_GHOST_ENTRIES; public ClockProPlusEviction(HeapCacheForEviction heapCache, InternalEvictionListener listener, long maxSize, Weigher weigher, long maxWeight, boolean noChunking) { super(heapCache, listener, maxSize, weigher, maxWeight, noChunking); coldSize = 0; hotSize = 0; handCold = null; handHot = null; ghosts = new Ghost[4]; } private long sumUpListHits(Entry e) { if (e == null) { return 0; } long cnt = 0; Entry head = e; do { cnt += e.hitCnt; e = e.next; } while (e != head); return cnt; } public long getHotMax() { return hotMax; } public long getGhostMax() { return ghostMax; } /** * Updates hot max based on current size. This is called when eviction * kicks in so current size is the maximum size this cache should reach * regardless whether we use entry capacity or weigher to limit the size. 
*/ @Override protected void updateHotMax() { hotMax = getSize() * HOT_MAX_PERCENTAGE / 100; ghostMax = getSize() * GHOST_MAX_PERCENTAGE / 100 + 1; ghostMax = Math.min(MAX_GHOST_ENTRIES, ghostMax); trimGhostSize(); } @Override protected long removeAllFromReplacementList() { Entry e, head; int count = 0; e = head = handCold; long hits = 0; if (e != null) { do { hits += e.hitCnt; Entry next = e.prev; e.removedFromList(); count++; e = next; } while (e != head); coldHits += hits; } handCold = null; coldSize = 0; e = head = handHot; if (e != null) { hits = 0; do { hits += e.hitCnt; Entry next = e.prev; e.removedFromList(); count++; e = next; } while (e != head); hotHits += hits; } handHot = null; hotSize = 0; return count; } /** * Track the entry on the ghost list and call the usual remove procedure. */ @Override public void removeFromReplacementListOnEvict(Entry e) { if (!e.isHot()) { insertCopyIntoGhosts(e); } removeFromReplacementList(e); } /** * Remove, expire or eviction of an entry happens. Remove the entry from the * replacement list data structure. * * <p>Why don't generate ghosts here? If the entry is removed because of * a programmatic remove or expiry we should not occupy any resources. * Removing and expiry may also take place when no eviction is needed at all, * which happens when the cache size did not hit the maximum yet. Producing ghosts * would add additional overhead, when it is not needed. 
*/ @Override protected void removeFromReplacementList(Entry e) { if (e.isHot()) { hotHits += e.hitCnt; handHot = Entry.removeFromCyclicList(handHot, e); hotSize--; } else { coldHits += e.hitCnt; handCold = Entry.removeFromCyclicList(handCold, e); coldSize--; } } private void insertCopyIntoGhosts(Entry e) { int hc = e.hashCode; Ghost g = lookupGhost(hc); if (g != null) { Ghost.moveToFront(ghostHead, g); return; } g = trimGhostSize(); if (g == null) { g = new Ghost(); } g.hash = hc; insertGhost(g, hc); Ghost.insertInList(ghostHead, g); } /** * Reduce ghost size to fit maximum size. * * @return A removed ghost to reuse the object for insert */ private Ghost trimGhostSize() { Ghost g = null; while (ghostSize >= getGhostMax()) { g = ghostHead.prev; Ghost.removeFromList(g); boolean f = removeGhost(g, g.hash); } return g; } public long getSize() { return hotSize + coldSize; } @Override protected void insertIntoReplacementList(Entry e) { Ghost g = lookupGhost(e.hashCode); if (g != null) { /* * don't remove ghosts here, save object allocations. * removeGhost(g, g.hash); Ghost.removeFromList(g); */ ghostHits++; } e.setScanRound(idleScanRound); if (g != null || (coldSize == 0 && hotSize < getHotMax())) { e.setHot(true); hotSize++; handHot = Entry.insertIntoTailCyclicList(handHot, e); return; } coldSize++; handCold = Entry.insertIntoTailCyclicList(handCold, e); } /** * Find idle candidate. * * Idle scanning does scan on the bigger clock first, which is typically * the hot clock, because entries are sitting in the cache for the longest time. */ protected Entry findIdleCandidate(int maxScan) { if (hotSize > coldSize) { Entry e = findIdleInHot(maxScan); return e.hitCnt == 0 ? 
e : null; } if (coldSize > 0) { return findEvictionCandidate(); } return null; } /** * Entry has hits, reset hit counter and mark */ private void stepOver(Entry e, long hits) { e.hitCnt = hits; e.setScanRound(idleScanRound); } protected Entry findIdleInHot(int maxScan) { hotRunCnt++; int initialMaxScan = maxScan; Entry hand = handHot; while (maxScan-- > 0) { long hitCnt = hand.hitCnt; if (hitCnt == 0) { handHot = hand.next; hotScanCnt += initialMaxScan - maxScan; return hand; } stepOver(hand, 0); hand = hand.next; } hotScanCnt += initialMaxScan - maxScan; handHot = hand; return hand; } private Entry runHandHot() { hotRunCnt++; Entry hand = handHot; Entry coldCandidate = hand; long lowestHits = Long.MAX_VALUE; long hotHits = this.hotHits; int initialMaxScan = (hotSize >> 2) + 1; int maxScan = initialMaxScan; long decrease = ((hand.hitCnt + hand.next.hitCnt) >> HIT_COUNTER_DECREASE_SHIFT) + 1; while (maxScan-- > 0) { long hitCnt = hand.hitCnt; if (hitCnt < lowestHits) { lowestHits = hitCnt; coldCandidate = hand; if (hitCnt == 0) { hand = hand.next; break; } } if (hitCnt < decrease) { hand.hitCnt = 0; stepOver(hand, 0); hotHits += hitCnt; } else { stepOver(hand, hitCnt - decrease); hotHits += decrease; } hand = hand.next; } this.hotHits = hotHits; long scanCount = initialMaxScan - maxScan; hotScanCnt += scanCount; handHot = hand; return coldCandidate; } /** * Runs cold hand and in turn hot hand to find eviction candidate. 
*/ @Override protected Entry findEvictionCandidate() { Entry hand = handCold; if (hotSize > getHotMax() || hand == null) { return runHandHot(); } coldRunCnt++; int scanCnt = 1; if (hand.hitCnt > 0) { Entry evictFromHot = null; do { if (hotSize >= getHotMax() && handHot != null) { evictFromHot = runHandHot(); } coldHits += hand.hitCnt; Entry e = hand; hand = Entry.removeFromCyclicList(e); coldSize--; e.setHot(true); stepOver(e, 0); hotSize++; handHot = Entry.insertIntoTailCyclicList(handHot, e); if (evictFromHot != null) { coldScanCnt += scanCnt; handCold = hand; return evictFromHot; } scanCnt++; } while (hand != null && hand.hitCnt > 0); } coldScanCnt += scanCnt; if (hand == null) { handCold = null; return runHandHot(); } handCold = hand.next; return hand; } @Override protected long getScanCount() { return coldScanCnt + hotScanCnt; } @Override public void checkIntegrity(IntegrityState integrityState) { integrityState.checkEquals("ghostSize == countGhostsInHash()", ghostSize, countGhostsInHash()) .check("checkCyclicListIntegrity(handHot)", Entry.checkCyclicListIntegrity(handHot)) .check("checkCyclicListIntegrity(handCold)", Entry.checkCyclicListIntegrity(handCold)) .checkEquals("getCyclicListEntryCount(handHot) == hotSize", Entry.getCyclicListEntryCount(handHot), hotSize) .checkEquals("getCyclicListEntryCount(handCold) == coldSize", Entry.getCyclicListEntryCount(handCold), coldSize) .checkEquals("Ghost.listSize(ghostHead) == ghostSize", Ghost.listSize(ghostHead), ghostSize); } @Override public String toString() { synchronized (lock) { return super.toString() + ", coldSize=" + coldSize + ", hotSize=" + hotSize + ", hotMaxSize=" + getHotMax() + ", ghostSize=" + ghostSize + ", ghostMaxSize=" + getGhostMax() + ", coldHits=" + (coldHits + sumUpListHits(handCold)) + ", hotHits=" + (hotHits + sumUpListHits(handHot)) + ", ghostHits=" + ghostHits + ", coldRunCnt=" + coldRunCnt + // identical to the evictions anyways ", coldScanCnt=" + coldScanCnt + ", hotRunCnt=" + hotRunCnt 
+ ", hotScanCnt=" + hotScanCnt; } } private Ghost lookupGhost(int hash) { Ghost[] tab = ghosts; int n = tab.length; int mask = n - 1; int idx = hash & (mask); Ghost e = tab[idx]; while (e != null) { if (e.hash == hash) { return e; } e = e.another; } return null; } private void insertGhost(Ghost e2, int hash) { Ghost[] tab = ghosts; int n = tab.length; int mask = n - 1; int idx = hash & (mask); e2.another = tab[idx]; tab[idx] = e2; ghostSize++; int maxFill = n * GHOST_LOAD_PERCENT / 100; if (ghostSize > maxFill) { expand(); } } private void expand() { Ghost[] tab = ghosts; int n = tab.length; int mask; int idx; Ghost[] newTab = new Ghost[n * 2]; mask = newTab.length - 1; for (Ghost g : tab) { while (g != null) { idx = g.hash & mask; Ghost next = g.another; g.another = newTab[idx]; newTab[idx] = g; g = next; } } ghosts = newTab; } private boolean removeGhost(Ghost g, int hash) { Ghost[] tab = ghosts; int n = tab.length; int mask = n - 1; int idx = hash & (mask); Ghost e = tab[idx]; if (e == g) { tab[idx] = e.another; ghostSize--; return true; } else { while (e != null) { Ghost another = e.another; if (another == g) { e.another = another.another; ghostSize--; return true; } e = another; } } return false; } private int countGhostsInHash() { int entryCount = 0; for (Ghost e : ghosts) { while (e != null) { entryCount++; e = e.another; } } return entryCount; } /** * Ghost representing a entry we have seen and evicted from the cache. We only store * the hash to save memory, since holding the key references may cause a size overhead. 
*/ private static class Ghost { /** Modified hashcode of the key */ int hash; /** Hash table chain */ Ghost another; /** LRU double linked list */ Ghost next; /** LRU double linked list */ Ghost prev; Ghost shortCircuit() { return next = prev = this; } static void removeFromList(Ghost e) { e.prev.next = e.next; e.next.prev = e.prev; e.next = e.prev = null; } static void insertInList(Ghost head, Ghost e) { e.prev = head; e.next = head.next; e.next.prev = e; head.next = e; } static void moveToFront(Ghost head, Ghost e) { removeFromList(e); insertInList(head, e); } static int listSize(Ghost head) { int count = 0; Ghost e = head; while ((e = e.next) != head) { count++; } return count; } } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.appsync.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Describes an OpenID Connect configuration. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appsync-2017-07-25/OpenIDConnectConfig" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class OpenIDConnectConfig implements Serializable, Cloneable, StructuredPojo { /** * <p> * The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the value of * <code>iss</code> in the ID token. * </p> */ private String issuer; /** * <p> * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained * when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so * the AWS AppSync can validate against multiple client identifiers at a time. * </p> */ private String clientId; /** * <p> * The number of milliseconds a token is valid after being issued to a user. * </p> */ private Long iatTTL; /** * <p> * The number of milliseconds a token is valid after being authenticated. * </p> */ private Long authTTL; /** * <p> * The issuer for the OpenID Connect configuration. 
The issuer returned by discovery must exactly match the value of * <code>iss</code> in the ID token. * </p> * * @param issuer * The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the * value of <code>iss</code> in the ID token. */ public void setIssuer(String issuer) { this.issuer = issuer; } /** * <p> * The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the value of * <code>iss</code> in the ID token. * </p> * * @return The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the * value of <code>iss</code> in the ID token. */ public String getIssuer() { return this.issuer; } /** * <p> * The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the value of * <code>iss</code> in the ID token. * </p> * * @param issuer * The issuer for the OpenID Connect configuration. The issuer returned by discovery must exactly match the * value of <code>iss</code> in the ID token. * @return Returns a reference to this object so that method calls can be chained together. */ public OpenIDConnectConfig withIssuer(String issuer) { setIssuer(issuer); return this; } /** * <p> * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained * when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so * the AWS AppSync can validate against multiple client identifiers at a time. * </p> * * @param clientId * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically * obtained when the Relying party is registered with the OpenID identity provider. You can specify a regular * expression so the AWS AppSync can validate against multiple client identifiers at a time. 
*/ public void setClientId(String clientId) { this.clientId = clientId; } /** * <p> * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained * when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so * the AWS AppSync can validate against multiple client identifiers at a time. * </p> * * @return The client identifier of the Relying party at the OpenID identity provider. This identifier is typically * obtained when the Relying party is registered with the OpenID identity provider. You can specify a * regular expression so the AWS AppSync can validate against multiple client identifiers at a time. */ public String getClientId() { return this.clientId; } /** * <p> * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained * when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so * the AWS AppSync can validate against multiple client identifiers at a time. * </p> * * @param clientId * The client identifier of the Relying party at the OpenID identity provider. This identifier is typically * obtained when the Relying party is registered with the OpenID identity provider. You can specify a regular * expression so the AWS AppSync can validate against multiple client identifiers at a time. * @return Returns a reference to this object so that method calls can be chained together. */ public OpenIDConnectConfig withClientId(String clientId) { setClientId(clientId); return this; } /** * <p> * The number of milliseconds a token is valid after being issued to a user. * </p> * * @param iatTTL * The number of milliseconds a token is valid after being issued to a user. */ public void setIatTTL(Long iatTTL) { this.iatTTL = iatTTL; } /** * <p> * The number of milliseconds a token is valid after being issued to a user. 
* </p> * * @return The number of milliseconds a token is valid after being issued to a user. */ public Long getIatTTL() { return this.iatTTL; } /** * <p> * The number of milliseconds a token is valid after being issued to a user. * </p> * * @param iatTTL * The number of milliseconds a token is valid after being issued to a user. * @return Returns a reference to this object so that method calls can be chained together. */ public OpenIDConnectConfig withIatTTL(Long iatTTL) { setIatTTL(iatTTL); return this; } /** * <p> * The number of milliseconds a token is valid after being authenticated. * </p> * * @param authTTL * The number of milliseconds a token is valid after being authenticated. */ public void setAuthTTL(Long authTTL) { this.authTTL = authTTL; } /** * <p> * The number of milliseconds a token is valid after being authenticated. * </p> * * @return The number of milliseconds a token is valid after being authenticated. */ public Long getAuthTTL() { return this.authTTL; } /** * <p> * The number of milliseconds a token is valid after being authenticated. * </p> * * @param authTTL * The number of milliseconds a token is valid after being authenticated. * @return Returns a reference to this object so that method calls can be chained together. */ public OpenIDConnectConfig withAuthTTL(Long authTTL) { setAuthTTL(authTTL); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getIssuer() != null) sb.append("Issuer: ").append(getIssuer()).append(","); if (getClientId() != null) sb.append("ClientId: ").append(getClientId()).append(","); if (getIatTTL() != null) sb.append("IatTTL: ").append(getIatTTL()).append(","); if (getAuthTTL() != null) sb.append("AuthTTL: ").append(getAuthTTL()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof OpenIDConnectConfig == false) return false; OpenIDConnectConfig other = (OpenIDConnectConfig) obj; if (other.getIssuer() == null ^ this.getIssuer() == null) return false; if (other.getIssuer() != null && other.getIssuer().equals(this.getIssuer()) == false) return false; if (other.getClientId() == null ^ this.getClientId() == null) return false; if (other.getClientId() != null && other.getClientId().equals(this.getClientId()) == false) return false; if (other.getIatTTL() == null ^ this.getIatTTL() == null) return false; if (other.getIatTTL() != null && other.getIatTTL().equals(this.getIatTTL()) == false) return false; if (other.getAuthTTL() == null ^ this.getAuthTTL() == null) return false; if (other.getAuthTTL() != null && other.getAuthTTL().equals(this.getAuthTTL()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getIssuer() == null) ? 0 : getIssuer().hashCode()); hashCode = prime * hashCode + ((getClientId() == null) ? 0 : getClientId().hashCode()); hashCode = prime * hashCode + ((getIatTTL() == null) ? 0 : getIatTTL().hashCode()); hashCode = prime * hashCode + ((getAuthTTL() == null) ? 
0 : getAuthTTL().hashCode()); return hashCode; } @Override public OpenIDConnectConfig clone() { try { return (OpenIDConnectConfig) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.appsync.model.transform.OpenIDConnectConfigMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package com.taobao.tddl.dbsync.binlog.event;

import java.io.Serializable;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.BitSet;
import java.util.Calendar;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.taobao.tddl.dbsync.binlog.LogBuffer;
import com.taobao.tddl.dbsync.binlog.LogEvent;

/**
 * Extracting JDBC type & value information from packed rows-buffer.
 *
 * @see mysql-5.1.60/sql/log_event.cc - Rows_log_event::print_verbose_one_row
 *
 * @author <a href="mailto:changyuan.lh@taobao.com">Changyuan.lh</a>
 * @version 1.0
 */
public final class RowsLogBuffer {

    protected static final Log logger = LogFactory.getLog(RowsLogBuffer.class);

    // Underlying packed row buffer; positions advance as values are fetched.
    private final LogBuffer buffer;
    // Total number of columns in the table map for this row event.
    private final int       columnLen;
    // Charset used to decode string columns.
    private final String    charsetName;
    // Lazily-created calendar reused for date/time decoding.
    // NOTE(review): uses the JVM default time zone — confirm this matches the
    // server's expectation for binlog temporal values.
    private Calendar        cal;

    // Null bitmap of the current row and the cursor into it.
    private final BitSet    nullBits;
    private int             nullBitIndex;

    // State of the most recently fetched column value.
    private boolean         fNull;
    private int             javaType;
    private int             length;
    private Serializable    value;

    public RowsLogBuffer(LogBuffer buffer, final int columnLen, String charsetName) {
        this.buffer = buffer;
        this.columnLen = columnLen;
        this.charsetName = charsetName;
        this.nullBits = new BitSet(columnLen);
    }

    /**
     * Extracting next row from packed buffer.
     *
     * @see mysql-5.1.60/sql/log_event.cc -
     *      Rows_log_event::print_verbose_one_row
     */
    public final boolean nextOneRow(BitSet columns) {
        final boolean hasOneRow = buffer.hasRemaining();

        if (hasOneRow) {
            // The null bitmap only covers columns present in the column image.
            int column = 0;
            for (int i = 0; i < columnLen; i++)
                if (columns.get(i)) column++;

            nullBitIndex = 0;
            nullBits.clear();
            buffer.fillBitmap(nullBits, column);
        }
        return hasOneRow;
    }

    /**
     * Extracting next field value from packed buffer.
     *
     * @see mysql-5.1.60/sql/log_event.cc -
     *      Rows_log_event::print_verbose_one_row
     */
    public final Serializable nextValue(final int type, final int meta) {
        fNull = nullBits.get(nullBitIndex++);

        if (fNull) {
            value = null;
            javaType = mysqlToJavaType(type, meta);
            length = 0;
            return null;
        } else {
            // Extracting field value from packed buffer.
            return fetchValue(type, meta);
        }
    }

    /**
     * Maps the given MySQL type to the correct JDBC type.
     */
    static int mysqlToJavaType(int type, final int meta) {
        int javaType;

        if (type == LogEvent.MYSQL_TYPE_STRING) {
            // For MYSQL_TYPE_STRING the real type may be packed in the
            // high byte of the metadata (see MySQL bug #37426).
            if (meta >= 256) {
                int byte0 = meta >> 8;
                if ((byte0 & 0x30) != 0x30) {
                    /* a long CHAR() field: see #37426 */
                    type = byte0 | 0x30;
                } else {
                    switch (byte0) {
                        case LogEvent.MYSQL_TYPE_SET:
                        case LogEvent.MYSQL_TYPE_ENUM:
                        case LogEvent.MYSQL_TYPE_STRING:
                            type = byte0;
                    }
                }
            }
        }

        switch (type) {
            case LogEvent.MYSQL_TYPE_LONG:
                javaType = Types.INTEGER;
                break;

            case LogEvent.MYSQL_TYPE_TINY:
                javaType = Types.TINYINT;
                break;

            case LogEvent.MYSQL_TYPE_SHORT:
                javaType = Types.SMALLINT;
                break;

            case LogEvent.MYSQL_TYPE_INT24:
                javaType = Types.INTEGER;
                break;

            case LogEvent.MYSQL_TYPE_LONGLONG:
                javaType = Types.BIGINT;
                break;

            case LogEvent.MYSQL_TYPE_DECIMAL:
                javaType = Types.DECIMAL;
                break;

            case LogEvent.MYSQL_TYPE_NEWDECIMAL:
                javaType = Types.DECIMAL;
                break;

            case LogEvent.MYSQL_TYPE_FLOAT:
                javaType = Types.REAL; // Types.FLOAT;
                break;

            case LogEvent.MYSQL_TYPE_DOUBLE:
                javaType = Types.DOUBLE;
                break;

            case LogEvent.MYSQL_TYPE_BIT:
                javaType = Types.BIT;
                break;

            case LogEvent.MYSQL_TYPE_TIMESTAMP:
            case LogEvent.MYSQL_TYPE_DATETIME:
                javaType = Types.TIMESTAMP;
                break;

            case LogEvent.MYSQL_TYPE_TIME:
                javaType = Types.TIME;
                break;

            case LogEvent.MYSQL_TYPE_NEWDATE:
            case LogEvent.MYSQL_TYPE_DATE:
            case LogEvent.MYSQL_TYPE_YEAR:
                javaType = Types.DATE;
                break;

            case LogEvent.MYSQL_TYPE_ENUM:
                javaType = Types.INTEGER;
                break;

            case LogEvent.MYSQL_TYPE_SET:
                javaType = Types.BINARY;
                break;

            case LogEvent.MYSQL_TYPE_TINY_BLOB:
            case LogEvent.MYSQL_TYPE_MEDIUM_BLOB:
            case LogEvent.MYSQL_TYPE_LONG_BLOB:
            case LogEvent.MYSQL_TYPE_BLOB:
                if (meta == 1) {
                    javaType = Types.VARBINARY;
                } else {
                    javaType = Types.LONGVARBINARY;
                }
                break;

            case LogEvent.MYSQL_TYPE_VARCHAR:
            case LogEvent.MYSQL_TYPE_VAR_STRING:
                javaType = Types.VARCHAR;
                break;

            case LogEvent.MYSQL_TYPE_STRING:
                javaType = Types.CHAR;
                break;

            case LogEvent.MYSQL_TYPE_GEOMETRY:
                javaType = Types.BINARY;
                break;

            default:
                javaType = Types.OTHER;
        }

        return javaType;
    }

    /**
     * Extracting next field value from packed buffer.
     *
     * @see mysql-5.1.60/sql/log_event.cc - log_event_print_value
     */
    final Serializable fetchValue(int type, final int meta) {
        int len = 0;

        if (type == LogEvent.MYSQL_TYPE_STRING) {
            // Unpack real type and length from the two metadata bytes,
            // mirroring mysqlToJavaType() above.
            if (meta >= 256) {
                int byte0 = meta >> 8;
                int byte1 = meta & 0xff;

                if ((byte0 & 0x30) != 0x30) {
                    /* a long CHAR() field: see #37426 */
                    len = byte1 | (((byte0 & 0x30) ^ 0x30) << 4);
                    type = byte0 | 0x30;
                } else {
                    switch (byte0) {
                        case LogEvent.MYSQL_TYPE_SET:
                        case LogEvent.MYSQL_TYPE_ENUM:
                        case LogEvent.MYSQL_TYPE_STRING:
                            type = byte0;
                            len = byte1;
                            break;
                        default:
                            throw new IllegalArgumentException(
                                    String.format(
                                            "!! Don't know how to handle column type=%d meta=%d (%04X)",
                                            type, meta, meta));
                    }
                }
            } else len = meta;
        }

        switch (type) {
            case LogEvent.MYSQL_TYPE_LONG: {
                // XXX: How to check signed / unsigned?
                // value = unsigned ? Long.valueOf(buffer.getUint32()) : Integer.valueOf(buffer.getInt32());
                value = Integer.valueOf(buffer.getInt32());
                javaType = Types.INTEGER;
                length = 4;
                break;
            }
            case LogEvent.MYSQL_TYPE_TINY: {
                // XXX: How to check signed / unsigned?
                // value = Integer.valueOf(unsigned ? buffer.getUint8() : buffer.getInt8());
                value = Integer.valueOf(buffer.getInt8());
                javaType = Types.TINYINT; // java.sql.Types.INTEGER;
                length = 1;
                break;
            }
            case LogEvent.MYSQL_TYPE_SHORT: {
                // XXX: How to check signed / unsigned?
                // value = Integer.valueOf(unsigned ? buffer.getUint16() : buffer.getInt16());
                value = Integer.valueOf((short) buffer.getInt16());
                javaType = Types.SMALLINT; // java.sql.Types.INTEGER;
                length = 2;
                break;
            }
            case LogEvent.MYSQL_TYPE_INT24: {
                // XXX: How to check signed / unsigned?
                // value = Integer.valueOf(unsigned ? buffer.getUint24() : buffer.getInt24());
                value = Integer.valueOf(buffer.getInt24());
                javaType = Types.INTEGER;
                length = 3;
                break;
            }
            case LogEvent.MYSQL_TYPE_LONGLONG: {
                // XXX: How to check signed / unsigned?
                // value = unsigned ? buffer.getUlong64()) : Long.valueOf(buffer.getLong64());
                value = Long.valueOf(buffer.getLong64());
                javaType = Types.BIGINT; // Types.INTEGER;
                length = 8;
                break;
            }
            case LogEvent.MYSQL_TYPE_DECIMAL: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                logger.warn("MYSQL_TYPE_DECIMAL : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                javaType = Types.DECIMAL;
                value = null; /* unknown format */
                length = 0;
                break;
            }
            case LogEvent.MYSQL_TYPE_NEWDECIMAL: {
                // Metadata packs precision in the high byte, scale in the low byte.
                final int precision = meta >> 8;
                final int decimals = meta & 0xff;
                value = buffer.getDecimal(precision, decimals);
                javaType = Types.DECIMAL;
                length = precision;
                break;
            }
            case LogEvent.MYSQL_TYPE_FLOAT: {
                value = Float.valueOf(buffer.getFloat32());
                javaType = Types.REAL; // Types.FLOAT;
                length = 4;
                break;
            }
            case LogEvent.MYSQL_TYPE_DOUBLE: {
                value = Double.valueOf(buffer.getDouble64());
                javaType = Types.DOUBLE;
                length = 8;
                break;
            }
            case LogEvent.MYSQL_TYPE_BIT: {
                /* Meta-data: bit_len, bytes_in_rec, 2 bytes */
                final int nbits = ((meta >> 8) * 8) + (meta & 0xff);
                len = (nbits + 7) / 8;
                // Single-bit columns are surfaced as Boolean, wider ones as byte[].
                if (nbits > 1) {
                    byte[] bits = new byte[len];
                    buffer.fillBytes(bits, 0, len);
                    value = bits;
                } else {
                    final int bit = buffer.getInt8();
                    value = (bit != 0) ? Boolean.TRUE : Boolean.FALSE;
                }
                javaType = Types.BIT;
                length = nbits;
                break;
            }
            case LogEvent.MYSQL_TYPE_TIMESTAMP: {
                // Stored as seconds since epoch; converted to milliseconds here.
                final long i32 = buffer.getUint32();
                value = new Timestamp(i32 * 1000);
                javaType = Types.TIMESTAMP;
                length = 4;
                break;
            }
            case LogEvent.MYSQL_TYPE_DATETIME: {
                final long i64 = buffer.getLong64(); /* YYYYMMDDhhmmss */
                final int d = (int) (i64 / 1000000);
                final int t = (int) (i64 % 1000000);

                if (cal == null) cal = Calendar.getInstance();
                cal.clear();
                /* month is 0-based, 0 for january. */
                cal.set(d / 10000, (d % 10000) / 100 - 1, d % 100,
                        t / 10000, (t % 10000) / 100, t % 100);
                value = new Timestamp(cal.getTimeInMillis());
                javaType = Types.TIMESTAMP;
                length = 8;
                break;
            }
            case LogEvent.MYSQL_TYPE_TIME: {
                // Packed as HHMMSS in 3 bytes; mapped onto 1970-01-01.
                final int i32 = buffer.getUint24();

                if (cal == null) cal = Calendar.getInstance();
                cal.clear();
                cal.set(70, 0, 1, i32 / 10000, (i32 % 10000) / 100, i32 % 100);
                value = new Time(cal.getTimeInMillis());
                javaType = Types.TIME;
                length = 3;
                break;
            }
            case LogEvent.MYSQL_TYPE_NEWDATE: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                logger.warn("MYSQL_TYPE_NEWDATE : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                javaType = Types.DATE;
                value = null; /* unknown format */
                length = 0;
                break;
            }
            case LogEvent.MYSQL_TYPE_DATE: {
                // Packed as day(5 bits) | month(4 bits) | year(rest).
                final int i32 = buffer.getUint24();

                if (cal == null) cal = Calendar.getInstance();
                cal.clear();
                /* month is 0-based, 0 for january. */
                cal.set((i32 / (16 * 32)), (i32 / 32 % 16) - 1, (i32 % 32));
                value = new java.sql.Date(cal.getTimeInMillis());
                javaType = Types.DATE;
                length = 3;
                break;
            }
            case LogEvent.MYSQL_TYPE_YEAR: {
                // Stored as an offset from 1900 in a single byte.
                final int i32 = buffer.getUint8();

                if (cal == null) cal = Calendar.getInstance();
                cal.clear();
                cal.set(Calendar.YEAR, i32 + 1900);
                // If connection property 'YearIsDateType' has
                // set, value is java.sql.Date.
                value = new java.sql.Date(cal.getTimeInMillis());
                javaType = Types.DATE; // Types.INTEGER;
                length = 1;
                break;
            }
            case LogEvent.MYSQL_TYPE_ENUM: {
                final int int32;
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                switch (len) {
                    case 1:
                        int32 = buffer.getUint8();
                        break;
                    case 2:
                        int32 = buffer.getUint16();
                        break;
                    default:
                        throw new IllegalArgumentException(
                                "!! Unknown ENUM packlen = " + len);
                }
                logger.warn("MYSQL_TYPE_ENUM : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                value = Integer.valueOf(int32);
                javaType = Types.INTEGER;
                length = len;
                break;
            }
            case LogEvent.MYSQL_TYPE_SET: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                byte[] nbits = new byte[len];
                buffer.fillBytes(nbits, 0, len);
                logger.warn("MYSQL_TYPE_SET : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                value = nbits;
                javaType = Types.BINARY; // Types.INTEGER;
                length = len;
                break;
            }
            case LogEvent.MYSQL_TYPE_TINY_BLOB: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                logger.warn("MYSQL_TYPE_TINY_BLOB : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                // falls through to MYSQL_TYPE_BLOB (deliberate: decoded the same way)
            }
            case LogEvent.MYSQL_TYPE_MEDIUM_BLOB: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                logger.warn("MYSQL_TYPE_MEDIUM_BLOB : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                // falls through to MYSQL_TYPE_BLOB (deliberate: decoded the same way)
            }
            case LogEvent.MYSQL_TYPE_LONG_BLOB: {
                /*
                 * log_event.h : This enumeration value is only used internally and
                 * cannot exist in a binlog.
                 */
                logger.warn("MYSQL_TYPE_LONG_BLOB : This enumeration value is "
                        + "only used internally and cannot exist in a binlog!");
                // falls through to MYSQL_TYPE_BLOB (deliberate: decoded the same way)
            }
            case LogEvent.MYSQL_TYPE_BLOB: {
                /*
                 * BLOB or TEXT datatype
                 */
                // meta is the number of bytes used to store the payload length.
                switch (meta) {
                    case 1: {
                        /* TINYBLOB/TINYTEXT */
                        final int len8 = buffer.getUint8();
                        byte[] binary = new byte[len8];
                        buffer.fillBytes(binary, 0, len8);
                        value = binary;
                        javaType = Types.VARBINARY;
                        length = len8;
                        break;
                    }
                    case 2: {
                        /* BLOB/TEXT */
                        final int len16 = buffer.getUint16();
                        byte[] binary = new byte[len16];
                        buffer.fillBytes(binary, 0, len16);
                        value = binary;
                        javaType = Types.LONGVARBINARY;
                        length = len16;
                        break;
                    }
                    case 3: {
                        /* MEDIUMBLOB/MEDIUMTEXT */
                        final int len24 = buffer.getUint24();
                        byte[] binary = new byte[len24];
                        buffer.fillBytes(binary, 0, len24);
                        value = binary;
                        javaType = Types.LONGVARBINARY;
                        length = len24;
                        break;
                    }
                    case 4: {
                        /* LONGBLOB/LONGTEXT */
                        final int len32 = (int) buffer.getUint32();
                        byte[] binary = new byte[len32];
                        buffer.fillBytes(binary, 0, len32);
                        value = binary;
                        javaType = Types.LONGVARBINARY;
                        length = len32;
                        break;
                    }
                    default:
                        throw new IllegalArgumentException(
                                "!! Unknown BLOB packlen = " + meta);
                }
                break;
            }
            case LogEvent.MYSQL_TYPE_VARCHAR:
            case LogEvent.MYSQL_TYPE_VAR_STRING: {
                /*
                 * Except for the data length calculation, MYSQL_TYPE_VARCHAR,
                 * MYSQL_TYPE_VAR_STRING and MYSQL_TYPE_STRING are handled the
                 * same way.
                 */
                len = meta;
                // Length prefix is 1 byte for max length < 256, 2 bytes otherwise.
                if (len < 256) {
                    len = buffer.getUint8();
                } else {
                    len = buffer.getUint16();
                }
                value = buffer.getFixString(len, charsetName);
                javaType = Types.VARCHAR;
                length = len;
                break;
            }
            case LogEvent.MYSQL_TYPE_STRING: {
                // 'len' was unpacked from the metadata at the top of this method.
                if (len < 256) {
                    len = buffer.getUint8();
                } else {
                    len = buffer.getUint16();
                }
                value = buffer.getFixString(len, charsetName);
                javaType = Types.CHAR; // Types.VARCHAR;
                length = len;
                break;
            }
            case LogEvent.MYSQL_TYPE_GEOMETRY: {
                /*
                 * MYSQL_TYPE_GEOMETRY: copy from BLOB or TEXT
                 */
                switch (meta) {
                    case 1:
                        len = buffer.getUint8();
                        break;
                    case 2:
                        len = buffer.getUint16();
                        break;
                    case 3:
                        len = buffer.getUint24();
                        break;
                    case 4:
                        len = (int) buffer.getUint32();
                        break;
                    default:
                        throw new IllegalArgumentException(
                                "!! Unknown MYSQL_TYPE_GEOMETRY packlen = " + meta);
                }
                /* fill binary */
                byte[] binary = new byte[len];
                buffer.fillBytes(binary, 0, len);

                /* Warning unsupport cloumn type */
                logger.warn(String.format(
                        "!! Unsupport column type MYSQL_TYPE_GEOMETRY: meta=%d (%04X), len = %d",
                        meta, meta, len));
                javaType = Types.BINARY;
                value = binary;
                length = len;
                break;
            }
            default:
                logger.error(String.format(
                        "!! Don't know how to handle column type=%d meta=%d (%04X)",
                        type, meta, meta));
                javaType = Types.OTHER;
                value = null;
                length = 0;
        }

        return value;
    }

    /** @return whether the value fetched by the last nextValue() call was NULL */
    public final boolean isNull() {
        return fNull;
    }

    /** @return JDBC type (java.sql.Types) of the last fetched value */
    public final int getJavaType() {
        return javaType;
    }

    /** @return the last fetched value, or null */
    public final Serializable getValue() {
        return value;
    }

    /** @return length in bytes (or digits/bits, type dependent) of the last fetched value */
    public final int getLength() {
        return length;
    }
}
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.tasks.ui.nativetool; import org.eclipse.core.runtime.IStatus; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.jkiss.code.NotNull; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.app.DBPProject; import org.jkiss.dbeaver.model.connection.DBPNativeClientLocation; import org.jkiss.dbeaver.model.connection.LocalNativeClientLocation; import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.dbeaver.model.task.DBTTask; import org.jkiss.dbeaver.registry.task.TaskPreferenceStore; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.tasks.nativetool.AbstractNativeToolSettings; import org.jkiss.dbeaver.tasks.ui.nativetool.internal.TaskNativeUIMessages; import org.jkiss.dbeaver.tasks.ui.wizard.TaskConfigurationWizard; import org.jkiss.dbeaver.tasks.ui.wizard.TaskWizardExecutor; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.utils.CommonUtils; import java.lang.reflect.InvocationTargetException; import java.util.Collection; import 
java.util.List;

/**
 * Base wizard for tasks that run a database's external native command-line tool.
 * Handles loading/saving tool settings, resolving and validating the native
 * client home (the directory with the tool binaries), executing the task and
 * routing its output to a shared log page.
 *
 * @param <SETTINGS>    concrete tool settings type
 * @param <BASE_OBJECT> database object type the tool operates on
 * @param <PROCESS_ARG> process argument type used by subclasses
 */
public abstract class AbstractNativeToolWizard<SETTINGS extends AbstractNativeToolSettings<BASE_OBJECT>, BASE_OBJECT extends DBSObject, PROCESS_ARG> extends TaskConfigurationWizard<SETTINGS> {

    private static final Log log = Log.getLog(AbstractNativeToolWizard.class);

    // Settings storage: platform-wide preferences for ad-hoc runs,
    // task-scoped preferences when the wizard edits an existing task.
    private final DBPPreferenceStore preferenceStore;
    private final SETTINGS settings;

    protected String taskTitle;
    protected final NativeToolWizardPageLog logPage;
    private boolean finished;
    private String errorMessage;

    /**
     * Creates a wizard for an ad-hoc run over the given database objects.
     */
    protected AbstractNativeToolWizard(@NotNull Collection<BASE_OBJECT> databaseObjects, @NotNull String taskTitle) {
        this.preferenceStore = DBWorkbench.getPlatform().getPreferenceStore();
        this.settings = createSettings();
        this.settings.getDatabaseObjects().addAll(databaseObjects);
        this.taskTitle = taskTitle;
        this.logPage = new NativeToolWizardPageLog(taskTitle);
    }

    /**
     * Creates a wizard bound to an existing task; settings are stored inside the task.
     */
    public AbstractNativeToolWizard(@NotNull DBTTask task) {
        super(task);
        this.preferenceStore = new TaskPreferenceStore(task);
        this.settings = createSettings();
        this.taskTitle = task.getType().getName();
        this.logPage = new NativeToolWizardPageLog(taskTitle);
    }

    /** Creates the empty settings object; implemented by concrete tool wizards. */
    protected abstract SETTINGS createSettings();

    @Override
    public SETTINGS getSettings() {
        return settings;
    }

    @NotNull
    protected DBPPreferenceStore getPreferenceStore() {
        return preferenceStore;
    }

    // Prefer the project of the configured data source; fall back to the wizard's own project.
    public DBPProject getProject() {
        if (settings.getDataSourceContainer() != null) {
            return settings.getDataSourceContainer().getProject();
        }
        return super.getProject();
    }

    @Override
    protected String getDefaultWindowTitle() {
        return taskTitle;
    }

    @Override
    public boolean canFinish() {
        if (!super.canFinish()) {
            return false;
        }
        if (isSingleTimeWizard()) {
            // One-shot wizards may not be finished twice.
            return !finished;
        }
        // [#2917] Finish button is always enabled (!finished && super.canFinish())
        return true;
    }

    /**
     * @return true if this wizard can be executed only once
     */
    protected boolean isSingleTimeWizard() {
        return false;
    }

    @Override
    public void createPageControls(Composite pageContainer) {
        try {
            settings.loadSettings(UIUtils.getDefaultRunnableContext(), getPreferenceStore());
        } catch (DBException e) {
            DBWorkbench.getPlatformUI().showError("Settings load", "Error loading wizard settings", e);
        }
        super.createPageControls(pageContainer);
        updateErrorMessage();
    }

    // Resolves the native client home for the current data source and reflects
    // any problem as an error message on the starting page.
    void updateErrorMessage() {
        WizardPage currentPage = (WizardPage) getStartingPage();
        if (isNativeClientHomeRequired()) {
            String clientHomeId = getSettings().getDataSourceContainer().getConnectionConfiguration().getClientHomeId();
            List<DBPNativeClientLocation> nativeClientLocations = getSettings().getDataSourceContainer().getDriver().getNativeClientLocations();
            if (CommonUtils.isEmpty(clientHomeId)) {
                // No explicit client home configured: take the first known location, if any.
                if (nativeClientLocations != null && !nativeClientLocations.isEmpty()) {
                    settings.setClientHome(nativeClientLocations.get(0));
                } else {
                    settings.setClientHome(null);
                }
                if (settings.getClientHome() == null) {
                    currentPage.setErrorMessage(TaskNativeUIMessages.tools_wizard_message_no_client_home);
                    getContainer().updateMessage();
                    return;
                }
            } else {
                // Explicit id: look it up in driver locations, then in settings,
                // and finally treat the id itself as a local filesystem location.
                DBPNativeClientLocation clientHome = DBUtils.findObject(nativeClientLocations, clientHomeId);
                if (clientHome == null) {
                    clientHome = getSettings().findNativeClientHome(clientHomeId);
                }
                if (clientHome == null) {
                    // Make local client home from location
                    clientHome = new LocalNativeClientLocation(clientHomeId, clientHomeId);
                }
                settings.setClientHome(clientHome);
            }
            if (settings.getClientHome() == null) {
                currentPage.setErrorMessage(NLS.bind(TaskNativeUIMessages.tools_wizard_message_client_home_not_found, clientHomeId));
            } else {
                currentPage.setErrorMessage(null);
            }
            getContainer().updateMessage();
        }
    }

    // Verifies (and possibly downloads) the native client binaries before the tool runs.
    // Returns false - after showing the error - when validation fails or is cancelled.
    private boolean validateClientFiles() {
        if (!isNativeClientHomeRequired() || settings.getClientHome() == null) {
            return true;
        }
        try {
            UIUtils.run(getContainer(), true, true, monitor -> {
                try {
                    settings.getClientHome().validateFilesPresence(monitor);
                } catch (DBException e) {
                    // Wrap so the checked DBException can cross the runnable boundary.
                    throw new InvocationTargetException(e);
                }
            });
        } catch (InvocationTargetException e) {
            DBWorkbench.getPlatformUI().showError("Download native client file(s)", "Error downloading client file(s)", e.getTargetException());
            ((WizardPage) getContainer().getCurrentPage()).setErrorMessage("Error downloading native client file(s)");
            getContainer().updateMessage();
            return false;
        } catch (InterruptedException e) {
            // User cancelled the progress dialog - abort finish without an error popup.
            return false;
        }
        return true;
    }

    @Override
    public boolean performFinish() {
        // Save settings
        settings.saveSettings(getRunnableContext(), getPreferenceStore());

        if (!validateClientFiles()) {
            return false;
        }

        if (getCurrentTask() != null) {
            // Editing a persistent task: use the default task save/run flow.
            return super.performFinish();
        }

        showLogPage();
        try {
            // Execute directly - without task serialize/deserialize
            // We need it because some data producers cannot be serialized properly (e.g. ResultSetDatacontainer - see #7342)
            DBTTask temporaryTask = getProject().getTaskManager().createTemporaryTask(getTaskType(), getTaskType().getName());
            saveConfigurationToTask(temporaryTask);
            TaskWizardExecutor executor = new TaskWizardExecutor(getRunnableContext(), temporaryTask, log, logPage.getLogWriter());
            executor.executeTask();
            // Returning false keeps the wizard open so the user can watch the log page.
            return false;
        } catch (Exception e) {
            DBWorkbench.getPlatformUI().showError(e.getMessage(), "Error running task", e);
            return false;
        }
    }

    protected void showLogPage() {
        if (getContainer().getCurrentPage() != logPage) {
            getContainer().showPage(logPage);
        }
    }

    protected void notifyToolFinish(String toolName, long workTime) {
        // Make a sound
        Display.getCurrent().beep();
        // Notify agent
        if (workTime > DBWorkbench.getPlatformUI().getLongOperationTimeout() * 1000) {
            DBWorkbench.getPlatformUI().notifyAgent(toolName, IStatus.INFO);
        }
    }

    // NOTE(review): the name-concatenation loop is commented out, so this
    // currently always returns an empty string - confirm before re-enabling.
    public String getObjectsName() {
        StringBuilder str = new StringBuilder();
//        for (BASE_OBJECT object : settings.getDatabaseObjects()) {
//            if (str.length() > 0) str.append(",");
//            str.append(object.getName());
//        }
        return str.toString();
    }

    protected boolean isNativeClientHomeRequired() {
        return true;
    }

    public boolean isVerbose() {
        return false;
    }

    /** Subclass hook for successful completion; default is a no-op. */
    protected void onSuccess(long workTime) {

    }

    /** Subclass hook for failures; shows the recorded error message (if any) in a message box. */
    protected void onError() {
        UIUtils.showMessageBox(
            getShell(),
            taskTitle,
            errorMessage == null ? "Internal error" : errorMessage,
            SWT.ICON_ERROR);
    }

}
///////////////////////////////////////////////////////////////////////////
//                   __                _      _   ________               //
//                  / /   ____  ____ _(_)____/ | / / ____/               //
//                 / /   / __ \/ __ `/ / ___/  |/ / / __                 //
//                / /___/ /_/ / /_/ / / /__/ /|  / /_/ /                 //
//               /_____/\____/\__, /_/\___/_/ |_/\____/                  //
//                           /____/                                      //
//                                                                       //
//               The Next Generation Logic Library                       //
//                                                                       //
///////////////////////////////////////////////////////////////////////////
//                                                                       //
//  Copyright 2015-20xx Christoph Zengler                                //
//                                                                       //
//  Licensed under the Apache License, Version 2.0 (the "License");      //
//  you may not use this file except in compliance with the License.     //
//  You may obtain a copy of the License at                              //
//                                                                       //
//  http://www.apache.org/licenses/LICENSE-2.0                           //
//                                                                       //
//  Unless required by applicable law or agreed to in writing, software  //
//  distributed under the License is distributed on an "AS IS" BASIS,    //
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or      //
//  implied.  See the License for the specific language governing        //
//  permissions and limitations under the License.                       //
//                                                                       //
///////////////////////////////////////////////////////////////////////////

package org.logicng.datastructures.ubtrees;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * A data structure for storing sets with efficient sub- and superset queries.
 * C.f. `A New Method to Index and Query Sets`, Hoffmann and Koehler, 1999.
 * Each stored set is a path through the tree in the elements' sorted order;
 * the last node of the path holds the full set ("end of path").
 * @param <T> the type of the elements (must be comparable)
 * @version 2.0.0
 * @since 1.5.0
 */
public final class UBTree<T extends Comparable<T>> {

    // Forest of root nodes, keyed (and therefore iterated) in element order.
    private final SortedMap<T, UBNode<T>> rootNodes;

    /**
     * Constructs an empty UBTree.
     */
    public UBTree() {
        this.rootNodes = new TreeMap<>();
    }

    /**
     * Adds a set of comparable objects to this UBTree.
     * @param set the set of comparable objects
     */
    public void addSet(final SortedSet<T> set) {
        SortedMap<T, UBNode<T>> nodes = this.rootNodes;
        UBNode<T> node = null;
        // Walk the elements in sorted order, creating missing nodes along the path.
        for (final T element : set) {
            node = nodes.get(element);
            if (node == null) {
                node = new UBNode<>(element);
                nodes.put(element, node);
            }
            nodes = node.children();
        }
        // Mark the final node of the path as holding the complete set.
        // node is null only for an empty input set, which is then not stored.
        if (node != null) {
            node.setEndSet(set);
        }
    }

    /**
     * Returns the first subset of a given set in this UBTree.
     * @param set the set to search for
     * @return the first subset which is found for the given set or {@code null} if there is none
     */
    public SortedSet<T> firstSubset(final SortedSet<T> set) {
        if (this.rootNodes.isEmpty() || set == null || set.isEmpty()) {
            return null;
        }
        return firstSubset(set, this.rootNodes);
    }

    /**
     * Returns all subsets of a given set in this UBTree.
     * @param set the set to search for
     * @return all subsets of of the given set
     */
    public Set<SortedSet<T>> allSubsets(final SortedSet<T> set) {
        final Set<SortedSet<T>> subsets = new LinkedHashSet<>();
        allSubsets(set, this.rootNodes, subsets);
        return subsets;
    }

    /**
     * Returns all supersets of a given set in this UBTree.
     * @param set the set to search for
     * @return all supersets of the given set
     */
    public Set<SortedSet<T>> allSupersets(final SortedSet<T> set) {
        final Set<SortedSet<T>> supersets = new LinkedHashSet<>();
        allSupersets(set, this.rootNodes, supersets);
        return supersets;
    }

    /**
     * Returns all sets in this UBTree.
     * @return all sets in this UBTree
     */
    public Set<SortedSet<T>> allSets() {
        final List<UBNode<T>> allEndOfPathNodes = getAllEndOfPathNodes(this.rootNodes);
        final Set<SortedSet<T>> allSets = new LinkedHashSet<>();
        for (final UBNode<T> endOfPathNode : allEndOfPathNodes) {
            allSets.add(endOfPathNode.set());
        }
        return allSets;
    }

    /**
     * Returns all root nodes of this UBTree.
     * @return all root nodes of this UBTree
     */
    SortedMap<T, UBNode<T>> rootNodes() {
        return this.rootNodes;
    }

    // Depth-first search for the first stored set that is a subset of {@code set}.
    // Assumes {@code set} is non-empty (guarded by the public overload).
    private SortedSet<T> firstSubset(final SortedSet<T> set, final SortedMap<T, UBNode<T>> forest) {
        final Set<UBNode<T>> nodes = getAllNodesContainingElements(set, forest);
        SortedSet<T> foundSubset = null;
        for (final UBNode<T> node : nodes) {
            // A match found during a previous iteration ends the search.
            if (foundSubset != null) {
                return foundSubset;
            }
            // An end-of-path node means its whole stored path is contained in set.
            if (node.isEndOfPath()) {
                return node.set();
            }
            final SortedSet<T> remainingSet = new TreeSet<>(set);
            remainingSet.remove(set.first());
            foundSubset = firstSubset(remainingSet, node.children());
        }
        return foundSubset;
    }

    // Collects every stored set that is a subset of {@code set} into {@code subsets}.
    // Recursion shrinks the search set by its first element, guaranteeing termination;
    // children only hold larger elements, so no valid match is lost.
    private void allSubsets(final SortedSet<T> set, final SortedMap<T, UBNode<T>> forest, final Set<SortedSet<T>> subsets) {
        final Set<UBNode<T>> nodes = getAllNodesContainingElements(set, forest);
        for (final UBNode<T> node : nodes) {
            if (node.isEndOfPath()) {
                subsets.add(node.set());
            }
            final SortedSet<T> remainingSet = new TreeSet<>(set);
            remainingSet.remove(set.first());
            allSubsets(remainingSet, node.children(), subsets);
        }
    }

    // Collects every stored set that is a superset of {@code set} into {@code supersets}.
    private void allSupersets(final SortedSet<T> set, final SortedMap<T, UBNode<T>> forest, final Set<SortedSet<T>> supersets) {
        // Nodes whose element precedes set.first() may still lead to supersets deeper down.
        final Set<UBNode<T>> nodes = getAllNodesContainingElementsLessThan(set, forest, set.first());
        for (final UBNode<T> node : nodes) {
            allSupersets(set, node.children(), supersets);
        }
        for (final UBNode<T> node : forest.values()) {
            if (node.element().equals(set.first())) {
                final SortedSet<T> remainingSet = new TreeSet<>(set);
                remainingSet.remove(set.first());
                if (!remainingSet.isEmpty()) {
                    // More elements to match: continue below this node.
                    allSupersets(remainingSet, node.children(), supersets);
                } else {
                    // All elements matched: every stored set at or below this node is a superset.
                    final List<UBNode<T>> allEndOfPathNodes = getAllEndOfPathNodes(node.children());
                    if (node.isEndOfPath()) {
                        allEndOfPathNodes.add(node);
                    }
                    for (final UBNode<T> endOfPathNode : allEndOfPathNodes) {
                        supersets.add(endOfPathNode.set());
                    }
                }
            }
        }
    }

    // Returns the forest nodes whose element occurs in {@code set}.
    private Set<UBNode<T>> getAllNodesContainingElements(final SortedSet<T> set, final SortedMap<T, UBNode<T>> forest) {
        final Set<UBNode<T>> nodes = new LinkedHashSet<>();
        for (final T element : set) {
            final UBNode<T> node = forest.get(element);
            if (node != null) {
                nodes.add(node);
            }
        }
        return nodes;
    }

    // Returns the forest nodes whose element is strictly smaller than {@code element}.
    private Set<UBNode<T>> getAllNodesContainingElementsLessThan(final SortedSet<T> set, final SortedMap<T, UBNode<T>> forest, final T element) {
        final Set<UBNode<T>> nodes = new LinkedHashSet<>();
        for (final UBNode<T> node : forest.values()) {
            if (node != null && node.element().compareTo(element) < 0) {
                nodes.add(node);
            }
        }
        return nodes;
    }

    // Convenience wrapper that collects all end-of-path nodes of a forest into a list.
    private List<UBNode<T>> getAllEndOfPathNodes(final SortedMap<T, UBNode<T>> forest) {
        final List<UBNode<T>> endOfPathNodes = new ArrayList<>();
        getAllEndOfPathNodes(forest, endOfPathNodes);
        return endOfPathNodes;
    }

    // Recursively accumulates every end-of-path node in the forest.
    private void getAllEndOfPathNodes(final SortedMap<T, UBNode<T>> forest, final List<UBNode<T>> endOfPathNodes) {
        for (final UBNode<T> node : forest.values()) {
            if (node.isEndOfPath()) {
                endOfPathNodes.add(node);
            }
            getAllEndOfPathNodes(node.children(), endOfPathNodes);
        }
    }
}
/* Copyright (c) 2014, Aalborg University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the <organization> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.smartcampus.android.ui.maps;

import java.util.ArrayList;
import java.util.List;

//import org.joda.time.DateTime;

import com.smartcampus.R;
import com.smartcampus.android.ui.data.Building_FloorAdapter;
import com.smartcampus.android.ui.data.VertexAdapter;
import com.smartcampus.indoormodel.AbsoluteLocation;
import com.smartcampus.indoormodel.Building;
import com.smartcampus.indoormodel.Building_Floor;
import com.smartcampus.indoormodel.graph.Edge;
import com.smartcampus.indoormodel.graph.IGraph;
import com.smartcampus.indoormodel.graph.Vertex;
import com.smartcampus.javascript.JSInterface;
import com.smartcampus.javascript.DeviceInterface;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.view.Display;
import android.view.ViewGroup.LayoutParams;
import android.webkit.WebView;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ImageView.ScaleType;
import android.util.Log;

/**
 * Base activity that renders a 2D indoor map inside a WebView and bridges
 * between the Java side (building model, graph of vertices/edges) and the
 * javascript map page via JSInterface. Subclasses provide the online/offline
 * specific behavior (current floor, visible vertices, tap handling).
 */
public abstract class WebMap2D extends Activity {

    private static final String TAG = WebMap2D.class.getName();

    //callback for when the map has been loaded.
    protected class MapReadyReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            // Center on the current building (if any) and redraw overlays.
            if (mCurrentBuilding != null) {
                JSInterface.centerAt(webView, mCurrentBuilding.getLatitude(), mCurrentBuilding.getLongitude());
            }
            WebMap2D.this.refreshUI();
        }
    }

    public enum ViewType {
        MAP, SATELLITE, STREET, TRAFFIC;

        /**
         * @param index Index of desired infotype value
         * @return Returns the type corresponding to the specified index.
         * If no type is found, the default (MAP) is returned.
         */
        public static ViewType getValue(int index) {
            int i = 0;
            for (ViewType vt : ViewType.values()) {
                if (i == index)
                    return vt;
                i++;
            }
            return MAP;
        }
    }

    //Denotes the last known location estimate - regardless of provider
    protected static Location lastKnownLocation;

    private static final String DEFAULT_TILE_URL = "http://beacon.uhk.cz/webview/index.html";

    protected WebView webView;
    //private Button button1;
    //private Button button2;

    /* Start of Map2D */
    protected static final String emptyTitle = "No Title";
    protected static final String emptyDescription = "No Description";

    protected static Building mCurrentBuilding;
    protected static IGraph mGraph;
    protected static int mCurrentSelectedFloor = 0;
    protected static ViewType mCurrentViewType = ViewType.MAP;

    //TODO: FIGURE OUT IF WE CAN DO WITHOUT THE MAPVIEW
    //protected MapView mMapView;

    //When pressed will display the current (i.e., estimated) floor (only visible in online mode)
    //protected ImageButton mCurrentFloorBtn;

    protected static String mCurrentTileUrl;

    public static final String IS_MAP_READY = "com.smartcampus.android.ui.maps.WebMap2D.IS_MAP_READY";

    //For efficient bitmap
    // Computes a power-independent downsampling factor so the decoded bitmap
    // roughly fits the requested dimensions.
    private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            if (width > height) {
                inSampleSize = Math.round((float) height / (float) reqHeight);
            } else {
                inSampleSize = Math.round((float) width / (float) reqWidth);
            }
        }
        return inSampleSize;
    }

    //For efficient bitmap
    // Two-pass decode: first bounds only, then the real decode with sampling.
    private static Bitmap decodeSampledBitmapFromResource(Resources res, int resId, int reqWidth, int reqHeight) {
        // First decode with inJustDecodeBounds=true to check dimensions
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeResource(res, resId, options);

        // Calculate inSampleSize
        options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);

        // Decode bitmap with inSampleSize set
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeResource(res, resId, options);
    }

    protected ImageButton mTrackPositionBtn;

    /** SPLASH SECTION START **/
    protected Dialog mSplashDialog;

    //Receiver for when map tiles have been loaded (called from javascript - another thread)
    private MapReadyReceiver mMapReadyReceiver;

    //Indicates whether the floor changer dialog should contain a 'current' floor
    protected abstract boolean addCurrentFloorToFloorChangerDialog();

    //This method is called to show edges (IFF edges should be shown)
    protected void addEdgeOverlay(int floorNum) {
        List<Edge> edges = null;
        if (mGraph != null && showEdges()) {
            //This could be changed to a route in the online phase (so akin to getVisibleVertices()
            edges = getVisibleEdges(floorNum); //mGraph.getEdges(floorNum);
        }
        JSInterface.showEdges(webView, edges, floorNum);
    }

    //NOTE: Consider refactoring
    //Change name to 'ShowVertices'
    //This method is called to show the vertices
    protected void addGraphOverlay(int floorNum) {
        List<Vertex> verts = null;
        if (mGraph != null) {
            verts = getVisibleVertices(mGraph.getVertices(floorNum));
        }
        JSInterface.showVertices(webView, verts, floorNum, isOnline());
    }

    // Switches the selected floor and redraws; no-op when no graph is loaded.
    protected void changeToFloor(int newFloor) {
        if (mGraph != null) {
            mCurrentSelectedFloor = newFloor;
            refreshUI();
        }
    }

    //Clear map overlays
    protected void clearOverlays() {
        JSInterface.clearOverlays(webView);
    }

    //Concatenates building name and floor name, to give an overall indication of where a user is.
    protected String concatBuildingAndFloorName(int floorNum) {
        if (mCurrentBuilding == null)
            return " Unknown building ";
        String building_name = mCurrentBuilding.getName();
        if (building_name == null)
            return "Unknown Building";
        else {
            Building_Floor floor = mCurrentBuilding.getFloorAtFloorNumber(floorNum);
            String floor_name;
            if (floor == null)
                floor_name = "Unknown floor";
            else
                floor_name = floor.getFloorName() != null ? floor.getFloorName() : "floor #" + floor.getFloorNumber();
            return building_name + " - " + floor_name;
        }
    }

    /**
     * Creates the 'Change floor' dialog which allows a user to select a different floor.
     * @return A dialog displaying available floors.
     */
    protected AlertDialog createFloorChangerDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);

        //Add available floors
        ArrayList<Building_Floor> floors = new ArrayList<Building_Floor>();
        if (mCurrentBuilding == null || (!mCurrentBuilding.hasFloors())) {
            builder.setTitle("No available floors");
            floors = new ArrayList<Building_Floor>(); //empty list
        } else {
            builder.setTitle("Choose a new floor. \n (Current floor: " + getCurrentFloor() + ")");
            Iterable<Building_Floor> curFloors = mCurrentBuilding.getFloors();
            for (Building_Floor bf : curFloors) {
                floors.add(bf);
            }
        }

        //Add floors to adapter
        final ArrayList<Building_Floor> finalFloors = floors;
        builder.setAdapter(new Building_FloorAdapter(this, floors), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int item) {
                changeToFloor(finalFloors.get(item).getFloorNumber());
            }
        });
        return builder.create();
    }

    // Builds the full-screen splash layout; returns null on any failure
    // (the splash is purely cosmetic, so failures are swallowed).
    private LinearLayout createSplashLayout() {
        try {
            LinearLayout lLayout = new LinearLayout(this);
            lLayout.setOrientation(LinearLayout.VERTICAL);
            LayoutParams layout = new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
            lLayout.setLayoutParams(layout);

            Display display = getWindowManager().getDefaultDisplay();
            int screenWidth = display.getWidth();
            int screenHeight = display.getHeight();

            ImageView imgView = new ImageView(this);
            imgView.setImageBitmap(decodeSampledBitmapFromResource(getResources(), R.drawable.startskaerm_light, (int) (screenWidth * 1.3), (int) (screenHeight * 1.3)));
            imgView.setScaleType(ScaleType.CENTER_CROP);
            //imgView.setAdjustViewBounds(true); // set the ImageView bounds to match the Drawable's dimensions
            imgView.setLayoutParams(layout); //.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
            lLayout.addView(imgView);
            return lLayout;
        } catch (Exception ex) {
            return null;
        }
    }

    // Dialog for switching between map/satellite/street/traffic views.
    protected AlertDialog createViewDialog() {
        //We present the user with different options for annotating the edge
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Choose view");
        String[] viewOptions = new String[4];
        viewOptions[0] = "Map view";
        viewOptions[1] = "Satellite view";
        viewOptions[2] = "Street view";
        viewOptions[3] = "Traffic view";
        builder.setItems(viewOptions, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                mCurrentViewType = ViewType.getValue(item);
                updateViewType();
            }
        });
        return builder.create();
    }

    //Create the 'What's nearby' dialog
    public AlertDialog createWhatsNearbyDialog() {
        List<Vertex> floorVerts = new ArrayList<Vertex>();
        if (mGraph != null) {
            floorVerts = getVisibleVertices(mGraph.getVertices(mCurrentSelectedFloor));
        }
        //HACK: This MIGHT happen (e.g., floor number is 0, but we don't have any building floors on 0)
        //We return an empty list (no POIs)
        //A better approach would be to disallow non-existing floor numbers altogether or at least disable the 'show nearby' button.
        if (floorVerts == null)
            floorVerts = new ArrayList<Vertex>();
        final List<Vertex> verticesOnCurrentFloorList = floorVerts;

        AlertDialog.Builder whatsNearbyDialogBuilder = new AlertDialog.Builder(this);
        whatsNearbyDialogBuilder.setTitle("Nearby places of interest");
        whatsNearbyDialogBuilder.setAdapter(new VertexAdapter(this, verticesOnCurrentFloorList), new android.content.DialogInterface.OnClickListener() {
            //@Override
            public void onClick(DialogInterface dialog, int which) {
                Vertex selectedVertex = verticesOnCurrentFloorList.get(which);
                AbsoluteLocation absLoc = selectedVertex.getLocation().getAbsoluteLocation();
                JSInterface.centerAt(webView, absLoc.getLatitude(), absLoc.getLongitude());
                //We might insert a javascript call here that calls onTap in return in order to bring up the info-window
                subclassHook_WhatsNearbyDialog();
            }
        });
        return whatsNearbyDialogBuilder.create();
    }

    public Building getBuilding() {
        return mCurrentBuilding;
    }

    /**
     * Returns the 'current floor'
     * In the online phase, this means the floor of the estimated position.
     * In the offline phase, this means the manually selected floor
     * @return the current floor number
     */
    public abstract int getCurrentFloor();

    //The default behavior is to show all the graph's edges.
    //However, in the online phase, we only want to show a route.
protected List<Edge> getVisibleEdges(int floorNum) { List<Edge> result = null; if (mGraph != null) result = mGraph.getEdges(floorNum); return result; } //TODO: Change the API, so it looks more like getVisibleEdges() protected abstract List<Vertex> getVisibleVertices(List<Vertex> vertices); //used to tell javascript if we are in the online phase //default is false, override in online mode protected boolean isOnline() { return false; } protected void loadTiles() { //TODO:IFC change from using ifc to tile url //this is just for a quick test: //Note: The Json webclient CURRENTLY sets ifcUrl to "null" if it is null. StringBuilder sbUrl = new StringBuilder(); if (mCurrentBuilding != null && mCurrentBuilding.getIfcUrl() != null && mCurrentBuilding.getIfcUrl() != "null") { sbUrl.append(mCurrentBuilding.getIfcUrl()); } else { sbUrl.append(DEFAULT_TILE_URL); } //Avoid caching sbUrl.append("?t=").append(System.currentTimeMillis()); String url = sbUrl.toString(); Log.d(TAG, "WebView loadUrl: "+url); webView.loadUrl(url); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); //This could be handled more gracefully than a type check //FOLIA specific - Show 'Folia Indoor Mapping' splashscreen /* if (this instanceof com.smartcampus.android.ui.maps.online.WebMap2DOnline) { showSplashScreen(); } */ setContentView(R.layout.web_map); webView = (WebView)findViewById(R.id.web_map); setupWebView(); loadTiles(); IntentFilter mapReadyFilter; mapReadyFilter = new IntentFilter(IS_MAP_READY); mMapReadyReceiver = createMapReadyReceiver(); //new MapReadyReceiver(); registerReceiver(mMapReadyReceiver, mapReadyFilter); //refreshUI(); } /** * All screens (but one) handle map load the same: * A building has been loaded, now center at the building's coordinates. 
     * In the WebMap2DSelectBuildingLocation we don't yet have a building to center at
     * (that is what we are creating) so we handle it differently there.
     * @return the receiver invoked when the map reports it is ready
     */
    protected MapReadyReceiver createMapReadyReceiver() {
        return new MapReadyReceiver();
    }

    /** SPLASH SECTION END **/

    @Override
    protected void onDestroy() {
        super.onDestroy();
        try {
            unregisterReceiver(mMapReadyReceiver);
        } catch (Exception ex) {
            // Receiver may never have been registered (or already unregistered) - safe to ignore.
        }
    }

    @Override
    public void onRestart() {
        super.onRestart();
        refreshUI();
    }

    /**
     * This method is called (from javascript) whenever the user taps a marker on the map.
     * @param floorNum The current floor
     * @param vertexId The id of the selected (tapped) vertex.
     */
    public abstract void onTap(int floorNum, int vertexId);

    /*
    @Override
    public void onResume() {
        super.onResume();
        JavaScriptInterface jsi = JavaScriptInterface.getInstance();
        jsi.setTarget(this);
    }
    */

    /**
     * This method refreshes the UI, i.e., updates tiles and title corresponding to a given floor
     */
    protected void refreshUI() {
        if (mCurrentBuilding != null) { //This MAY (theoretically) have a shallow building
            if (mCurrentBuilding.getGraphModel() != null) {
                setTitle(concatBuildingAndFloorName(mCurrentSelectedFloor));
                this.updateOverlays(mCurrentSelectedFloor);
            }
        }
    }

    protected void removeSplashScreen() {
        if (mSplashDialog != null) {
            mSplashDialog.dismiss();
            mSplashDialog = null;
        }
    }

    // Called from javascript once the map page finished loading;
    // broadcasts IS_MAP_READY to trigger MapReadyReceiver.
    public void setMapReady() {
        sendBroadcast(new Intent(IS_MAP_READY));
    }

    /**
     * This method is called (from javascript) whenever the user taps on the map - not a marker.
     * @param isOnline Indicates whether we are in the online phase
     * @param floor the current floor
     * @param lat the latitude of the tapped location
     * @param lon the longitude of the tapped location
     */
    public abstract void setSelectedLocation(boolean isOnline, int floor, double lat, double lon);

    //Replace a string level with an int level
    @SuppressLint("SetJavaScriptEnabled")
    private void setupWebView() {
        webView.getSettings().setJavaScriptEnabled(true);
        //JSInterface
        webView.addJavascriptInterface(new DeviceInterface(this), "DeviceInterface");
        webView.clearCache(true);
    }

    /**
     * Indicates whether we desire to show the edges of the graph.
     * @return The default value is true
     */
    protected boolean showEdges() {
        return true;
    }

    protected void showSplashScreen() {
        LinearLayout layout = createSplashLayout();
        if (layout == null)
            return;

        mSplashDialog = new Dialog(this, R.style.SplashScreen);
        mSplashDialog.setContentView(layout);
        mSplashDialog.setCancelable(false);
        mSplashDialog.show();

        // Set Runnable to remove splash screen just in case
        final Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                removeSplashScreen();
            }
        }, 6000);
    }

    // Hook for subclasses to react when a 'what's nearby' item is selected; default no-op.
    protected void subclassHook_WhatsNearbyDialog() {}

    // Redraws all overlays (edges first, then vertices) for the given floor.
    protected void updateOverlays(int floorNum) {
        clearOverlays();
        //Show edges (if they should be shown):
        addEdgeOverlay(floorNum);
        //Show vertices:
        addGraphOverlay(floorNum);
    }

    private void updateViewType() {
        JSInterface.updateViewType(webView, mCurrentViewType);
    }
}
package com.mtt.myapp.common.util;

import java.io.*;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;

import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;

import com.mtt.myapp.common.excpetion.CustomRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Compression utility: zip/unzip, tar/untar, gzip and jar extraction helpers.
 *
 * All extraction methods validate archive entry names against the destination
 * directory so that a malicious archive cannot write outside of it ("Zip Slip").
 *
 * @author Mavlarn
 */
public abstract class CompressionUtil {

    private static final Logger LOGGER = LoggerFactory.getLogger(CompressionUtil.class);

    /** File extensions that should be marked executable after extraction. */
    public static final List<String> EXECUTABLE_EXTENSION = Arrays.asList("bat", "sh");

    /**
     * Resolves an archive entry name against the destination directory and
     * rejects entries that would escape it (Zip Slip, e.g. "../../etc/passwd").
     *
     * @param destDir   directory the archive is being extracted into
     * @param entryName entry name exactly as stored in the archive
     * @return the resolved target file, guaranteed to lie inside destDir
     * @throws IOException if the entry would resolve outside destDir
     */
    private static File resolveSecurely(File destDir, String entryName) throws IOException {
        File target = new File(destDir, entryName);
        String destPath = destDir.getCanonicalPath();
        String targetPath = target.getCanonicalPath();
        if (!targetPath.equals(destPath) && !targetPath.startsWith(destPath + File.separator)) {
            throw new IOException("Archive entry resolves outside of target directory: " + entryName);
        }
        return target;
    }

    /**
     * Unzip the given file into the same folder, using the platform default charset.
     *
     * @param zippedFile zipped file
     * @throws java.io.IOException IOException
     */
    public static void unzip(File zippedFile) throws IOException {
        unzip(zippedFile, Charset.defaultCharset().name());
    }

    /**
     * Unzip the given zipped file (into its parent folder) with the given character set.
     *
     * @param zippedFile  zipped file
     * @param charsetName character set
     */
    public static void unzip(File zippedFile, String charsetName) {
        unzip(zippedFile, zippedFile.getParentFile(), charsetName);
    }

    /**
     * Unzip the given zipped file into the destination directory, using the
     * platform default charset.
     *
     * @param zippedFile zipped file
     * @param destDir    destination directory
     */
    public static void unzip(File zippedFile, File destDir) {
        try {
            unzip(new FileInputStream(zippedFile), destDir, Charset.defaultCharset().name());
        } catch (FileNotFoundException e) {
            throw new CustomRuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Unzip the given zipped file into the destination directory with the given character set.
     *
     * @param zippedFile  zipped file
     * @param destDir     destination directory
     * @param charsetName character set name
     */
    public static void unzip(File zippedFile, File destDir, String charsetName) {
        try {
            unzip(new FileInputStream(zippedFile), destDir, charsetName);
        } catch (FileNotFoundException e) {
            throw new CustomRuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Unzip the given input stream into the destination directory with the default character set.
     *
     * @param is      input stream
     * @param destDir destination directory
     */
    public static void unzip(InputStream is, File destDir) {
        unzip(is, destDir, Charset.defaultCharset().name());
    }

    /**
     * Unzip the given input stream into the destination directory with the given character set.
     *
     * Both the supplied stream and the internal zip stream are always closed.
     *
     * @param is          input stream (consumed and closed by this method)
     * @param destDir     destination directory
     * @param charsetName character set name
     */
    public static void unzip(InputStream is, File destDir, String charsetName) {
        ZipArchiveInputStream zis = null;
        try {
            byte[] buf = new byte[1024 * 8];
            zis = new ZipArchiveInputStream(is, charsetName, false);
            ZipArchiveEntry entry;
            while ((entry = zis.getNextZipEntry()) != null) {
                // Reject entries that would land outside destDir (Zip Slip).
                File target = resolveSecurely(destDir, entry.getName());
                if (entry.isDirectory()) {
                    target.mkdirs();
                } else {
                    // Some archives carry no explicit directory entries,
                    // so make sure the parent directory chain exists.
                    File parent = target.getParentFile();
                    if (parent != null && !parent.exists()) {
                        parent.mkdirs();
                    }
                    target.createNewFile();
                    BufferedOutputStream bos =
                            new BufferedOutputStream(new FileOutputStream(target));
                    try {
                        int nWritten;
                        while ((nWritten = zis.read(buf)) >= 0) {
                            bos.write(buf, 0, nWritten);
                        }
                    } finally {
                        // Close even when a read/write fails, otherwise the
                        // file handle leaks on the error path.
                        IOUtils.closeQuietly(bos);
                    }
                }
            }
        } catch (Exception e) {
            throw new CustomRuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(zis);
        }
    }

    /**
     * Compresses the given file (or dir) and creates the new file under the same directory.
     *
     * @param src file or directory
     * @throws java.io.IOException IOException
     */
    public static void zip(File src) throws IOException {
        zip(src, Charset.defaultCharset().name(), true);
    }

    /**
     * Zips the given file (or dir).
     *
     * @param src        file or directory to compress
     * @param includeSrc if true and src is a directory, src itself is part of the
     *                   archive; if false only its children are.
     * @throws java.io.IOException IOException
     */
    public static void zip(File src, boolean includeSrc) throws IOException {
        zip(src, Charset.defaultCharset().name(), includeSrc);
    }

    /**
     * Compresses the given src file (or directory) with the given encoding.
     *
     * @param src         src
     * @param charSetName character set
     * @param includeSrc  true if src itself should appear in the archive
     * @throws java.io.IOException IOException
     */
    public static void zip(File src, String charSetName, boolean includeSrc) throws IOException {
        zip(src, src.getParentFile(), charSetName, includeSrc);
    }

    /**
     * Compresses the given src file (or directory) and writes to the given
     * output stream, including sub-directories.
     *
     * @param src src
     * @param os  output stream
     * @throws java.io.IOException IOException
     */
    public static void zip(File src, OutputStream os) throws IOException {
        zip(src, os, Charset.defaultCharset().name(), true);
    }

    /**
     * Compresses the given src file (or directory) and creates the compressed
     * file (named "&lt;src basename&gt;.zip") under the given destDir.
     *
     * @param src         src to be zipped
     * @param destDir     destination directory
     * @param charSetName character set to be used
     * @param includeSrc  true if src itself should appear in the archive
     * @throws java.io.IOException IOException
     */
    public static void zip(File src, File destDir, String charSetName, boolean includeSrc)
            throws IOException {
        String fileName = src.getName();
        if (!src.isDirectory()) {
            // Strip the extension of a plain file before appending ".zip".
            int pos = fileName.lastIndexOf(".");
            if (pos > 0) {
                fileName = fileName.substring(0, pos);
            }
        }
        fileName += ".zip";

        File zippedFile = new File(destDir, fileName);
        if (!zippedFile.exists()) {
            zippedFile.createNewFile();
        }
        zip(src, new FileOutputStream(zippedFile), charSetName, includeSrc);
    }

    /**
     * Zip the given src into the given output stream.
     *
     * The output stream is always closed, also when compression fails.
     *
     * @param src         src to be zipped
     * @param os          output stream (closed by this method)
     * @param charsetName character set to be used
     * @param includeSrc  true if src itself should appear in the archive
     * @throws java.io.IOException IOException
     */
    public static void zip(File src, OutputStream os, String charsetName, boolean includeSrc)
            throws IOException {
        ZipArchiveOutputStream zos = new ZipArchiveOutputStream(os);
        zos.setEncoding(charsetName);
        byte[] buf = new byte[8 * 1024];

        // Iterative depth-first walk; "root" is the directory entry names are
        // made relative to.
        Stack<File> stack = new Stack<File>();
        File root;
        if (src.isDirectory()) {
            if (includeSrc) {
                stack.push(src);
                root = src.getParentFile();
            } else {
                File[] fs = src.listFiles();
                if (fs != null) {
                    for (File child : fs) {
                        stack.push(child);
                    }
                }
                root = src;
            }
        } else {
            stack.push(src);
            root = src.getParentFile();
        }

        try {
            while (!stack.isEmpty()) {
                File f = stack.pop();
                if (f.isDirectory()) {
                    File[] fs = f.listFiles();
                    if (fs == null) {
                        continue;
                    }
                    for (File child : fs) {
                        if (child.isDirectory()) {
                            stack.push(child);
                        } else {
                            // Queue plain files at the bottom so directories
                            // keep being expanded first.
                            stack.add(0, child);
                        }
                    }
                } else {
                    ZipArchiveEntry ze = new ZipArchiveEntry(toPath(root, f));
                    zos.putArchiveEntry(ze);
                    FileInputStream fis = new FileInputStream(f);
                    try {
                        int length;
                        while ((length = fis.read(buf, 0, buf.length)) >= 0) {
                            zos.write(buf, 0, length);
                        }
                    } finally {
                        // Close per-file input even when writing fails.
                        IOUtils.closeQuietly(fis);
                    }
                    zos.closeArchiveEntry();
                }
            }
        } finally {
            // Flushes the central directory and closes the underlying stream;
            // previously skipped when an exception occurred, leaking "os".
            zos.close();
        }
    }

    /**
     * Builds the archive-relative path (forward slashes, trailing slash for
     * directories) of {@code dir} under {@code root}.
     */
    private static String toPath(File root, File dir) {
        String path = dir.getAbsolutePath();
        path = path.substring(root.getAbsolutePath().length()).replace(File.separatorChar, '/');
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        if (dir.isDirectory() && !path.endsWith("/")) {
            path += "/";
        }
        return path;
    }

    /**
     * Untar an input file into an output directory.
     *
     * Extracted ".bat"/".sh" files are marked executable; all extracted files
     * are made readable and writable for the owner.
     *
     * @param inFile    the input .tar file
     * @param outputDir the output directory
     * @return The {@link java.util.List} of {@link java.io.File}s with the untared content.
     */
    public static List<File> untar(final File inFile, final File outputDir) {
        final List<File> untaredFiles = new LinkedList<File>();
        InputStream is = null;
        TarArchiveInputStream debInputStream = null;
        try {
            is = new FileInputStream(inFile);
            debInputStream = (TarArchiveInputStream) new ArchiveStreamFactory()
                    .createArchiveInputStream("tar", is);
            TarArchiveEntry entry;
            while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
                // Reject entries that would land outside outputDir (Zip Slip).
                final File outputFile = resolveSecurely(outputDir, entry.getName());
                if (entry.isDirectory()) {
                    if (!outputFile.exists() && !outputFile.mkdirs()) {
                        throw new IllegalStateException(String.format(
                                "Couldn't create directory %s.", outputFile.getAbsolutePath()));
                    }
                } else {
                    File parentFile = outputFile.getParentFile();
                    if (!parentFile.exists()) {
                        parentFile.mkdirs();
                    }
                    final OutputStream outputFileStream = new FileOutputStream(outputFile);
                    try {
                        IOUtils.copy(debInputStream, outputFileStream);
                    } finally {
                        // Close even when the copy fails.
                        IOUtils.closeQuietly(outputFileStream);
                    }
                    if (FilenameUtils.isExtension(outputFile.getName(), EXECUTABLE_EXTENSION)) {
                        outputFile.setExecutable(true, true);
                    }
                    outputFile.setReadable(true);
                    outputFile.setWritable(true, true);
                }
                untaredFiles.add(outputFile);
            }
            debInputStream.close();
        } catch (Exception e) {
            LOGGER.error("Error while untar {} file by {}", inFile, e.getMessage());
            LOGGER.debug("Trace is : ", e);
            throw new CustomRuntimeException("Error while untar file", e);
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(debInputStream);
        }
        return untaredFiles;
    }

    /**
     * Ungzip the given file.
     *
     * @param inFile  gzipped input file
     * @param outFile file to write the decompressed content to
     * @return ungzipped file.
     */
    public static File ungzip(final File inFile, final File outFile) {
        FileInputStream fin = null;
        BufferedInputStream in = null;
        FileOutputStream fout = null;
        GzipCompressorInputStream gzIn = null;
        try {
            fin = new FileInputStream(inFile);
            in = new BufferedInputStream(fin);
            gzIn = new GzipCompressorInputStream(in);
            if (!outFile.getParentFile().exists()) {
                outFile.getParentFile().mkdirs();
            }
            fout = new FileOutputStream(outFile);
            final byte[] buffer = new byte[4048];
            int n;
            while (-1 != (n = gzIn.read(buffer))) {
                fout.write(buffer, 0, n);
            }
        } catch (Exception e) {
            LOGGER.error("Error while ungzip {} file by {}", inFile, e.getMessage());
            LOGGER.debug("Trace is : ", e);
            throw new CustomRuntimeException("Error while ungzip file", e);
        } finally {
            IOUtils.closeQuietly(fin);
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(fout);
            IOUtils.closeQuietly(gzIn);
        }
        return outFile;
    }

    /**
     * Unpack the given jar file, including its manifest (META-INF/MANIFEST.MF).
     *
     * @param jarFile file to be uncompressed
     * @param destDir destination directory
     * @throws java.io.IOException occurs when IO has a problem.
     */
    public static void unjar(File jarFile, String destDir) throws IOException {
        File dest = new File(destDir);
        if (!dest.exists()) {
            dest.mkdirs();
        }
        if (!dest.isDirectory()) {
            LOGGER.error("Destination must be a directory.");
            throw new IOException("Destination must be a directory.");
        }
        JarInputStream jin = new JarInputStream(new FileInputStream(jarFile));
        try {
            byte[] buffer = new byte[1024];
            ZipEntry entry = jin.getNextEntry();
            while (entry != null) {
                String fileName = entry.getName();
                if (fileName.charAt(fileName.length() - 1) == '/') {
                    fileName = fileName.substring(0, fileName.length() - 1);
                }
                if (fileName.charAt(0) == '/') {
                    fileName = fileName.substring(1);
                }
                if (File.separatorChar != '/') {
                    fileName = fileName.replace('/', File.separatorChar);
                }
                // Reject entries that would land outside dest (Zip Slip).
                File file = resolveSecurely(dest, fileName);
                if (entry.isDirectory()) {
                    // make sure the directory exists
                    file.mkdirs();
                    jin.closeEntry();
                } else {
                    // make sure the directory exists
                    File parent = file.getParentFile();
                    if (parent != null && !parent.exists()) {
                        parent.mkdirs();
                    }
                    // dump the file
                    OutputStream out = new FileOutputStream(file);
                    try {
                        int len;
                        while ((len = jin.read(buffer, 0, buffer.length)) != -1) {
                            out.write(buffer, 0, len);
                        }
                        out.flush();
                    } finally {
                        IOUtils.closeQuietly(out);
                    }
                    jin.closeEntry();
                    file.setLastModified(entry.getTime());
                }
                entry = jin.getNextEntry();
            }
            Manifest mf = jin.getManifest();
            if (mf != null) {
                File file = new File(dest, "META-INF/MANIFEST.MF");
                File parent = file.getParentFile();
                if (!parent.exists()) {
                    parent.mkdirs();
                }
                OutputStream out = new FileOutputStream(file);
                try {
                    mf.write(out);
                    out.flush();
                } finally {
                    IOUtils.closeQuietly(out);
                }
            }
        } finally {
            // Previously leaked when any entry failed to extract.
            IOUtils.closeQuietly(jin);
        }
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

/**
 * <p>
 * Describes an import instance task.
 * </p>
 */
public class ImportInstanceTaskDetails implements Serializable, Cloneable {

    /** One or more volumes. Lazily initialized; see {@link #getVolumes()}. */
    private com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem> volumes;

    /** The ID of the instance. */
    private String instanceId;

    /** The instance operating system. Allowed values: Windows. */
    private String platform;

    /** A description of the task. */
    private String description;

    /**
     * Copies an arbitrary collection of volumes into the SDK's auto-construct
     * list type; a {@code null} source yields {@code null}.
     */
    private static com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem> copyOf(
            java.util.Collection<ImportInstanceVolumeDetailItem> source) {
        if (source == null) {
            return null;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem> copy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem>(source.size());
        copy.addAll(source);
        return copy;
    }

    /**
     * One or more volumes.
     *
     * @return One or more volumes (never {@code null}; lazily initialized to an
     *         empty auto-construct list).
     */
    public java.util.List<ImportInstanceVolumeDetailItem> getVolumes() {
        if (volumes == null) {
            com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem> empty =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<ImportInstanceVolumeDetailItem>();
            empty.setAutoConstruct(true);
            volumes = empty;
        }
        return volumes;
    }

    /**
     * One or more volumes.
     *
     * @param volumes One or more volumes (a defensive copy is stored).
     */
    public void setVolumes(java.util.Collection<ImportInstanceVolumeDetailItem> volumes) {
        this.volumes = copyOf(volumes);
    }

    /**
     * One or more volumes.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setVolumes(java.util.Collection)} or {@link
     * #withVolumes(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param volumes One or more volumes.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public ImportInstanceTaskDetails withVolumes(ImportInstanceVolumeDetailItem... volumes) {
        java.util.List<ImportInstanceVolumeDetailItem> current = getVolumes(); // lazily initializes
        for (ImportInstanceVolumeDetailItem item : volumes) {
            current.add(item);
        }
        return this;
    }

    /**
     * One or more volumes. Replaces any existing values.
     *
     * @param volumes One or more volumes.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public ImportInstanceTaskDetails withVolumes(java.util.Collection<ImportInstanceVolumeDetailItem> volumes) {
        setVolumes(volumes);
        return this;
    }

    /**
     * The ID of the instance.
     *
     * @return The ID of the instance.
     */
    public String getInstanceId() {
        return instanceId;
    }

    /**
     * The ID of the instance.
     *
     * @param instanceId The ID of the instance.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * The ID of the instance.
     *
     * @param instanceId The ID of the instance.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public ImportInstanceTaskDetails withInstanceId(String instanceId) {
        this.instanceId = instanceId;
        return this;
    }

    /**
     * The instance operating system. Allowed values: Windows.
     *
     * @return The instance operating system.
     * @see PlatformValues
     */
    public String getPlatform() {
        return platform;
    }

    /**
     * The instance operating system. Allowed values: Windows.
     *
     * @param platform The instance operating system.
     * @see PlatformValues
     */
    public void setPlatform(String platform) {
        this.platform = platform;
    }

    /**
     * The instance operating system. Allowed values: Windows.
     *
     * @param platform The instance operating system.
     * @return A reference to this updated object so that method calls can be chained together.
     * @see PlatformValues
     */
    public ImportInstanceTaskDetails withPlatform(String platform) {
        this.platform = platform;
        return this;
    }

    /**
     * The instance operating system. Allowed values: Windows.
     *
     * @param platform The instance operating system.
     * @see PlatformValues
     */
    public void setPlatform(PlatformValues platform) {
        this.platform = platform.toString();
    }

    /**
     * The instance operating system. Allowed values: Windows.
     *
     * @param platform The instance operating system.
     * @return A reference to this updated object so that method calls can be chained together.
     * @see PlatformValues
     */
    public ImportInstanceTaskDetails withPlatform(PlatformValues platform) {
        this.platform = platform.toString();
        return this;
    }

    /**
     * A description of the task.
     *
     * @return A description of the task.
     */
    public String getDescription() {
        return description;
    }

    /**
     * A description of the task.
     *
     * @param description A description of the task.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * A description of the task.
     *
     * @param description A description of the task.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public ImportInstanceTaskDetails withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getVolumes() != null) {
            sb.append("Volumes: ").append(getVolumes()).append(",");
        }
        if (getInstanceId() != null) {
            sb.append("InstanceId: ").append(getInstanceId()).append(",");
        }
        if (getPlatform() != null) {
            sb.append("Platform: ").append(getPlatform()).append(",");
        }
        if (getDescription() != null) {
            sb.append("Description: ").append(getDescription());
        }
        sb.append("}");
        return sb.toString();
    }

    /** Null-safe hash of a single component. */
    private static int hashOrZero(Object value) {
        return (value == null) ? 0 : value.hashCode();
    }

    /** Null-safe equality of two components. */
    private static boolean bothEqual(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + hashOrZero(getVolumes());
        result = prime * result + hashOrZero(getInstanceId());
        result = prime * result + hashOrZero(getPlatform());
        result = prime * result + hashOrZero(getDescription());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ImportInstanceTaskDetails)) {
            return false;
        }
        ImportInstanceTaskDetails that = (ImportInstanceTaskDetails) obj;
        return bothEqual(this.getVolumes(), that.getVolumes())
                && bothEqual(this.getInstanceId(), that.getInstanceId())
                && bothEqual(this.getPlatform(), that.getPlatform())
                && bothEqual(this.getDescription(), that.getDescription());
    }

    @Override
    public ImportInstanceTaskDetails clone() {
        try {
            return (ImportInstanceTaskDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
import org.apache.hadoop.yarn.api.resource.PlacementConstraints;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * In memory implementation of the {@link PlacementConstraintManagerService}.
 * Thread-safety is provided by a single read/write lock guarding both the
 * global and the per-application constraint maps.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class MemoryPlacementConstraintManager
    extends PlacementConstraintManagerService {

  private static final Logger LOG =
      LoggerFactory.getLogger(MemoryPlacementConstraintManager.class);

  private ReentrantReadWriteLock.ReadLock readLock;
  private ReentrantReadWriteLock.WriteLock writeLock;

  /**
   * Stores the global constraints that will be manipulated by the cluster
   * admin. The key of each entry is the tag that will enable the corresponding
   * constraint.
   */
  private Map<String, PlacementConstraint> globalConstraints;
  /**
   * Stores the constraints for each application, along with the allocation tags
   * that will enable each of the constraints for a given application.
   */
  private Map<ApplicationId, Map<String, PlacementConstraint>> appConstraints;

  public MemoryPlacementConstraintManager() {
    this.globalConstraints = new HashMap<>();
    this.appConstraints = new HashMap<>();
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    readLock = lock.readLock();
    writeLock = lock.writeLock();
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    super.serviceInit(conf);
  }

  @Override
  public void registerApplication(ApplicationId appId,
      Map<Set<String>, PlacementConstraint> constraintMap) {
    // Validate outside any lock: validation only reads the caller's input map,
    // not shared state.
    Map<String, PlacementConstraint> constraintsForApp = new HashMap<>();
    for (Map.Entry<Set<String>, PlacementConstraint> entry : constraintMap
        .entrySet()) {
      Set<String> sourceTags = entry.getKey();
      PlacementConstraint constraint = entry.getValue();
      if (validateConstraint(sourceTags, constraint)) {
        String sourceTag = getValidSourceTag(sourceTags);
        constraintsForApp.put(sourceTag, constraint);
      }
    }

    if (constraintsForApp.isEmpty()) {
      LOG.info("Application {} was registered, but no constraints were added.",
          appId);
    }

    // The "already registered" check and the insertion must happen atomically
    // under the write lock. Checking under the read lock and inserting under
    // the write lock (as done previously) let two concurrent registrations
    // both pass the check, with one silently overwriting the other.
    writeLock.lock();
    try {
      if (appConstraints.get(appId) != null) {
        LOG.warn("Application {} has already been registered.", appId);
        return;
      }
      appConstraints.put(appId, constraintsForApp);
    } finally {
      writeLock.unlock();
    }
  }

  @Override
  public void addConstraint(ApplicationId appId, Set<String> sourceTags,
      PlacementConstraint placementConstraint, boolean replace) {
    writeLock.lock();
    try {
      Map<String, PlacementConstraint> constraintsForApp =
          appConstraints.get(appId);
      if (constraintsForApp == null) {
        LOG.info("Cannot add constraint to application {}, as it has not "
            + "been registered yet.", appId);
        return;
      }

      addConstraintToMap(constraintsForApp, sourceTags, placementConstraint,
          replace);
    } finally {
      writeLock.unlock();
    }
  }

  @Override
  public void addGlobalConstraint(Set<String> sourceTags,
      PlacementConstraint placementConstraint, boolean replace) {
    writeLock.lock();
    try {
      addConstraintToMap(globalConstraints, sourceTags, placementConstraint,
          replace);
    } finally {
      writeLock.unlock();
    }
  }

  /**
   * Helper method that adds a constraint to a map for a given source tag.
   * Assumes there is already a lock on the constraint map.
   *
   * @param constraintMap constraint map to which the constraint will be added
   * @param sourceTags the source tags that will enable this constraint
   * @param placementConstraint the new constraint to be added
   * @param replace if true, an existing constraint for these sourceTags will be
   *          replaced with the new one
   */
  private void addConstraintToMap(
      Map<String, PlacementConstraint> constraintMap, Set<String> sourceTags,
      PlacementConstraint placementConstraint, boolean replace) {
    if (validateConstraint(sourceTags, placementConstraint)) {
      String sourceTag = getValidSourceTag(sourceTags);
      if (constraintMap.get(sourceTag) == null || replace) {
        if (replace) {
          LOG.info("Replacing the constraint associated with tag {} with {}.",
              sourceTag, placementConstraint);
        }
        constraintMap.put(sourceTag, placementConstraint);
      } else {
        LOG.info("Constraint {} will not be added. There is already a "
            + "constraint associated with tag {}.",
            placementConstraint, sourceTag);
      }
    }
  }

  @Override
  public Map<Set<String>, PlacementConstraint> getConstraints(
      ApplicationId appId) {
    readLock.lock();
    try {
      if (appConstraints.get(appId) == null) {
        LOG.debug("Application {} is not registered in the Placement "
            + "Constraint Manager.", appId);
        // NOTE: callers rely on null meaning "not registered"; do not change
        // this to an empty map.
        return null;
      }

      // Copy to a new map and return an unmodifiable version of it.
      // Each key of the map is a set with a single source tag.
      Map<Set<String>, PlacementConstraint> constraintMap =
          appConstraints.get(appId).entrySet().stream()
              .collect(Collectors.toMap(
                  e -> Stream.of(e.getKey()).collect(Collectors.toSet()),
                  e -> e.getValue()));

      return Collections.unmodifiableMap(constraintMap);
    } finally {
      readLock.unlock();
    }
  }

  @Override
  public PlacementConstraint getConstraint(ApplicationId appId,
      Set<String> sourceTags) {
    if (!validateSourceTags(sourceTags)) {
      return null;
    }
    String sourceTag = getValidSourceTag(sourceTags);
    readLock.lock();
    try {
      if (appConstraints.get(appId) == null) {
        LOG.debug("Application {} is not registered in the Placement "
            + "Constraint Manager.", appId);
        return null;
      }
      // TODO: Merge this constraint with the global one for this tag, if one
      // exists.
      return appConstraints.get(appId).get(sourceTag);
    } finally {
      readLock.unlock();
    }
  }

  @Override
  public PlacementConstraint getGlobalConstraint(Set<String> sourceTags) {
    if (!validateSourceTags(sourceTags)) {
      return null;
    }
    String sourceTag = getValidSourceTag(sourceTags);
    readLock.lock();
    try {
      return globalConstraints.get(sourceTag);
    } finally {
      readLock.unlock();
    }
  }

  @Override
  public PlacementConstraint getMultilevelConstraint(ApplicationId appId,
      Set<String> sourceTags,
      PlacementConstraint schedulingRequestConstraint) {
    List<PlacementConstraint> constraints = new ArrayList<>();
    // Add scheduling request-level constraint.
    if (schedulingRequestConstraint != null) {
      constraints.add(schedulingRequestConstraint);
    }
    // Add app-level constraint if appId is given.
    if (appId != null && sourceTags != null
        && !sourceTags.isEmpty()) {
      constraints.add(getConstraint(appId, sourceTags));
    }
    // Add global constraint.
    if (sourceTags != null && !sourceTags.isEmpty()) {
      constraints.add(getGlobalConstraint(sourceTags));
    }

    // Remove all null or duplicate constraints.
    List<PlacementConstraint.AbstractConstraint> allConstraints =
        constraints.stream()
            .filter(placementConstraint -> placementConstraint != null
                && placementConstraint.getConstraintExpr() != null)
            .map(PlacementConstraint::getConstraintExpr)
            .distinct()
            .collect(Collectors.toList());

    // Compose an AND constraint
    // When merge request(RC), app(AC) and global constraint(GC),
    // we do a merge on them with CC=AND(GC, AC, RC) and returns a
    // composite AND constraint. Subsequently we check if CC could
    // be satisfied. This ensures that every level of constraint
    // is satisfied.
    PlacementConstraint.And andConstraint = PlacementConstraints.and(
        allConstraints.toArray(new PlacementConstraint
            .AbstractConstraint[allConstraints.size()]));
    return andConstraint.build();
  }

  @Override
  public void unregisterApplication(ApplicationId appId) {
    writeLock.lock();
    try {
      appConstraints.remove(appId);
    } finally {
      writeLock.unlock();
    }
  }

  @Override
  public void removeGlobalConstraint(Set<String> sourceTags) {
    if (!validateSourceTags(sourceTags)) {
      return;
    }
    String sourceTag = getValidSourceTag(sourceTags);
    writeLock.lock();
    try {
      globalConstraints.remove(sourceTag);
    } finally {
      writeLock.unlock();
    }
  }

  @Override
  public int getNumRegisteredApplications() {
    readLock.lock();
    try {
      return appConstraints.size();
    } finally {
      readLock.unlock();
    }
  }

  @Override
  public int getNumGlobalConstraints() {
    readLock.lock();
    try {
      return globalConstraints.size();
    } finally {
      readLock.unlock();
    }
  }
}
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.backend.converters.tostunner.tasks;

import org.eclipse.bpmn2.Bpmn2Factory;
import org.eclipse.bpmn2.Task;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.bpmn.backend.converters.Result;
import org.kie.workbench.common.stunner.bpmn.backend.converters.tostunner.BpmnNode;
import org.kie.workbench.common.stunner.bpmn.backend.converters.tostunner.properties.TaskPropertyReader;
import org.kie.workbench.common.stunner.bpmn.backend.converters.tostunner.properties.UserTaskPropertyReader;
import org.kie.workbench.common.stunner.bpmn.definition.NoneTask;
import org.kie.workbench.common.stunner.bpmn.definition.UserTask;
import org.kie.workbench.common.stunner.bpmn.definition.property.assignee.Actors;
import org.kie.workbench.common.stunner.bpmn.definition.property.background.BackgroundSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.dataio.AssignmentsInfo;
import org.kie.workbench.common.stunner.bpmn.definition.property.dimensions.RectangleDimensionsSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.font.FontSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.simulation.SimulationSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.task.ScriptTypeListValue;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.content.Bounds;
import org.kie.workbench.common.stunner.core.graph.content.view.View;
import org.kie.workbench.common.stunner.core.marshaller.MarshallingRequest;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code TaskConverter}: verifies that BPMN2 user tasks are
 * converted to Stunner {@link UserTask} definitions (both multiple-instance
 * and single-instance variants), and that manual/receive/send/plain tasks
 * all fall through to the default {@link NoneTask} conversion.
 * <p>
 * The mock wiring for the shared collaborators ({@code factoryManager},
 * {@code propertyReaderFactory}, {@code tested}) lives in
 * {@code BaseTaskConverterTest}; this class only adds the user-task-specific
 * property-reader stubbing.
 */
@RunWith(MockitoJUnitRunner.class)
public class TaskConverterTest extends BaseTaskConverterTest {

    // Sentinel values returned by the stubbed property reader and asserted
    // back on the converted UserTask definition.
    private static final String NAME = "NAME";
    private static final String TASK_NAME = "TASK_NAME";
    private static final Actors ACTORS = new Actors();
    private static final String GROUP_ID = "GROUP_ID";
    private static final String DOCUMENTATION = "DOCUMENTATION";
    private static final Boolean IS_ASYNC = Boolean.TRUE;
    private static final Boolean SKIPPABLE = Boolean.TRUE;
    private static final String PRIORITY = "PRIORITY";
    private static final String SUBJECT = "SUBJECT";
    private static final String DESCRIPTION = "DESCRIPTION";
    private static final String CREATED_BY = "CREATED_BY";
    private static final Boolean IS_AUTOSTART = Boolean.TRUE;
    // Multiple-instance properties (only meaningful when isMultipleInstance() is true).
    private static final Boolean SEQUENTIAL = Boolean.TRUE;
    private static final String COLLECTION_INPUT = "COLLECTION_INPUT";
    private static final String COLLECTION_OUTPUT = "COLLECTION_OUTPUT";
    private static final String DATA_INPUT = "DATA_INPUT";
    private static final String DATA_OUTPUT = "DATA_OUTPUT";
    private static final String COMPLETION_CONDITION = "COMPLETION_CONDITION";
    private static final ScriptTypeListValue ON_ENTRY_ACTION = new ScriptTypeListValue();
    private static final ScriptTypeListValue ON_EXIT_ACTION = new ScriptTypeListValue();
    private static final String CONTENT = "CONTENT";
    private static final String SLA_DUE_DATE = "SLA_DUE_DATE";
    private static final Bounds BOUNDS = Bounds.create();
    private static final SimulationSet SIMULATION_SET = new SimulationSet();
    private static final RectangleDimensionsSet RECTANGLE_DIMENSIONS_SET = new RectangleDimensionsSet();
    private static final FontSet FONT_SET = new FontSet();
    private static final BackgroundSet BACKGROUND_SET = new BackgroundSet();
    private static final AssignmentsInfo ASSIGNMENTS_INFO = new AssignmentsInfo();

    // BPMN2 model element fed into the converter under test.
    @Mock
    private org.eclipse.bpmn2.UserTask userTask;

    // Graph node the factory manager is stubbed to produce for UserTask.class.
    @Mock
    private Node<View<UserTask>, Edge> userTaskNode;

    @Mock
    private View<UserTask> userTaskContent;

    // Real (non-mock) definition so the converter's property writes can be asserted directly.
    private UserTask userTaskDefinition;

    @Mock
    private UserTaskPropertyReader userTaskPropertyReader;

    /**
     * Wires the user-task conversion path: factory manager returns
     * {@code userTaskNode}, the property-reader factory returns the stubbed
     * {@code userTaskPropertyReader}, and every reader getter yields the
     * sentinel constants above.
     */
    @Override
    public void setUp() {
        super.setUp();
        userTaskDefinition = new UserTask();
        when(factoryManager.newNode(any(), eq(UserTask.class))).thenReturn(userTaskNode);
        when(userTaskNode.getContent()).thenReturn(userTaskContent);
        // Chained stubbing: getContent() is already stubbed above, so this
        // effectively stubs userTaskContent.getDefinition().
        when(userTaskNode.getContent().getDefinition()).thenReturn(userTaskDefinition);
        when(propertyReaderFactory.of(userTask)).thenReturn(userTaskPropertyReader);
        when(userTaskPropertyReader.getName()).thenReturn(NAME);
        when(userTaskPropertyReader.getDocumentation()).thenReturn(DOCUMENTATION);
        when(userTaskPropertyReader.getSimulationSet()).thenReturn(SIMULATION_SET);
        when(userTaskPropertyReader.getBounds()).thenReturn(BOUNDS);
        when(userTaskPropertyReader.getRectangleDimensionsSet()).thenReturn(RECTANGLE_DIMENSIONS_SET);
        when(userTaskPropertyReader.getBackgroundSet()).thenReturn(BACKGROUND_SET);
        when(userTaskPropertyReader.getFontSet()).thenReturn(FONT_SET);
        when(userTaskPropertyReader.getTaskName()).thenReturn(TASK_NAME);
        when(userTaskPropertyReader.getActors()).thenReturn(ACTORS);
        when(userTaskPropertyReader.getGroupid()).thenReturn(GROUP_ID);
        when(userTaskPropertyReader.getAssignmentsInfo()).thenReturn(ASSIGNMENTS_INFO);
        when(userTaskPropertyReader.isAsync()).thenReturn(IS_ASYNC);
        when(userTaskPropertyReader.isSkippable()).thenReturn(SKIPPABLE);
        when(userTaskPropertyReader.getPriority()).thenReturn(PRIORITY);
        when(userTaskPropertyReader.getSubject()).thenReturn(SUBJECT);
        when(userTaskPropertyReader.getDescription()).thenReturn(DESCRIPTION);
        when(userTaskPropertyReader.getCreatedBy()).thenReturn(CREATED_BY);
        when(userTaskPropertyReader.isAdHocAutostart()).thenReturn(IS_AUTOSTART);
        when(userTaskPropertyReader.isSequential()).thenReturn(SEQUENTIAL);
        when(userTaskPropertyReader.getCollectionInput()).thenReturn(COLLECTION_INPUT);
        when(userTaskPropertyReader.getCollectionOutput()).thenReturn(COLLECTION_OUTPUT);
        when(userTaskPropertyReader.getDataInput()).thenReturn(DATA_INPUT);
        when(userTaskPropertyReader.getDataOutput()).thenReturn(DATA_OUTPUT);
        when(userTaskPropertyReader.getCompletionCondition()).thenReturn(COMPLETION_CONDITION);
        when(userTaskPropertyReader.getOnEntryAction()).thenReturn(ON_ENTRY_ACTION);
        when(userTaskPropertyReader.getOnExitAction()).thenReturn(ON_EXIT_ACTION);
        when(userTaskPropertyReader.getContent()).thenReturn(CONTENT);
        when(userTaskPropertyReader.getSLADueDate()).thenReturn(SLA_DUE_DATE);
    }

    /** Supplies the concrete converter under test to the base-class harness. */
    @Override
    protected BaseTaskConverter createTaskConverter() {
        return new TaskConverter(factoryManager, propertyReaderFactory, MarshallingRequest.Mode.AUTO);
    }

    /**
     * Multiple-instance user task: the MI flag plus all MI collection/data/
     * completion properties must be copied into the execution set.
     */
    @Test
    public void testConvertUserTaskMI() {
        when(userTaskPropertyReader.isMultipleInstance()).thenReturn(true);

        BpmnNode node = (BpmnNode) tested.convert(userTask).value();
        UserTask result = (UserTask) node.value().getContent().getDefinition();
        assertCommonValues(result);
        assertTrue(result.getExecutionSet().getIsMultipleInstance().getValue());
        assertEquals(SEQUENTIAL, result.getExecutionSet().getMultipleInstanceExecutionMode().isSequential());
        assertEquals(COLLECTION_INPUT, result.getExecutionSet().getMultipleInstanceCollectionInput().getValue());
        assertEquals(COLLECTION_OUTPUT, result.getExecutionSet().getMultipleInstanceCollectionOutput().getValue());
        assertEquals(DATA_INPUT, result.getExecutionSet().getMultipleInstanceDataInput().getValue());
        assertEquals(DATA_OUTPUT, result.getExecutionSet().getMultipleInstanceDataOutput().getValue());
        assertEquals(COMPLETION_CONDITION, result.getExecutionSet().getMultipleInstanceCompletionCondition().getValue());
    }

    /**
     * Single-instance user task: MI-specific properties are absent (null /
     * false) and must stay that way on the converted definition.
     */
    @Test
    public void testConvertUserTaskNonMI() {
        when(userTaskPropertyReader.isMultipleInstance()).thenReturn(false);
        // Re-stub the MI getters to their "not set" values for this scenario.
        when(userTaskPropertyReader.isSequential()).thenReturn(false);
        when(userTaskPropertyReader.getCollectionInput()).thenReturn(null);
        when(userTaskPropertyReader.getDataInput()).thenReturn(null);
        when(userTaskPropertyReader.getCollectionOutput()).thenReturn(null);
        when(userTaskPropertyReader.getDataOutput()).thenReturn(null);
        when(userTaskPropertyReader.getCompletionCondition()).thenReturn(null);

        BpmnNode node = (BpmnNode) tested.convert(userTask).value();
        UserTask result = (UserTask) node.value().getContent().getDefinition();
        assertCommonValues(result);
        assertFalse(result.getExecutionSet().getIsMultipleInstance().getValue());
        assertFalse(result.getExecutionSet().getMultipleInstanceExecutionMode().isSequential());
        assertNull(result.getExecutionSet().getMultipleInstanceCollectionInput().getValue());
        assertNull(result.getExecutionSet().getMultipleInstanceCollectionOutput().getValue());
        assertNull(result.getExecutionSet().getMultipleInstanceDataInput().getValue());
        assertNull(result.getExecutionSet().getMultipleInstanceDataOutput().getValue());
        assertNull(result.getExecutionSet().getMultipleInstanceCompletionCondition().getValue());
    }

    /**
     * Assertions shared by both user-task scenarios: general properties,
     * visual sets, bounds propagation and the non-MI execution-set fields.
     */
    private void assertCommonValues(UserTask result) {
        assertEquals(NAME, result.getGeneral().getName().getValue());
        assertEquals(DOCUMENTATION, result.getGeneral().getDocumentation().getValue());
        assertEquals(SIMULATION_SET, result.getSimulationSet());
        verify(userTaskContent).setBounds(BOUNDS);
        assertEquals(ACTORS, result.getExecutionSet().getActors());
        assertEquals(GROUP_ID, result.getExecutionSet().getGroupid().getValue());
        assertEquals(ASSIGNMENTS_INFO, result.getExecutionSet().getAssignmentsinfo());
        assertEquals(IS_ASYNC, result.getExecutionSet().getIsAsync().getValue());
        assertEquals(SKIPPABLE, result.getExecutionSet().getSkippable().getValue());
        assertEquals(PRIORITY, result.getExecutionSet().getPriority().getValue());
        assertEquals(SUBJECT, result.getExecutionSet().getSubject().getValue());
        assertEquals(DESCRIPTION, result.getExecutionSet().getDescription().getValue());
        assertEquals(CREATED_BY, result.getExecutionSet().getCreatedBy().getValue());
        assertEquals(IS_AUTOSTART, result.getExecutionSet().getAdHocAutostart().getValue());
        assertEquals(ON_ENTRY_ACTION, result.getExecutionSet().getOnEntryAction().getValue());
        assertEquals(ON_EXIT_ACTION, result.getExecutionSet().getOnExitAction().getValue());
        assertEquals(CONTENT, result.getExecutionSet().getContent().getValue());
        assertEquals(SLA_DUE_DATE, result.getExecutionSet().getSlaDueDate().getValue());
    }

    @Test
    public void testConvertManualTask() {
        testConvertToDefaultTask(Bpmn2Factory.eINSTANCE.createManualTask());
    }

    @Test
    public void testConvertReceiveTask() {
        testConvertToDefaultTask(Bpmn2Factory.eINSTANCE.createReceiveTask());
    }

    @Test
    public void testConvertSendTask() {
        testConvertToDefaultTask(Bpmn2Factory.eINSTANCE.createSendTask());
    }

    @Test
    public void testConvertNoneTask() {
        testConvertToDefaultTask(Bpmn2Factory.eINSTANCE.createTask());
    }

    /**
     * Converts an arbitrary BPMN2 task and asserts it falls back to the
     * default {@link NoneTask} conversion path.
     *
     * @param task the (real, EMF-created) BPMN2 task to convert
     */
    private void testConvertToDefaultTask(Task task) {
        TaskPropertyReader reader = mock(TaskPropertyReader.class);
        when(propertyReaderFactory.of(task)).thenReturn(reader);
        Node<View<NoneTask>, Edge> taskNode = mock(Node.class);
        when(factoryManager.newNode(any(), eq(NoneTask.class))).thenReturn(taskNode);
        View<NoneTask> taskContent = mock(View.class);
        when(taskNode.getContent()).thenReturn(taskContent);
        NoneTask taskDef = mock(NoneTask.class);
        when(taskContent.getDefinition()).thenReturn(taskDef);
        Result<BpmnNode> result = tested.convert(task);
        verify(factoryManager).newNode(any(), eq(NoneTask.class));
        assertTrue(result.value().value().getContent().getDefinition() instanceof NoneTask);
    }
}
package org.bouncycastle.crypto.signers;

import java.security.SecureRandom;
import java.util.Hashtable;

import org.bouncycastle.crypto.AsymmetricBlockCipher;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.CryptoException;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.crypto.InvalidCipherTextException;
import org.bouncycastle.crypto.SignerWithRecovery;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.crypto.params.ParametersWithSalt;
import org.bouncycastle.crypto.params.RSAKeyParameters;
import org.bouncycastle.util.Arrays;
import org.bouncycastle.util.Integers;

/**
 * ISO9796-2 - mechanism using a hash function with recovery (scheme 2 and 3).
 * <p/>
 * Note: the usual length for the salt is the length of the hash
 * function used in bytes.
 */
public class ISO9796d2PSSSigner
    implements SignerWithRecovery
{
    // Trailer constants: either the single implicit byte 0xBC, or a
    // two-byte value identifying the hash algorithm (per ISO 9796-2).
    static final public int TRAILER_IMPLICIT = 0xBC;
    static final public int TRAILER_RIPEMD160 = 0x31CC;
    static final public int TRAILER_RIPEMD128 = 0x32CC;
    static final public int TRAILER_SHA1 = 0x33CC;
    static final public int TRAILER_SHA256 = 0x34CC;
    static final public int TRAILER_SHA512 = 0x35CC;
    static final public int TRAILER_SHA384 = 0x36CC;
    static final public int TRAILER_WHIRLPOOL = 0x37CC;

    // Maps digest algorithm names to their explicit two-byte trailer values.
    private static Hashtable trailerMap = new Hashtable();

    static
    {
        trailerMap.put("RIPEMD128", Integers.valueOf(TRAILER_RIPEMD128));
        trailerMap.put("RIPEMD160", Integers.valueOf(TRAILER_RIPEMD160));

        trailerMap.put("SHA-1", Integers.valueOf(TRAILER_SHA1));
        trailerMap.put("SHA-256", Integers.valueOf(TRAILER_SHA256));
        trailerMap.put("SHA-384", Integers.valueOf(TRAILER_SHA384));
        trailerMap.put("SHA-512", Integers.valueOf(TRAILER_SHA512));

        trailerMap.put("Whirlpool", Integers.valueOf(TRAILER_WHIRLPOOL));
    }

    private Digest digest;
    private AsymmetricBlockCipher cipher;

    private SecureRandom random;             // salt source when no fixed salt is supplied
    private byte[] standardSalt;             // fixed salt (ParametersWithSalt), else null
    private int hLen;                        // digest output size in bytes
    private int trailer;                     // trailer value in use (implicit or per-digest)
    private int keyBits;                     // RSA modulus bit length
    private byte[] block;                    // working buffer, one full cipher block
    private byte[] mBuf;                     // buffer for the recoverable part of the message
    private int messageLength;               // bytes currently held in mBuf
    private int saltLength;
    private boolean fullMessage;             // true if the whole message was recoverable
    private byte[] recoveredMessage;

    // State carried from updateWithRecoveredMessage() into verifySignature().
    private byte[] preSig;
    private byte[] preBlock;
    private int preMStart;
    private int preTLength;

    /**
     * Generate a signer for the with either implicit or explicit trailers
     * for ISO9796-2, scheme 2 or 3.
     *
     * @param cipher     base cipher to use for signature creation/verification
     * @param digest     digest to use.
     * @param saltLength length of salt in bytes.
     * @param implicit   whether or not the trailer is implicit or gives the hash.
     */
    public ISO9796d2PSSSigner(
        AsymmetricBlockCipher cipher,
        Digest digest,
        int saltLength,
        boolean implicit)
    {
        this.cipher = cipher;
        this.digest = digest;
        this.hLen = digest.getDigestSize();
        this.saltLength = saltLength;

        if (implicit)
        {
            trailer = TRAILER_IMPLICIT;
        }
        else
        {
            Integer trailerObj = (Integer)trailerMap.get(digest.getAlgorithmName());

            if (trailerObj != null)
            {
                trailer = trailerObj.intValue();
            }
            else
            {
                throw new IllegalArgumentException("no valid trailer for digest");
            }
        }
    }

    /**
     * Constructor for a signer with an explicit digest trailer.
     *
     * @param cipher     cipher to use.
     * @param digest     digest to sign with.
     * @param saltLength length of salt in bytes.
     */
    public ISO9796d2PSSSigner(
        AsymmetricBlockCipher cipher,
        Digest digest,
        int saltLength)
    {
        this(cipher, digest, saltLength, false);
    }

    /**
     * Initialise the signer.
     *
     * @param forSigning true if for signing, false if for verification.
     * @param param      parameters for signature generation/verification. If the
     *                   parameters are for generation they should be a ParametersWithRandom,
     *                   a ParametersWithSalt, or just an RSAKeyParameters object. If RSAKeyParameters
     *                   are passed in a SecureRandom will be created.
     * @throws IllegalArgumentException if wrong parameter type or a fixed
     *                                  salt is passed in which is the wrong length.
     */
    public void init(
        boolean forSigning,
        CipherParameters param)
    {
        RSAKeyParameters kParam;
        int lengthOfSalt = saltLength;

        if (param instanceof ParametersWithRandom)
        {
            ParametersWithRandom p = (ParametersWithRandom)param;

            kParam = (RSAKeyParameters)p.getParameters();
            if (forSigning)
            {
                random = p.getRandom();
            }
        }
        else if (param instanceof ParametersWithSalt)
        {
            ParametersWithSalt p = (ParametersWithSalt)param;

            kParam = (RSAKeyParameters)p.getParameters();
            standardSalt = p.getSalt();
            lengthOfSalt = standardSalt.length;
            if (standardSalt.length != saltLength)
            {
                throw new IllegalArgumentException("Fixed salt is of wrong length");
            }
        }
        else
        {
            kParam = (RSAKeyParameters)param;
            if (forSigning)
            {
                random = new SecureRandom();
            }
        }

        cipher.init(forSigning, kParam);

        keyBits = kParam.getModulus().bitLength();

        block = new byte[(keyBits + 7) / 8];

        // mBuf capacity = block size minus hash, salt, the 0x01 delimiter,
        // and the trailer (1 byte implicit, 2 bytes explicit).
        if (trailer == TRAILER_IMPLICIT)
        {
            mBuf = new byte[block.length - digest.getDigestSize() - lengthOfSalt - 1 - 1];
        }
        else
        {
            mBuf = new byte[block.length - digest.getDigestSize() - lengthOfSalt - 1 - 2];
        }

        reset();
    }

    /**
     * compare two byte arrays - constant time.
     * Deliberately examines every element of b (no early exit) so timing
     * does not leak the position of the first mismatch.
     */
    private boolean isSameAs(
        byte[] a,
        byte[] b)
    {
        boolean isOkay = true;

        if (messageLength != b.length)
        {
            isOkay = false;
        }

        // NOTE(review): indexes a with b's length — if a is shorter than b
        // this throws ArrayIndexOutOfBoundsException rather than returning
        // false; presumably callers guarantee a (mBuf) is large enough — confirm.
        for (int i = 0; i != b.length; i++)
        {
            if (a[i] != b[i])
            {
                isOkay = false;
            }
        }

        return isOkay;
    }

    /**
     * clear possible sensitive data
     */
    private void clearBlock(
        byte[] block)
    {
        for (int i = 0; i != block.length; i++)
        {
            block[i] = 0;
        }
    }

    /**
     * Decrypts the signature block, strips the PSS mask, and extracts the
     * recoverable part of the message, priming internal state
     * ({@code preSig}/{@code preBlock}/...) for a later
     * {@link #verifySignature(byte[])} call.
     *
     * @param signature the raw signature to process.
     * @throws InvalidCipherTextException if the cipher cannot process the block.
     */
    public void updateWithRecoveredMessage(byte[] signature)
        throws InvalidCipherTextException
    {
        byte[] block = cipher.processBlock(signature, 0, signature.length);

        //
        // adjust block size for leading zeroes if necessary
        //
        if (block.length < (keyBits + 7) / 8)
        {
            byte[] tmp = new byte[(keyBits + 7) / 8];

            System.arraycopy(block, 0, tmp, tmp.length - block.length, block.length);
            clearBlock(block);
            block = tmp;
        }

        // Determine trailer length: 1 byte for the implicit 0xBC trailer,
        // 2 bytes for an explicit digest-identifying trailer.
        int tLength;
        if (((block[block.length - 1] & 0xFF) ^ 0xBC) == 0)
        {
            tLength = 1;
        }
        else
        {
            int sigTrail = ((block[block.length - 2] & 0xFF) << 8) | (block[block.length - 1] & 0xFF);

            Integer trailerObj = (Integer)trailerMap.get(digest.getAlgorithmName());

            if (trailerObj != null)
            {
                if (sigTrail != trailerObj.intValue())
                {
                    throw new IllegalStateException("signer initialised with wrong digest for trailer " + sigTrail);
                }
            }
            else
            {
                throw new IllegalArgumentException("unrecognised hash in signature");
            }

            tLength = 2;
        }

        //
        // calculate H(m2)
        //
        byte[] m2Hash = new byte[hLen];
        digest.doFinal(m2Hash, 0);

        //
        // remove the mask
        //
        byte[] dbMask = maskGeneratorFunction1(block, block.length - hLen - tLength, hLen, block.length - hLen - tLength);
        for (int i = 0; i != dbMask.length; i++)
        {
            block[i] ^= dbMask[i];
        }

        block[0] &= 0x7f;

        //
        // find out how much padding we've got
        //
        int mStart = 0;
        for (; mStart != block.length; mStart++)
        {
            if (block[mStart] == 0x01)
            {
                break;
            }
        }

        mStart++;

        // NOTE(review): when no 0x01 delimiter is found the block is cleared
        // but execution falls through and the arraycopy below will fail with
        // a negative-size / bounds error instead of a clean
        // InvalidCipherTextException — confirm against upstream behaviour.
        if (mStart >= block.length)
        {
            clearBlock(block);
        }

        fullMessage = (mStart > 1);

        recoveredMessage = new byte[dbMask.length - mStart - saltLength];

        System.arraycopy(block, mStart, recoveredMessage, 0, recoveredMessage.length);

        // NOTE(review): copies into mBuf without a length check; a recovered
        // message longer than mBuf would overflow it — verify callers/inputs.
        System.arraycopy(recoveredMessage, 0, mBuf, 0, recoveredMessage.length);

        preSig = signature;
        preBlock = block;
        preMStart = mStart;
        preTLength = tLength;
    }

    /**
     * update the internal digest with the byte b
     */
    public void update(
        byte b)
    {
        // Fill the recoverable-message buffer first; overflow bytes (the
        // non-recoverable part, m2) go straight into the digest.
        if (preSig == null && messageLength < mBuf.length)
        {
            mBuf[messageLength++] = b;
        }
        else
        {
            digest.update(b);
        }
    }

    /**
     * update the internal digest with the byte array in
     */
    public void update(
        byte[] in,
        int off,
        int len)
    {
        if (preSig == null)
        {
            while (len > 0 && messageLength < mBuf.length)
            {
                this.update(in[off]);
                off++;
                len--;
            }
        }

        if (len > 0)
        {
            digest.update(in, off, len);
        }
    }

    /**
     * reset the internal state
     */
    public void reset()
    {
        digest.reset();
        messageLength = 0;
        if (mBuf != null)
        {
            clearBlock(mBuf);
        }

        if (recoveredMessage != null)
        {
            clearBlock(recoveredMessage);
            recoveredMessage = null;
        }

        fullMessage = false;

        if (preSig != null)
        {
            preSig = null;
            clearBlock(preBlock);
            preBlock = null;
        }
    }

    /**
     * generate a signature for the loaded message using the key we were
     * initialised with.
     */
    public byte[] generateSignature()
        throws CryptoException
    {
        int digSize = digest.getDigestSize();

        // H(m2): hash of the non-recoverable message part accumulated so far.
        byte[] m2Hash = new byte[digSize];

        digest.doFinal(m2Hash, 0);

        // Hash input is C (recoverable length in bits, as 8 bytes) || m1 || H(m2) || salt.
        byte[] C = new byte[8];
        LtoOSP(messageLength * 8, C);

        digest.update(C, 0, C.length);

        digest.update(mBuf, 0, messageLength);

        digest.update(m2Hash, 0, m2Hash.length);

        byte[] salt;
        if (standardSalt != null)
        {
            salt = standardSalt;
        }
        else
        {
            salt = new byte[saltLength];
            random.nextBytes(salt);
        }

        digest.update(salt, 0, salt.length);

        byte[] hash = new byte[digest.getDigestSize()];

        digest.doFinal(hash, 0);

        int tLength = 2;
        if (trailer == TRAILER_IMPLICIT)
        {
            tLength = 1;
        }

        // Assemble DB: padding || 0x01 || m1 || salt, then mask it with MGF1(hash).
        int off = block.length - messageLength - salt.length - hLen - tLength - 1;

        block[off] = 0x01;

        System.arraycopy(mBuf, 0, block, off + 1, messageLength);
        System.arraycopy(salt, 0, block, off + 1 + messageLength, salt.length);

        byte[] dbMask = maskGeneratorFunction1(hash, 0, hash.length, block.length - hLen - tLength);
        for (int i = 0; i != dbMask.length; i++)
        {
            block[i] ^= dbMask[i];
        }

        System.arraycopy(hash, 0, block, block.length - hLen - tLength, hLen);

        if (trailer == TRAILER_IMPLICIT)
        {
            block[block.length - 1] = (byte)TRAILER_IMPLICIT;
        }
        else
        {
            block[block.length - 2] = (byte)(trailer >>> 8);
            block[block.length - 1] = (byte)trailer;
        }

        // Clear the top bit so the block is numerically less than the modulus.
        block[0] &= 0x7f;

        byte[] b = cipher.processBlock(block, 0, block.length);

        clearBlock(mBuf);
        clearBlock(block);
        messageLength = 0;

        return b;
    }

    /**
     * return true if the signature represents a ISO9796-2 signature
     * for the passed in message.
     */
    public boolean verifySignature(
        byte[] signature)
    {
        //
        // calculate H(m2)
        //
        byte[] m2Hash = new byte[hLen];
        digest.doFinal(m2Hash, 0);

        byte[] block;
        int tLength;
        int mStart = 0;

        if (preSig == null)
        {
            try
            {
                updateWithRecoveredMessage(signature);
            }
            catch (Exception e)
            {
                return false;
            }
        }
        else
        {
            // A prior updateWithRecoveredMessage() must refer to the same signature.
            if (!Arrays.areEqual(preSig, signature))
            {
                throw new IllegalStateException("updateWithRecoveredMessage called on different signature");
            }
        }

        block = preBlock;
        mStart = preMStart;
        tLength = preTLength;

        preSig = null;
        preBlock = null;

        //
        // check the hashes
        //
        byte[] C = new byte[8];
        LtoOSP(recoveredMessage.length * 8, C);

        digest.update(C, 0, C.length);

        if (recoveredMessage.length != 0)
        {
            digest.update(recoveredMessage, 0, recoveredMessage.length);
        }

        digest.update(m2Hash, 0, m2Hash.length);

        // Update for the salt
        digest.update(block, mStart + recoveredMessage.length, saltLength);

        byte[] hash = new byte[digest.getDigestSize()];
        digest.doFinal(hash, 0);

        int off = block.length - tLength - hash.length;

        // Compare without early exit (constant-time over the hash length).
        boolean isOkay = true;

        for (int i = 0; i != hash.length; i++)
        {
            if (hash[i] != block[off + i])
            {
                isOkay = false;
            }
        }

        clearBlock(block);
        clearBlock(hash);

        if (!isOkay)
        {
            fullMessage = false;
            clearBlock(recoveredMessage);
            return false;
        }

        //
        // if they've input a message check what we've recovered against
        // what was input.
        //
        if (messageLength != 0)
        {
            if (!isSameAs(mBuf, recoveredMessage))
            {
                clearBlock(mBuf);
                return false;
            }
            messageLength = 0;
        }

        clearBlock(mBuf);
        return true;
    }

    /**
     * Return true if the full message was recoveredMessage.
     *
     * @return true on full message recovery, false otherwise, or if not sure.
     * @see org.bouncycastle.crypto.SignerWithRecovery#hasFullMessage()
     */
    public boolean hasFullMessage()
    {
        return fullMessage;
    }

    /**
     * Return a reference to the recoveredMessage message.
     *
     * @return the full/partial recoveredMessage message.
     * @see org.bouncycastle.crypto.SignerWithRecovery#getRecoveredMessage()
     */
    public byte[] getRecoveredMessage()
    {
        return recoveredMessage;
    }

    /**
     * int to octet string (big-endian, 4 bytes).
     */
    private void ItoOSP(
        int i,
        byte[] sp)
    {
        sp[0] = (byte)(i >>> 24);
        sp[1] = (byte)(i >>> 16);
        sp[2] = (byte)(i >>> 8);
        sp[3] = (byte)(i >>> 0);
    }

    /**
     * long to octet string (big-endian, 8 bytes).
     */
    private void LtoOSP(
        long l,
        byte[] sp)
    {
        sp[0] = (byte)(l >>> 56);
        sp[1] = (byte)(l >>> 48);
        sp[2] = (byte)(l >>> 40);
        sp[3] = (byte)(l >>> 32);
        sp[4] = (byte)(l >>> 24);
        sp[5] = (byte)(l >>> 16);
        sp[6] = (byte)(l >>> 8);
        sp[7] = (byte)(l >>> 0);
    }

    /**
     * mask generator function, as described in PKCS1v2.
     * Produces length bytes of mask by hashing Z with an incrementing
     * 4-byte counter.
     */
    private byte[] maskGeneratorFunction1(
        byte[] Z,
        int zOff,
        int zLen,
        int length)
    {
        byte[] mask = new byte[length];
        byte[] hashBuf = new byte[hLen];
        byte[] C = new byte[4];
        int counter = 0;

        digest.reset();

        while (counter < (length / hLen))
        {
            ItoOSP(counter, C);

            digest.update(Z, zOff, zLen);
            digest.update(C, 0, C.length);
            digest.doFinal(hashBuf, 0);

            System.arraycopy(hashBuf, 0, mask, counter * hLen, hLen);

            counter++;
        }

        // Final partial block, if length is not a multiple of hLen.
        if ((counter * hLen) < length)
        {
            ItoOSP(counter, C);

            digest.update(Z, zOff, zLen);
            digest.update(C, 0, C.length);
            digest.doFinal(hashBuf, 0);

            System.arraycopy(hashBuf, 0, mask, counter * hLen, mask.length - (counter * hLen));
        }

        return mask;
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sagemaker.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Configuration for downloading input data from Amazon S3 into the processing container. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ProcessingS3Input" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ProcessingS3Input implements Serializable, Cloneable, StructuredPojo { /** * <p> * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. * </p> */ private String s3Uri; /** * <p> * The local path in your container where you want Amazon SageMaker to write input data to. <code>LocalPath</code> * is an absolute path to the input data and must begin with <code>/opt/ml/processing/</code>. * <code>LocalPath</code> is a required parameter when <code>AppManaged</code> is <code>False</code> (default). * </p> */ private String localPath; /** * <p> * Whether you use an <code>S3Prefix</code> or a <code>ManifestFile</code> for the data type. If you choose * <code>S3Prefix</code>, <code>S3Uri</code> identifies a key name prefix. Amazon SageMaker uses all objects with * the specified key name prefix for the processing job. 
If you choose <code>ManifestFile</code>, <code>S3Uri</code> * identifies an object that is a manifest file containing a list of object keys that you want Amazon SageMaker to * use for the processing job. * </p> */ private String s3DataType; /** * <p> * Whether to use <code>File</code> or <code>Pipe</code> input mode. In File mode, Amazon SageMaker copies the data * from the input source onto the local ML storage volume before starting your processing container. This is the * most commonly used input mode. In <code>Pipe</code> mode, Amazon SageMaker streams input data from the source * directly to your processing container into named pipes without using the ML storage volume. * </p> */ private String s3InputMode; /** * <p> * Whether to distribute the data from Amazon S3 to all processing instances with <code>FullyReplicated</code>, or * whether the data from Amazon S3 is shared by Amazon S3 key, downloading one shard of data to each processing * instance. * </p> */ private String s3DataDistributionType; /** * <p> * Whether to GZIP-decompress the data in Amazon S3 as it is streamed into the processing container. * <code>Gzip</code> can only be used when <code>Pipe</code> mode is specified as the <code>S3InputMode</code>. In * <code>Pipe</code> mode, Amazon SageMaker streams input data from the source directly to your container without * using the EBS volume. * </p> */ private String s3CompressionType; /** * <p> * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. * </p> * * @param s3Uri * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. */ public void setS3Uri(String s3Uri) { this.s3Uri = s3Uri; } /** * <p> * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. * </p> * * @return The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. 
*/ public String getS3Uri() { return this.s3Uri; } /** * <p> * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. * </p> * * @param s3Uri * The URI of the Amazon S3 prefix Amazon SageMaker downloads data required to run a processing job. * @return Returns a reference to this object so that method calls can be chained together. */ public ProcessingS3Input withS3Uri(String s3Uri) { setS3Uri(s3Uri); return this; } /** * <p> * The local path in your container where you want Amazon SageMaker to write input data to. <code>LocalPath</code> * is an absolute path to the input data and must begin with <code>/opt/ml/processing/</code>. * <code>LocalPath</code> is a required parameter when <code>AppManaged</code> is <code>False</code> (default). * </p> * * @param localPath * The local path in your container where you want Amazon SageMaker to write input data to. * <code>LocalPath</code> is an absolute path to the input data and must begin with * <code>/opt/ml/processing/</code>. <code>LocalPath</code> is a required parameter when * <code>AppManaged</code> is <code>False</code> (default). */ public void setLocalPath(String localPath) { this.localPath = localPath; } /** * <p> * The local path in your container where you want Amazon SageMaker to write input data to. <code>LocalPath</code> * is an absolute path to the input data and must begin with <code>/opt/ml/processing/</code>. * <code>LocalPath</code> is a required parameter when <code>AppManaged</code> is <code>False</code> (default). * </p> * * @return The local path in your container where you want Amazon SageMaker to write input data to. * <code>LocalPath</code> is an absolute path to the input data and must begin with * <code>/opt/ml/processing/</code>. <code>LocalPath</code> is a required parameter when * <code>AppManaged</code> is <code>False</code> (default). 
*/
    public String getLocalPath() {
        return this.localPath;
    }

    /**
     * Sets the local path in your container where Amazon SageMaker writes the input data and returns this object so
     * that method calls can be chained. <code>LocalPath</code> is an absolute path that must begin with
     * <code>/opt/ml/processing/</code>, and it is required when <code>AppManaged</code> is <code>False</code>
     * (default).
     *
     * @param localPath
     *        The local path in your container where you want Amazon SageMaker to write input data to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProcessingS3Input withLocalPath(String localPath) {
        setLocalPath(localPath);
        return this;
    }

    /**
     * Sets whether an <code>S3Prefix</code> or a <code>ManifestFile</code> is used for the data type. With
     * <code>S3Prefix</code>, <code>S3Uri</code> identifies a key name prefix and Amazon SageMaker uses every object
     * with that prefix for the processing job. With <code>ManifestFile</code>, <code>S3Uri</code> identifies a
     * manifest file containing the list of object keys to use for the processing job.
     *
     * @param s3DataType
     *        Either <code>S3Prefix</code> or <code>ManifestFile</code>.
     * @see ProcessingS3DataType
     */
    public void setS3DataType(String s3DataType) {
        this.s3DataType = s3DataType;
    }

    /**
     * Gets whether an <code>S3Prefix</code> or a <code>ManifestFile</code> is used for the data type; see
     * {@link #setS3DataType(String)} for the meaning of each value.
     *
     * @return Either <code>S3Prefix</code> or <code>ManifestFile</code>.
     * @see ProcessingS3DataType
     */
    public String getS3DataType() {
        return this.s3DataType;
    }

    /**
     * Fluent variant of {@link #setS3DataType(String)}.
     *
     * @param s3DataType
     *        Either <code>S3Prefix</code> (use every object under the <code>S3Uri</code> key name prefix) or
     *        <code>ManifestFile</code> (<code>S3Uri</code> identifies a manifest file listing the object keys).
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3DataType
     */
    public ProcessingS3Input withS3DataType(String s3DataType) {
        setS3DataType(s3DataType);
        return this;
    }

    /**
     * Typed variant of {@link #withS3DataType(String)} that stores the enum constant's string form.
     *
     * @param s3DataType
     *        The data type as a {@link ProcessingS3DataType} constant.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3DataType
     */
    public ProcessingS3Input withS3DataType(ProcessingS3DataType s3DataType) {
        this.s3DataType = s3DataType.toString();
        return this;
    }

    /**
     * Sets whether to use <code>File</code> or <code>Pipe</code> input mode. In <code>File</code> mode, Amazon
     * SageMaker copies the data from the input source onto the local ML storage volume before starting your
     * processing container; this is the most commonly used input mode. In <code>Pipe</code> mode, Amazon SageMaker
     * streams input data from the source directly to your processing container into named pipes without using the
     * ML storage volume.
     *
     * @param s3InputMode
     *        Either <code>File</code> or <code>Pipe</code>.
     * @see ProcessingS3InputMode
     */
    public void setS3InputMode(String s3InputMode) {
        this.s3InputMode = s3InputMode;
    }

    /**
     * Gets whether <code>File</code> or <code>Pipe</code> input mode is used; see
     * {@link #setS3InputMode(String)} for the meaning of each value.
     *
     * @return Either <code>File</code> or <code>Pipe</code>.
     * @see ProcessingS3InputMode
     */
    public String getS3InputMode() {
        return this.s3InputMode;
    }

    /**
     * Fluent variant of {@link #setS3InputMode(String)}.
     *
     * @param s3InputMode
     *        Either <code>File</code> (copy data to the local ML storage volume first) or <code>Pipe</code> (stream
     *        data into named pipes without using the ML storage volume).
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3InputMode
     */
    public ProcessingS3Input withS3InputMode(String s3InputMode) {
        setS3InputMode(s3InputMode);
        return this;
    }

    /**
     * Typed variant of {@link #withS3InputMode(String)} that stores the enum constant's string form.
     *
     * @param s3InputMode
     *        The input mode as a {@link ProcessingS3InputMode} constant.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3InputMode
     */
    public ProcessingS3Input withS3InputMode(ProcessingS3InputMode s3InputMode) {
        this.s3InputMode = s3InputMode.toString();
        return this;
    }

    /**
     * Sets whether to distribute the data from Amazon S3 to all processing instances with
     * <code>FullyReplicated</code>, or whether the data is sharded by Amazon S3 key so that one shard of data is
     * downloaded to each processing instance.
     *
     * @param s3DataDistributionType
     *        The distribution strategy for the S3 data.
     * @see ProcessingS3DataDistributionType
     */
    public void setS3DataDistributionType(String s3DataDistributionType) {
        this.s3DataDistributionType = s3DataDistributionType;
    }

    /**
     * Gets how the data from Amazon S3 is distributed across processing instances; see
     * {@link #setS3DataDistributionType(String)} for the meaning of each value.
     *
     * @return The distribution strategy for the S3 data.
     * @see ProcessingS3DataDistributionType
     */
    public String getS3DataDistributionType() {
        return this.s3DataDistributionType;
    }

    /**
     * Fluent variant of {@link #setS3DataDistributionType(String)}.
     *
     * @param s3DataDistributionType
     *        <code>FullyReplicated</code> to copy all data to every processing instance, or sharding by Amazon S3
     *        key so that each instance downloads one shard of the data.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3DataDistributionType
     */
    public ProcessingS3Input withS3DataDistributionType(String s3DataDistributionType) {
        setS3DataDistributionType(s3DataDistributionType);
        return this;
    }

    /**
     * Typed variant of {@link #withS3DataDistributionType(String)} that stores the enum constant's string form.
     *
     * @param s3DataDistributionType
     *        The distribution strategy as a {@link ProcessingS3DataDistributionType} constant.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3DataDistributionType
     */
    public ProcessingS3Input withS3DataDistributionType(ProcessingS3DataDistributionType s3DataDistributionType) {
        this.s3DataDistributionType = s3DataDistributionType.toString();
        return this;
    }

    /**
     * Sets whether to GZIP-decompress the data in Amazon S3 as it is streamed into the processing container.
     * <code>Gzip</code> can only be used when <code>Pipe</code> mode is specified as the <code>S3InputMode</code>;
     * in <code>Pipe</code> mode, Amazon SageMaker streams input data from the source directly to your container
     * without using the EBS volume.
     *
     * @param s3CompressionType
     *        The compression type applied to the streamed S3 data.
     * @see ProcessingS3CompressionType
     */
    public void setS3CompressionType(String s3CompressionType) {
        this.s3CompressionType = s3CompressionType;
    }

    /**
     * Gets whether the data in Amazon S3 is GZIP-decompressed as it is streamed into the processing container; see
     * {@link #setS3CompressionType(String)}.
     *
     * @return The compression type applied to the streamed S3 data.
     * @see ProcessingS3CompressionType
     */
    public String getS3CompressionType() {
        return this.s3CompressionType;
    }

    /**
     * Fluent variant of {@link #setS3CompressionType(String)}.
     *
     * @param s3CompressionType
     *        The compression type; <code>Gzip</code> is only valid together with <code>Pipe</code> input mode.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3CompressionType
     */
    public ProcessingS3Input withS3CompressionType(String s3CompressionType) {
        setS3CompressionType(s3CompressionType);
        return this;
    }

    /**
     * Typed variant of {@link #withS3CompressionType(String)} that stores the enum constant's string form.
     *
     * @param s3CompressionType
     *        The compression type as a {@link ProcessingS3CompressionType} constant.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProcessingS3CompressionType
     */
    public ProcessingS3Input withS3CompressionType(ProcessingS3CompressionType s3CompressionType) {
        this.s3CompressionType = s3CompressionType.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
*
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Render only the fields that are set; every entry but the last carries a trailing comma.
        StringBuilder buf = new StringBuilder();
        buf.append("{");
        if (getS3Uri() != null) {
            buf.append("S3Uri: ").append(getS3Uri()).append(",");
        }
        if (getLocalPath() != null) {
            buf.append("LocalPath: ").append(getLocalPath()).append(",");
        }
        if (getS3DataType() != null) {
            buf.append("S3DataType: ").append(getS3DataType()).append(",");
        }
        if (getS3InputMode() != null) {
            buf.append("S3InputMode: ").append(getS3InputMode()).append(",");
        }
        if (getS3DataDistributionType() != null) {
            buf.append("S3DataDistributionType: ").append(getS3DataDistributionType()).append(",");
        }
        if (getS3CompressionType() != null) {
            buf.append("S3CompressionType: ").append(getS3CompressionType());
        }
        buf.append("}");
        return buf.toString();
    }

    /**
     * Two {@code ProcessingS3Input} instances are equal when all of their properties match (null-safe comparison).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ProcessingS3Input)) {
            // Also covers obj == null.
            return false;
        }
        ProcessingS3Input that = (ProcessingS3Input) obj;
        return java.util.Objects.equals(that.getS3Uri(), this.getS3Uri())
                && java.util.Objects.equals(that.getLocalPath(), this.getLocalPath())
                && java.util.Objects.equals(that.getS3DataType(), this.getS3DataType())
                && java.util.Objects.equals(that.getS3InputMode(), this.getS3InputMode())
                && java.util.Objects.equals(that.getS3DataDistributionType(), this.getS3DataDistributionType())
                && java.util.Objects.equals(that.getS3CompressionType(), this.getS3CompressionType());
    }

    /**
     * Hash code consistent with {@link #equals(Object)}. {@code Objects.hash} reproduces the conventional
     * 31-based accumulation (initial value 1, 0 for null fields) over the same properties in the same order.
     */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getS3Uri(), getLocalPath(), getS3DataType(), getS3InputMode(),
                getS3DataDistributionType(), getS3CompressionType());
    }

    /**
     * Creates a shallow copy of this object.
     */
    @Override
    public ProcessingS3Input clone() {
        try {
            return (ProcessingS3Input) super.clone();
        } catch (CloneNotSupportedException e) {
            // The class is Cloneable, so this is effectively unreachable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    /**
     * Marshalls this structured data using the given {@code ProtocolMarshaller}. SDK-internal API.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.sagemaker.model.transform.ProcessingS3InputMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package de.uni_heidelberg.cos.agw.ij;

import de.uni_heidelberg.cos.agw.ij.util.Util;
import de.uni_heidelberg.cos.agw.imglib2.realtransform.AzimuthalEquidistantToCartesianTransform;
import de.uni_heidelberg.cos.agw.imglib2.realtransform.CylindricalToCartesianIntervalTransform;
import de.uni_heidelberg.cos.agw.imglib2.realtransform.EquirectangularToCartesianTransform;
import de.uni_heidelberg.cos.agw.imglib2.realtransform.PositionableRealTransform;
import ij.IJ;
import ij.ImagePlus;
import ij.ImageStack;
import ij.gui.GenericDialog;
import ij.plugin.filter.PlugInFilter;
import ij.process.ImageProcessor;
import net.imglib2.Cursor;
import net.imglib2.Interval;
import net.imglib2.RealRandomAccess;
import net.imglib2.img.Img;
import net.imglib2.img.ImgFactory;
import net.imglib2.img.display.imagej.ImageJFunctions;
import net.imglib2.interpolation.InterpolatorFactory;
import net.imglib2.interpolation.randomaccess.LanczosInterpolatorFactory;
import net.imglib2.interpolation.randomaccess.NLinearInterpolatorFactory;
import net.imglib2.interpolation.randomaccess.NearestNeighborInterpolatorFactory;
import net.imglib2.realtransform.RealTransform;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.NumericType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.GenericShortType;
import net.imglib2.view.Views;

/**
 * ImageJ plugin that resamples a 3D stack through an inverse map projection
 * (equirectangular, azimuthal equidistant, or cylindrical): every voxel of the
 * projected output volume is mapped by the selected transform to a Cartesian
 * source position, which is then sampled from the interpolated input stack.
 */
public class MapTransform<T extends NumericType<T> & RealType<T> & NativeType<T>, V extends RealTransform & Interval> implements PlugInFilter {

    // Dialog defaults; static so the last-used values persist across plugin invocations in a session.
    private static double centerX = 600;
    private static double centerY = 600;
    private static double centerZ = 425;
    // Rotations are stored in radians; the dialog shows and reads degrees (converted in run()).
    private static double rotationX = 0;
    private static double rotationY = 0;
    private static double rotationSelf = 0;
    private static double innerRadius = 200;
    private static double outerRadius = 425;
    private static double stdRadiusOffset = 0.7; // must lie in (0, 1]; validated in run()
    private static double scale = 1;             // must be > 0; validated in run()
    private static int interpolationIndex = 1;   // index into interpolations[] ("Linear" by default)
    private static int transformationIndex = 0;  // index into transformations[] ("Equirectangular" by default)
    private static boolean doMakeImageJ1Output = true;
    private static double cylinderHeight = 512;  // only used by the cylindrical transform
    private final String pluginName = "Map Transform";
    private final String[] interpolations = {"Nearest Neighbor", "Linear", "Lanczos"};
    private final String[] transformations = {"Equirectangular", "Azimuthal Equidistant", "Cylindrical"};
    private Img<T> inputImg;   // the current image wrapped for imglib2 access
    private String imageTitle; // title of the source ImagePlus, reused for the output name

    /**
     * PlugInFilter setup hook: wraps the current image for imglib2 access and remembers its title.
     * NOTE(review): a NullPointerException (e.g. wrap failure) is silently swallowed here, leaving
     * inputImg null — run() would then fail; consider reporting an error to the user instead.
     */
    @Override
    public int setup(String args, ImagePlus imp) {
        try {
            inputImg = ImageJFunctions.wrap(imp);
            imageTitle = imp.getTitle();
        } catch (NullPointerException ex) {
        }
        return STACK_REQUIRED + DOES_ALL;
    }

    /**
     * Collects parameters via a dialog, validates them, builds the selected transform and
     * interpolator, runs the resampling, and shows the result either as a classic ImageJ1
     * ImagePlus or as an imglib2 display.
     */
    @Override
    public void run(ImageProcessor ip) {
        // Default the cylinder height to the input's extent along dimension 0 before showing the dialog.
        cylinderHeight = inputImg.dimension(0);
        GenericDialog dialog = new GenericDialog(pluginName);
        dialog.addChoice("Transformation", transformations, transformations[transformationIndex]);
        dialog.addNumericField("Center_x", centerX, 2, 7, "voxel");
        dialog.addNumericField("Center_y", centerY, 2, 7, "voxel");
        dialog.addNumericField("Center_z", centerZ, 2, 7, "voxel");
        dialog.addNumericField("Rotation_x", Math.toDegrees(rotationX), 2, 7, "degrees");
        dialog.addNumericField("Rotation_y", Math.toDegrees(rotationY), 2, 7, "degrees");
        dialog.addNumericField("Rotation_self", Math.toDegrees(rotationSelf), 2, 7, "degrees");
        dialog.addNumericField("Inner_radius", innerRadius, 2, 7, "voxels");
        dialog.addNumericField("Outer_radius", outerRadius, 2, 7, "voxels");
        dialog.addNumericField("Standard_radius_offset", stdRadiusOffset, 2, 7, "0-1");
        dialog.addNumericField("Scale", scale, 2, 7, "x");
        dialog.addChoice("Interpolation", interpolations, interpolations[interpolationIndex]);
        dialog.addCheckbox("ImageJ1_output", doMakeImageJ1Output);
        dialog.addNumericField("Cylinder_height", cylinderHeight, 2, 7, "voxels");
        dialog.showDialog();
        if (dialog.wasCanceled()) {
            return;
        }
        // Read values back in exactly the order they were added above.
        transformationIndex = dialog.getNextChoiceIndex();
        centerX = dialog.getNextNumber();
        centerY = dialog.getNextNumber();
        centerZ = dialog.getNextNumber();
        rotationX = Math.toRadians(dialog.getNextNumber());
        rotationY = Math.toRadians(dialog.getNextNumber());
        rotationSelf = Math.toRadians(dialog.getNextNumber());
        innerRadius = dialog.getNextNumber();
        outerRadius = dialog.getNextNumber();
        stdRadiusOffset = dialog.getNextNumber();
        scale = dialog.getNextNumber();
        interpolationIndex = dialog.getNextChoiceIndex();
        doMakeImageJ1Output = dialog.getNextBoolean();
        cylinderHeight = dialog.getNextNumber();
        final double[] translation = {centerX, centerY, centerZ};
        final double[] rotation = {rotationX, rotationY, rotationSelf};
        // Validate user input before doing any work.
        if (stdRadiusOffset <= 0 || stdRadiusOffset > 1) {
            IJ.error(pluginName, "Standard radius offset must be between 0 and 1.");
            return;
        }
        if (scale <= 0) {
            IJ.error(pluginName, "Scale must be greater than 0.");
            return;
        }
        // Raw InterpolatorFactory: index 0 = nearest neighbor, 2 = Lanczos, anything else = linear.
        InterpolatorFactory interpolation = null;
        switch (interpolationIndex) {
            case 0:
                interpolation = new NearestNeighborInterpolatorFactory<T>();
                break;
            case 2:
                interpolation = new LanczosInterpolatorFactory<T>();
                break;
            default:
                interpolation = new NLinearInterpolatorFactory<T>();
                break;
        }
        // Zero-extend the input so out-of-bounds samples read as 0.
        final RealRandomAccess<T> inputRa = Views.interpolate(Views.extendZero(inputImg), interpolation).realRandomAccess();
        // Unchecked casts to V — assumes each *ToCartesian transform implements
        // RealTransform & Interval; TODO confirm against the transform classes.
        V transform = null;
        String transformName;
        switch (transformationIndex) {
            case 1:
                transform = (V) (new AzimuthalEquidistantToCartesianTransform(innerRadius, outerRadius, stdRadiusOffset, scale));
                transformName = "AzimuthalEquidistant";
                break;
            case 2:
                transform = (V) (new CylindricalToCartesianIntervalTransform(cylinderHeight, innerRadius, outerRadius, stdRadiusOffset, scale));
                transformName = "Cylindrical";
                break;
            default:
                transform = (V) (new EquirectangularToCartesianTransform(innerRadius, outerRadius, stdRadiusOffset, scale));
                transformName = "Equirectangular";
                break;
        }
        final Transformation transformation = new Transformation(transform, translation, rotation, inputRa);
        // Encode the parameters into the output title; note the rotation values are written in radians here,
        // even though the dialog displayed degrees.
        final String filenameParams = String.format(
                "-%s-cx%.2f-cy%.2f-cz%.2f-rx%.2f-ry%.2f-rs%.2f-ri%.2f-ro%.2f-sr%.2f-sc%.2f",
                transformName, centerX, centerY, centerZ, rotationX, rotationY, rotationSelf, innerRadius, outerRadius, stdRadiusOffset, scale);
        final String fileName = Util.addToFilename(imageTitle, filenameParams);
        if (doMakeImageJ1Output) {
            ImagePlus outputImp = transformation.computeIj1(inputImg.firstElement());
            outputImp.setTitle(fileName);
            outputImp.show();
        } else {
            Img<T> outputImg = transformation.compute(inputImg.factory(), inputImg.firstElement());
            ImageJFunctions.show(outputImg, fileName);
        }
    }
}

/**
 * Applies a positioned (translated and rotated) inverse map projection: fills an output volume
 * whose dimensions come from the transform's interval by sampling an interpolated source image.
 */
class Transformation<T extends NumericType<T> & RealType<T> & NativeType<T>, V extends RealTransform & Interval> {

    private final long[] outputDimensions;             // output size taken from the transform's interval
    private final PositionableRealTransform transform; // projection transform plus translation/rotation
    private final RealRandomAccess<T> inputRa;         // interpolated access into the source image

    /**
     * @param transformInterval the projection transform; its interval defines the output dimensions
     * @param translation       per-dimension offset of the projection center, in source voxels
     * @param rotation          per-dimension rotation, in radians
     * @param sourceRa          interpolated real random access into the source image
     */
    public Transformation(final V transformInterval, final double[] translation, final double[] rotation, final RealRandomAccess<T> sourceRa) {
        outputDimensions = new long[transformInterval.numDimensions()];
        transformInterval.dimensions(outputDimensions);
        transform = new PositionableRealTransform(transformInterval);
        // Position and orient the transform dimension by dimension.
        for (int d = 0; d < outputDimensions.length; ++d) {
            transform.setPosition(translation[d], d);
            transform.rotate(d, rotation[d]);
        }
        inputRa = sourceRa;
    }

    /**
     * Fills a new imglib2 image: each output voxel position is mapped through the transform and
     * the interpolated source value at the mapped location is copied in.
     */
    public Img<T> compute(final ImgFactory<T> factory, final T element) {
        final Img<T> outputImg = factory.create(outputDimensions, element);
        final Cursor<T> outputCur = outputImg.localizingCursor();
        while (outputCur.hasNext()) {
            outputCur.next();
            // Positions inputRa at the source location for the current output voxel.
            transform.apply(outputCur, inputRa);
            outputCur.get().set(inputRa.get());
        }
        return outputImg;
    }

    /**
     * Same resampling as {@link #compute}, but writes into a classic ImageJ1 ImagePlus
     * (8-bit, or 16-bit when the input element is a short type) and reports per-slice progress.
     * Assumes a 3-dimensional output — TODO confirm every transform provides a 3D interval.
     */
    public ImagePlus computeIj1(final T element) {
        int bits = 8;
        if (element instanceof GenericShortType) {
            bits = 16;
        }
        final ImagePlus outputImp = IJ.createImage("", (int) outputDimensions[0], (int) outputDimensions[1], (int) outputDimensions[2], bits);
        final ImageStack stack = outputImp.getImageStack();
        final double[] cylindrical = new double[3]; // position in projected (output) space
        final double[] cartesian = new double[3];   // mapped position in source space
        for (int z = 0; z < outputDimensions[2]; ++z) {
            cylindrical[2] = z;
            for (int y = 0; y < outputDimensions[1]; ++y) {
                cylindrical[1] = y;
                for (int x = 0; x < outputDimensions[0]; ++x) {
                    cylindrical[0] = x;
                    transform.apply(cylindrical, cartesian);
                    inputRa.setPosition(cartesian);
                    stack.setVoxel((int) Math.round(cylindrical[0]), (int) Math.round(cylindrical[1]), (int) Math.round(cylindrical[2]), inputRa.get().getRealDouble());
                }
            }
            IJ.showProgress(z + 1, (int) outputDimensions[2]);
        }
        return outputImp;
    }
}
/* * Copyright 2015 Suprema(biostar2@suprema.co.kr) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.supremainc.biostar2.fragment; import android.Manifest; import android.annotation.SuppressLint; import android.app.Activity; import android.app.DatePickerDialog.OnDateSetListener; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.content.pm.PermissionGroupInfo; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.Settings; import android.support.design.widget.Snackbar; import android.support.v4.app.ActivityCompat; import android.support.v4.content.LocalBroadcastManager; import android.text.InputType; import android.util.Base64; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.DatePicker; import android.widget.ScrollView; import android.widget.TextView; import com.supremainc.biostar2.BuildConfig; import com.supremainc.biostar2.R; import com.supremainc.biostar2.impl.OnSingleClickListener; import com.supremainc.biostar2.meta.Setting; import com.supremainc.biostar2.sdk.models.v1.permission.CloudRole; import 
com.supremainc.biostar2.sdk.models.v2.accesscontrol.ListAccessGroup; import com.supremainc.biostar2.sdk.models.v2.common.BioStarSetting; import com.supremainc.biostar2.sdk.models.v2.common.ResponseStatus; import com.supremainc.biostar2.sdk.models.v2.common.VersionData; import com.supremainc.biostar2.sdk.models.v2.permission.UserPermission; import com.supremainc.biostar2.sdk.models.v2.user.BaseUserGroup; import com.supremainc.biostar2.sdk.models.v2.user.User; import com.supremainc.biostar2.sdk.models.v2.user.UserGroup; import com.supremainc.biostar2.sdk.provider.DateTimeDataProvider; import com.supremainc.biostar2.sdk.utils.ImageUtil; import com.supremainc.biostar2.util.InvalidChecker; import com.supremainc.biostar2.util.TextInputFilter; import com.supremainc.biostar2.view.DetailEditItemView; import com.supremainc.biostar2.view.DetailSwitchItemView; import com.supremainc.biostar2.view.DetailTextItemView; import com.supremainc.biostar2.view.StyledTextView; import com.supremainc.biostar2.view.SummaryUserView; import com.supremainc.biostar2.view.SwitchView; import com.supremainc.biostar2.widget.DateTimePicker; import com.supremainc.biostar2.widget.ScreenControl.ScreenType; import com.supremainc.biostar2.widget.popup.PasswordPopup; import com.supremainc.biostar2.widget.popup.Popup; import com.supremainc.biostar2.widget.popup.Popup.PopupType; import com.supremainc.biostar2.widget.popup.SelectCustomData; import com.supremainc.biostar2.widget.popup.SelectPopup; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Calendar; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; @SuppressLint("InflateParams") public class UserModifyFragment extends BaseFragment { private static final int DELETE_PICTURE = 2; private static final int FROM_GALLERY = 1; private static final int REQ_ACTIVITY_CAMERA = 1; private static final int REQ_ACTIVITY_CAMERA_CROP 
= 2; private static final int TAKE_PICTURE = 0; boolean mIsNewUser = false; private DateTimePicker mDateTimePicker; private InvalidChecker mInvalidChecker; private UserGroup mInitUserGroup; private String mBackupPhoto = null; private Bitmap mBmpBlur; private int mEndDay; private int mEndMonth; private int mEndYear; private String mPasswordData; private PhotoStatus mPhotoStatus = PhotoStatus.NOT_MODIFY; private String mPinData; private SwitchView mPinSwitch; private SwitchView mStatusSwitch; private Bitmap mBmpRound; private int mStartDay; private int mStartMonth; private int mStartYear; private User mUserInfo; private SummaryUserView mSummaryUserView; private DetailEditItemView mUserIDView; private DetailEditItemView mUserNameView; private DetailEditItemView mEmailView; private DetailEditItemView mTelephoneView; private DetailTextItemView mOperatorView; private DetailEditItemView mLoginIDView; private DetailTextItemView mLoginPasswordView; private DetailTextItemView mUserGroupView; private DetailSwitchItemView mStatusView; private StyledTextView mDateStartView; private StyledTextView mDateEndView; private DetailTextItemView mAccessGroupView; private DetailTextItemView mFingerPrintView; private DetailTextItemView mCardView; private DetailTextItemView mFaceView; private DetailSwitchItemView mPinView; private TextInputFilter mTextInputFilter; private SummaryUserView.SummaryUserViewListener mSummaryUserViewListener = new SummaryUserView.SummaryUserViewListener() { @Override public void editPhoto() { editUserImage(); } }; private Popup.OnPopupClickListener mPopupSucess = new Popup.OnPopupClickListener() { @Override public void OnNegative() { } @Override public void OnPositive() { if (mIsNewUser) { sendLocalBroadcast(Setting.BROADCAST_USER, null); } mScreenControl.backScreen(); } }; private OnDateSetListener mStartDateListener = new OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { if 
(mDateTimePicker.isErrorSetDate(year, monthOfYear, dayOfMonth, mEndYear, mEndMonth, mEndDay)) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.error_set_date), null, null, null); return; } if (year < 2000) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.error_set_start_date), null, null, null); return; } mStartYear = year; mStartMonth = monthOfYear; mStartDay = dayOfMonth; mDateStartView.setText(mDateTimePicker.getDateString(mStartYear, mStartMonth, mStartDay)); } }; private OnDateSetListener mEndDateListener = new OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { if (mDateTimePicker.isErrorSetDate(mStartYear, mStartMonth, mStartDay, year, monthOfYear, dayOfMonth)) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.error_set_date), null, null, null); return; } if (year > 2030) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.error_set_end_date), null, null, null); return; // year = 2030; } mEndYear = year; mEndMonth = monthOfYear; mEndDay = dayOfMonth; mDateEndView.setText(mDateTimePicker.getDateString(mEndYear, mEndMonth, mEndDay)); } }; private Runnable mRunRditUserImage = new Runnable() { @Override public void run() { editUserImage(); } }; private Runnable mRunDeny = new Runnable() { @Override public void run() { if (Build.VERSION.SDK_INT >= 23) { String permissionLabel = ""; try { PackageManager pm = mActivity.getPackageManager(); PermissionGroupInfo pg = pm.getPermissionGroupInfo(Manifest.permission_group.STORAGE, PackageManager.GET_META_DATA); permissionLabel = pg.loadLabel(pm).toString(); } catch (Exception e) { } if (!permissionLabel.isEmpty()) { permissionLabel = "(" + permissionLabel + ")"; } permissionLabel = getString(R.string.guide_feature_permission) + " " + getString(R.string.allow_permission) + permissionLabel; Snackbar snackbar = Snackbar .make(mRootView, permissionLabel, 
Snackbar.LENGTH_LONG) .setAction(getString(R.string.permission_setting), new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(); intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS); intent.setData(Uri.parse("package:" + mActivity.getPackageName())); mActivity.startActivity(intent); } }); //snackbar.setActionTextColor(Color.MAGENTA); View snackbarView = snackbar.getView(); TextView textView = (TextView) snackbarView.findViewById(android.support.design.R.id.snackbar_text); textView.setMaxLines(5); snackbar.show(); } } }; private Callback<User> mLoginListener = new Callback<User>() { @Override public void onFailure(Call<User> call, Throwable t) { if (isIgnoreCallback(call, false)) { return; } } @Override public void onResponse(Call<User> call, Response<User> response) { if (isIgnoreCallback(call, response, false)) { return; } if (isInvalidResponse(response, false, false)) { return; } LocalBroadcastManager.getInstance(mActivity).sendBroadcast(new Intent(Setting.BROADCAST_REROGIN)); } }; private Callback<BioStarSetting> mSettingListener = new Callback<BioStarSetting>() { @Override public void onFailure(Call<BioStarSetting> call, Throwable t) { if (isIgnoreCallback(call, true)) { return; } showPasswodPopup(); } @Override public void onResponse(Call<BioStarSetting> call, Response<BioStarSetting> response) { if (isIgnoreCallback(call, response, true)) { return; } showPasswodPopup(); } }; private Callback<User> mSettingListener2 = new Callback<User>() { @Override public void onFailure(Call<User> call, Throwable t) { if (isIgnoreCallback(call, true)) { return; } showPasswodPopup(); } @Override public void onResponse(Call<User> call, Response<User> response) { if (isIgnoreCallback(call, response, true)) { return; } showPasswodPopup(); } }; private OnSingleClickListener mClickListener = new OnSingleClickListener() { @Override public void onSingleClick(View v) { switch (v.getId()) { case R.id.user_id: // intent miss break 
case R.id.email: // intent miss break case R.id.telephone: // intent miss break case R.id.login_id: // intent miss break case R.id.user_name: break; default: hideIme(mUserNameView.content); } switch (v.getId()) { case R.id.user_id: // intent miss break case R.id.email: // intent miss break case R.id.telephone: // intent miss break case R.id.login_id: // intent miss break case R.id.user_name: { Log.e(TAG, "onClick use edit"); DetailEditItemView view = (DetailEditItemView) v; view.content.setSelection(view.content.toString2().length()); showIme(view.content); break; } case R.id.operator: { editOperator(); break; } case R.id.login_password: { mPopup.showWait(mCancelStayListener); if (VersionData.getCloudVersion(mActivity) > 1) { request(mCommonDataProvider.getBioStarSetting(mSettingListener)); } else { request(mCommonDataProvider.simpleLogin(mSettingListener2)); } break; } case R.id.user_group: { editUserGroup(); break; } case R.id.status: { mStatusView.mSwitchView.setSwitch(!mUserInfo.isActive()); break; } case R.id.date_edit: case R.id.date_arrow: { selectDatePicker(); break; } case R.id.date_start: { mDateTimePicker.showDatePicker(mStartDateListener, mStartYear, mStartMonth, mStartDay); break; } case R.id.date_end: { mDateTimePicker.showDatePicker(mEndDateListener, mEndYear, mEndMonth, mEndDay); break; } case R.id.access_group: { editAccessGroup(); break; } case R.id.fingerprint: { editFingerPrint(); break; } case R.id.card: { editCard(); break; } case R.id.face: { editFace(); break; } case R.id.pin: { mPinSwitch.setSwitch(!mPinSwitch.getOn()); break; } } } }; private Callback<User> mUpdateUserListener = new Callback<User>() { @Override public void onFailure(Call<User> call, Throwable t) { if (isIgnoreCallback(call, true)) { return; } try { sendLocalBroadcast(Setting.BROADCAST_USER, mUserInfo.clone()); } catch (CloneNotSupportedException e) { showErrorPopup(e.getMessage(), false); return; } mPopup.show(PopupType.CONFIRM, getString(R.string.info), 
getString(R.string.user_modify_success), mPopupSucess, null, null); } @Override public void onResponse(Call<User> call, Response<User> response) { if (isIgnoreCallback(call, response, true)) { return; } if (isInvalidResponse(response, true, false)) { return; } try { mUserInfo = response.body().clone(); if (mUserInfo.fingerprint_templates != null) { mUserInfo.fingerprint_count = mUserInfo.fingerprint_templates.size(); } if (mUserInfo.cards != null) { mUserInfo.fingerprint_count = mUserInfo.cards.size(); mUserInfo.card_count = mUserInfo.cards.size(); } if (mUserInfo.photo != null && !mUserInfo.photo.isEmpty()) { mUserInfo.photo_exist = true; } setView(); } catch (Exception e) { } try { sendLocalBroadcast(Setting.BROADCAST_USER, mUserInfo.clone()); } catch (CloneNotSupportedException e) { showErrorPopup(e.getMessage(), false); return; } mPopup.show(PopupType.CONFIRM, getString(R.string.info), getString(R.string.user_modify_success), mPopupSucess, null, null); } }; private Popup.OnPopupClickListener mNextSuccess = new Popup.OnPopupClickListener() { @Override public void OnNegative() { sendLocalBroadcast(Setting.BROADCAST_USER, null); mScreenControl.backScreen(); } @Override public void OnPositive() { mIsNewUser = false; mCardView.setVisibility(View.VISIBLE); mFingerPrintView.setVisibility(View.VISIBLE); mFaceView.setVisibility(View.VISIBLE); sendLocalBroadcast(Setting.BROADCAST_USER, null); setView(); initActionbar(mUserInfo.name, R.drawable.action_bar_bg); mHandler.post(new Runnable() { @Override public void run() { ScrollView sv = (ScrollView) mRootView.findViewById(R.id.scroll_container); if (sv != null) { sv.fullScroll(View.FOCUS_DOWN); } } }); } }; private Callback<ResponseStatus> mCreateUserListener = new Callback<ResponseStatus>() { @Override public void onFailure(Call<ResponseStatus> call, Throwable t) { if (isIgnoreCallback(call, true)) { return; } showErrorPopup(t.getMessage(), false); } @Override public void onResponse(Call<ResponseStatus> call, 
Response<ResponseStatus> response) { if (isIgnoreCallback(call, response, true)) { return; } if (isInvalidResponse(response, true, false)) { return; } mUserInfo.photo = mBackupPhoto; if (mPhotoStatus == PhotoStatus.DELETE) { mBackupPhoto = null; mUserInfo.photo = null; } sendLocalBroadcast(Setting.BROADCAST_USER_COUNT, null); if (VersionData.getCloudVersion(mActivity) > 1) { mPopup.show(PopupType.CONFIRM, getString(R.string.info), getString(R.string.user_create_success) + "\n" + getString(R.string.add_credential), mNextSuccess, getString(android.R.string.yes), getString(android.R.string.no)); } else { mPopup.show(PopupType.CONFIRM, getString(R.string.info), getString(R.string.user_create_success), mPopupSucess, null, null); } } }; private Callback<ResponseStatus> mModifyUserListener = new Callback<ResponseStatus>() { @Override public void onFailure(Call<ResponseStatus> call, Throwable t) { if (isIgnoreCallback(call, true)) { return; } showErrorPopup(t.getMessage(), false); } @Override public void onResponse(Call<ResponseStatus> call, Response<ResponseStatus> response) { if (isIgnoreCallback(call, response, true)) { return; } if (isInvalidResponse(response, true, false)) { return; } mUserInfo.photo = mBackupPhoto; if (mPhotoStatus == PhotoStatus.DELETE) { mBackupPhoto = null; mUserInfo.photo = null; } if (mUserDataProvider.getLoginUserInfo().user_id.equals(mUserInfo.user_id)) { try { mUserDataProvider.setLoginUserInfo(mUserInfo.clone()); } catch (Exception e) { Log.e(TAG, " " + e.getMessage()); } mCommonDataProvider.simpleLogin(mLoginListener); } if (mLoginPasswordView.getVisibility() == View.VISIBLE) { if (mPasswordData != null) { mUserInfo.password_exist = true; } } mPopup.showWait(mCancelStayListener); request(mUserDataProvider.getUser(mUserInfo.user_id, mUpdateUserListener)); } }; private Callback<BioStarSetting> mSaveListener = new Callback<BioStarSetting>() { @Override public void onFailure(Call<BioStarSetting> call, Throwable t) { if (isIgnoreCallback(call, 
true)) { return; } showErrorPopup(t.getMessage(), false); } @Override public void onResponse(Call<BioStarSetting> call, Response<BioStarSetting> response) { if (isIgnoreCallback(call, response, true)) { return; } if (isInvalidResponse(response, true, false)) { return; } save(); } }; public UserModifyFragment() { super(); setType(ScreenType.USER_MODIFY); TAG = getClass().getSimpleName() + String.valueOf(System.currentTimeMillis()); } private void save() { if (mInvalidChecker.isEmptyString(getString(R.string.info), getString(R.string.user_create_empty), mUserIDView.content.toString2())) { mPopup.dismissWiat(); return; } if (mCommonDataProvider.isAlphaNumericUserID() == false) { try { long userId = Long.valueOf(mUserIDView.content.toString2()); if (userId < 1 || userId > 4294967294L || mUserIDView.content.toString2().startsWith("0")) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.invalid_userid), null, null, null); resetUserIDFilter(); return; } } catch (Exception e) { mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.invalid_userid), null, null, null); resetUserIDFilter(); return; } } else if ( mUserIDView.content.toString2().equals("0")){ mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.invalid_input_data), null, null, null); return; } if (mLoginIDView.getVisibility() == View.VISIBLE) { if (mInvalidChecker.isEmptyString(getString(R.string.info), getString(R.string.user_create_empty_idpassword), mLoginIDView.content.toString2())) { mPopup.dismissWiat(); return; } if (!mUserInfo.password_exist) { if (mPasswordData == null) { mPopup.dismissWiat(); mPopup.show(PopupType.ALERT, getString(R.string.info), getString(R.string.user_create_empty_idpassword), null, null, null); return; } } } if (mInvalidChecker.isInvalidEmail(getString(R.string.info), getString(R.string.invalid_email), mEmailView.content.toString2())) { mPopup.dismissWiat(); return; } if (mPinData != null && mPinData.length() > 0 && 
mPinData.length() < 4) {
            mPopup.dismissWiat();
            mPopup.show(PopupType.ALERT, getString(R.string.info),
                    getString(R.string.pincount), null, null, null);
            return;
        }
        UpdateClone();
        mPopup.showWait(mCancelStayListener);
        if (mIsNewUser) {
            request(mUserDataProvider.createUser(mUserInfo, mCreateUserListener));
        } else {
            request(mUserDataProvider.modifyUser(mUserInfo, mModifyUserListener));
        }
    }

    /**
     * Copies every edited form field back into {@code mUserInfo} so save() can
     * send it to the server.
     */
    private void UpdateClone() {
        mUserInfo.user_id = mUserIDView.content.toString2();
        if (VersionData.getCloudVersion(mActivity) > 1) {
            // v2+: login credentials only apply when an operator permission is set.
            if (mUserInfo.permission != null) {
                mUserInfo.password = mPasswordData;
                if (mLoginIDView.content.toString2().equals("")) {
                    mUserInfo.login_id = null;
                } else {
                    mUserInfo.login_id = mLoginIDView.content.toString2();
                }
            } else {
                mUserInfo.login_id = null;
                mUserInfo.password = null;
            }
        } else {
            // v1: credentials only apply when at least one role is assigned.
            if (mUserInfo.roles != null && mUserInfo.roles.size() > 0) {
                mUserInfo.password = mPasswordData;
                if (mLoginIDView.content.toString2().equals("")) {
                    mUserInfo.login_id = null;
                } else {
                    mUserInfo.login_id = mLoginIDView.content.toString2();
                }
            } else {
                mUserInfo.login_id = null;
                mUserInfo.password = null;
            }
        }
        BaseUserGroup userGroup = mUserInfo.user_group;
        if (userGroup == null) {
            // BUGFIX: the group id is stored as a tag on the content view (see
            // editUserGroup()/setView()), not on the item view itself, so read
            // it from mUserGroupView.content like the else-branch does.
            userGroup = new BaseUserGroup((String) mUserGroupView.content.getTag(),
                    mUserGroupView.content.toString2());
        } else {
            userGroup.name = mUserGroupView.content.toString2();
            userGroup.id = (String) mUserGroupView.content.getTag();
        }
        mUserInfo.user_group = userGroup;
        mUserInfo.name = mUserNameView.content.toString2();
        mUserInfo.email = mEmailView.content.toString2();
        mUserInfo.phone_number = mTelephoneView.content.toString2();
        if (mPinSwitch.getOn()) {
            // Only persist the PIN when a new, fully typed one was entered.
            if (mPinData != null && mPinData.length() > 3) {
                mUserInfo.pin_exist = true;
                mUserInfo.pin = mPinData;
            }
        } else {
            mUserInfo.pin_exist = false;
            mUserInfo.pin = "";
        }
        mUserInfo.setTimeFormmat(mDateTimeDataProvider, User.UserTimeType.start_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE, mDateStartView.toString2());
mUserInfo.setTimeFormmat(mDateTimeDataProvider, User.UserTimeType.expiry_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE, mDateEndView.toString2()); switch (mPhotoStatus) { case NOT_MODIFY: if (mUserInfo.photo != null) { mBackupPhoto = mUserInfo.photo; mUserInfo.photo_exist = true; } mUserInfo.photo = null; break; case MODIFY: mUserInfo.photo_exist = true; break; case DELETE: mUserInfo.photo_exist = false; mUserInfo.photo = ""; break; } if (mUserInfo.photo != null && !mUserInfo.photo.isEmpty()) { mUserInfo.photo_exist = true; } } private void createUser() { if (mUserInfo == null) { mUserInfo = new User(); mUserInfo.setDefaultValue(); mUserInfo.access_groups = new ArrayList<ListAccessGroup>(); mIsNewUser = true; Calendar cal = Calendar.getInstance(); mStartYear = 2001; mStartMonth = 0; mStartDay = 1; mEndYear = 2030; mEndMonth = 11; mEndDay = 31; cal.set(Calendar.YEAR, mStartYear); cal.set(Calendar.MONTH, mStartMonth); cal.set(Calendar.DAY_OF_MONTH, mStartDay); mUserInfo.setTimeCalendar(mDateTimeDataProvider, User.UserTimeType.start_datetime, cal); cal.set(Calendar.YEAR, mEndYear); cal.set(Calendar.MONTH, mEndMonth); cal.set(Calendar.DAY_OF_MONTH, mEndDay); mUserInfo.setTimeCalendar(mDateTimeDataProvider, User.UserTimeType.expiry_datetime, cal); if (mInitUserGroup != null) { mUserInfo.user_group = mInitUserGroup; } } else { Calendar cal = mUserInfo.getTimeCalendar(mDateTimeDataProvider, User.UserTimeType.expiry_datetime); if (cal == null) { cal = Calendar.getInstance(); } int year = cal.get(Calendar.YEAR); if (year > 2030) { cal.set(Calendar.YEAR, 2030); mUserInfo.setTimeCalendar(mDateTimeDataProvider, User.UserTimeType.expiry_datetime, cal); } mEndYear = cal.get(Calendar.YEAR); mEndMonth = cal.get(Calendar.MONTH); mEndDay = cal.get(Calendar.DAY_OF_MONTH); cal = mUserInfo.getTimeCalendar(mDateTimeDataProvider, User.UserTimeType.start_datetime); if (cal == null) { cal = Calendar.getInstance(); } year = cal.get(Calendar.YEAR); if (year < 2000) { 
cal.set(Calendar.YEAR, 2000); mUserInfo.setTimeCalendar(mDateTimeDataProvider, User.UserTimeType.start_datetime, cal); } mStartYear = cal.get(Calendar.YEAR); mStartMonth = cal.get(Calendar.MONTH); mStartDay = cal.get(Calendar.DAY_OF_MONTH); if (mUserInfo.access_groups == null) { mUserInfo.access_groups = new ArrayList<ListAccessGroup>(); } } } private void editAccessGroup() { Bundle bundle = new Bundle(); try { bundle.putSerializable(User.TAG, mUserInfo.clone()); bundle.putSerializable(Setting.DISABLE_MODIFY, false); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } mScreenControl.addScreen(ScreenType.USER_ACCESS_GROUP, bundle); } private void editCard() { Bundle bundle = new Bundle(); try { bundle.putSerializable(User.TAG, mUserInfo.clone()); bundle.putSerializable(Setting.DISABLE_MODIFY, false); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } mScreenControl.addScreen(ScreenType.CARD, bundle); } private void editFace() { if (VersionData.getCloudVersion(mActivity) > 1) { Bundle bundle = new Bundle(); try { bundle.putSerializable(User.TAG, mUserInfo.clone()); bundle.putSerializable(Setting.DISABLE_MODIFY, false); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } mScreenControl.addScreen(ScreenType.FACE, bundle); } } private void editFingerPrint() { Bundle bundle = new Bundle(); try { bundle.putSerializable(User.TAG, mUserInfo.clone()); bundle.putSerializable(Setting.DISABLE_MODIFY, false); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } mScreenControl.addScreen(ScreenType.FINGERPRINT_REGISTER, bundle); } private void editOperator() { // Bundle bundle = new Bundle(); // try { // bundle.putSerializable(User.TAG, mUserInfo.clone()); // bundle.putSerializable(Setting.DISABLE_MODIFY, false); // } catch (CloneNotSupportedException e) { // e.printStackTrace(); // return; // } // mScreenControl.addScreen(ScreenType.USER_PERMISSION, bundle); if (VersionData.getCloudVersion(mActivity) 
> 1) { //TODO SelectPopup<UserPermission> selectCloudRolePopup = new SelectPopup<UserPermission>(mActivity, mPopup); selectCloudRolePopup.show(SelectPopup.SelectType.V2_CLOUD_ROLE, new SelectPopup.OnSelectResultListener<UserPermission>() { @Override public void OnResult(ArrayList<UserPermission> selectedItem, boolean isPositive) { if (isInValidCheck()) { return; } if (selectedItem == null || selectedItem.size() < 1) { return; } UserPermission item = null; try { item = selectedItem.get(0).clone(); } catch (Exception e) { } if (item.id.equals(Setting.NONE_ITEM)) { mUserInfo.permission = null; } else { mUserInfo.permission = item; } setPermission(); } }, null, getString(R.string.select) + " " + getString(R.string.operator), false); } else { SelectPopup<CloudRole> selectCloudRolePopup = new SelectPopup<CloudRole>(mActivity, mPopup); selectCloudRolePopup.show(SelectPopup.SelectType.CLOUD_ROLE, new SelectPopup.OnSelectResultListener<CloudRole>() { @Override public void OnResult(ArrayList<CloudRole> selectedItem, boolean isPositive) { if (isInValidCheck()) { return; } if (selectedItem == null || selectedItem.size() < 1) { return; } if (selectedItem.get(0).code.equals(Setting.NONE_ITEM)) { selectedItem = new ArrayList<CloudRole>(); } if (mUserInfo != null) { mUserInfo.roles = (ArrayList<CloudRole>) selectedItem.clone(); } setPermission(); } }, null, getString(R.string.select) + " " + getString(R.string.operator), false); } } private void editUserGroup() { SelectPopup<UserGroup> selectPopup = new SelectPopup<UserGroup>(mActivity, mPopup); selectPopup.show(SelectPopup.SelectType.USER_GROUPS, new SelectPopup.OnSelectResultListener<UserGroup>() { @Override public void OnResult(ArrayList<UserGroup> selectedItem, boolean isPositive) { if (isInValidCheck()) { return; } if (selectedItem == null) { return; } UserGroup userGroup = selectedItem.get(0); mUserGroupView.content.setText(userGroup.name); mUserGroupView.content.setTag(userGroup.id); } }, null, 
getString(R.string.select_user_group), false, true); } @Override public void onAllow(int requestCode) { if (mHandler == null || requestCode != Setting.REQUEST_EXTERNAL_STORAGE) { return; } mHandler.removeCallbacks(mRunRditUserImage); mHandler.postDelayed(mRunRditUserImage, 1000); } @Override public void onDeny(int requestCode) { if (mHandler == null || requestCode != Setting.REQUEST_EXTERNAL_STORAGE) { return; } mHandler.removeCallbacks(mRunDeny); mHandler.postDelayed(mRunDeny, 1000); } private void editUserImage() { if (Build.VERSION.SDK_INT >= 23) { if ((ActivityCompat.checkSelfPermission(mActivity, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) || (ActivityCompat.checkSelfPermission(mActivity, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)) { ActivityCompat.requestPermissions(mActivity, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.READ_EXTERNAL_STORAGE}, Setting.REQUEST_EXTERNAL_STORAGE); return; } } SelectPopup<SelectCustomData> selectPopup = new SelectPopup<SelectCustomData>(mActivity, mPopup); ArrayList<SelectCustomData> linkType = new ArrayList<SelectCustomData>(); linkType.add(new SelectCustomData(getString(R.string.take_picture), TAKE_PICTURE, false)); linkType.add(new SelectCustomData(getString(R.string.from_gallery), FROM_GALLERY, false)); linkType.add(new SelectCustomData(getString(R.string.delete_picture), DELETE_PICTURE, false)); selectPopup.show(SelectPopup.SelectType.CUSTOM, new SelectPopup.OnSelectResultListener<SelectCustomData>() { @Override public void OnResult(ArrayList<SelectCustomData> selectedItem, boolean isPositive) { if (isInValidCheck()) { return; } if (selectedItem == null) { return; } int type = selectedItem.get(0).getIntId(); switch (type) { case TAKE_PICTURE: { Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE); intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, ImageUtil.getTempFileUri()); 
startActivityForResult(intent, REQ_ACTIVITY_CAMERA); break; } case FROM_GALLERY: { Intent intent = ImageUtil.getImageActionIntent(Intent.ACTION_PICK, false, Setting.USER_PROFILE_IMAGE_SIZE, Setting.USER_PROFILE_IMAGE_SIZE); startActivityForResult(intent, REQ_ACTIVITY_CAMERA_CROP); break; } case DELETE_PICTURE: { mUserInfo.photo = ""; mPhotoStatus = PhotoStatus.DELETE; mSummaryUserView.setUserPhotoDefault(); mSummaryUserView.setBlurBackGroudDefault(); mBackupPhoto = null; if (mBmpBlur != null) { mBmpBlur.recycle(); mBmpBlur = null; } if (mBmpRound != null) { mBmpRound.recycle(); mBmpRound = null; } break; } } } }, linkType, getString(R.string.edit_photo), false); } public void getImageUrlWithAuthority(Uri uri) { if (uri == null) { return; } InputStream is = null; if (uri.getAuthority() != null) { try { is = mActivity.getContentResolver().openInputStream(uri); Bitmap bmp = BitmapFactory.decodeStream(is); setImage(ImageUtil.resizeBitmap(bmp, Setting.USER_PROFILE_IMAGE_SIZE, true)); } catch (FileNotFoundException e) { e.printStackTrace(); } finally { try { is.close(); } catch (IOException e) { e.printStackTrace(); } } } return; } private void initValue(Bundle savedInstanceState) { if (mUserInfo == null) { mUserInfo = getExtraData(User.TAG, savedInstanceState); if (mUserInfo != null) { mUserInfo.backup(); } } if (mTextInputFilter == null) { mTextInputFilter = new TextInputFilter(mImm, mToastPopup); } if (mInitUserGroup == null) { mInitUserGroup = getExtraData(UserGroup.TAG, savedInstanceState); } if (savedInstanceState != null) { int photoMode = savedInstanceState.getInt("photoStatus"); mPhotoStatus = PhotoStatus.values()[photoMode]; mIsNewUser = savedInstanceState.getBoolean("newUser"); if (BuildConfig.DEBUG) { Log.e(TAG, "mPhotoStatus restore:" + mPhotoStatus + " mIsNewUser:" + mIsNewUser); } } mDateTimePicker = new DateTimePicker(getActivity()); mInvalidChecker = new InvalidChecker(mPopup); mSummaryUserView = (SummaryUserView) 
mRootView.findViewById(R.id.summray_user); mSummaryUserView.init(mSummaryUserViewListener); mUserIDView = (DetailEditItemView) mRootView.findViewById(R.id.user_id); mUserNameView = (DetailEditItemView) mRootView.findViewById(R.id.user_name); mTextInputFilter.setFilter(mUserNameView.content, TextInputFilter.EDIT_TYPE.USER_NAME); mEmailView = (DetailEditItemView) mRootView.findViewById(R.id.email); mTextInputFilter.setFilter( mEmailView.content, TextInputFilter.EDIT_TYPE.EMAIL); mTelephoneView = (DetailEditItemView) mRootView.findViewById(R.id.telephone); mTextInputFilter.setFilter(mTelephoneView.content, TextInputFilter.EDIT_TYPE.TELEPHONE); mOperatorView = (DetailTextItemView) mRootView.findViewById(R.id.operator); mLoginIDView = (DetailEditItemView) mRootView.findViewById(R.id.login_id); mTextInputFilter.setFilter(mLoginIDView.content, TextInputFilter.EDIT_TYPE.LOGIN_ID); mLoginPasswordView = (DetailTextItemView) mRootView.findViewById(R.id.login_password); mDateStartView = (StyledTextView) mRootView.findViewById(R.id.date_start); mDateEndView = (StyledTextView) mRootView.findViewById(R.id.date_end); mUserGroupView = (DetailTextItemView) mRootView.findViewById(R.id.user_group); mStatusView = (DetailSwitchItemView) mRootView.findViewById(R.id.status); // mPeriodView = (DetailEditItemView)mRootView.findViewById(R.id.period); mAccessGroupView = (DetailTextItemView) mRootView.findViewById(R.id.access_group); mFingerPrintView = (DetailTextItemView) mRootView.findViewById(R.id.fingerprint); mCardView = (DetailTextItemView) mRootView.findViewById(R.id.card); mFaceView = (DetailTextItemView) mRootView.findViewById(R.id.face); mPinView = (DetailSwitchItemView) mRootView.findViewById(R.id.pin); createUser(); setView(); if (mIsNewUser) { if (VersionData.getCloudVersion(mActivity) > 1) { mCardView.setVisibility(View.GONE); mFingerPrintView.setVisibility(View.GONE); mFaceView.setVisibility(View.GONE); } } } private boolean isExistImageCheck() { File cropFile = new 
File(ImageUtil.getTempFilePath()); if (cropFile.exists() == false) { return false; } Bitmap bmp = BitmapFactory.decodeFile(ImageUtil.getTempFilePath()); if (null == bmp) { cropFile.delete(); return false; } cropFile.delete(); setImage(bmp); return true; } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { if (BuildConfig.DEBUG) { Log.i(TAG, "onActivityResult:" + requestCode + "code:" + resultCode); } if (Activity.RESULT_OK != resultCode) return; switch (requestCode) { case REQ_ACTIVITY_CAMERA: { Intent intent = ImageUtil.getImageActionIntent("com.android.camera.action.CROP", true, Setting.USER_PROFILE_IMAGE_SIZE, Setting.USER_PROFILE_IMAGE_SIZE); startActivityForResult(intent, REQ_ACTIVITY_CAMERA_CROP); break; } case REQ_ACTIVITY_CAMERA_CROP: { if (!isExistImageCheck() && data != null) { getImageUrlWithAuthority(data.getData()); } break; } } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { setResID(R.layout.fragment_user_modify); super.onCreateView(inflater, container, savedInstanceState); if (!mIsReUsed) { initValue(savedInstanceState); String title = mUserInfo.name; if (mIsNewUser) { title = getString(R.string.new_user); } initActionbar(title, R.drawable.action_bar_bg); mRootView.invalidate(); } return mRootView; } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); Log.e(TAG, "onSaveInstanceState"); User bundleItem = null; try { bundleItem = (User) mUserInfo.clone(); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } outState.putSerializable(User.TAG, bundleItem); outState.putInt("photoStatus", mPhotoStatus.ordinal()); outState.putBoolean("newUser", mIsNewUser); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.save, menu); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } @Override public void 
onPause() { if (mUserNameView != null) { hideIme(mUserNameView.content); } super.onPause(); } @Override public void onDestroy() { if (mTextInputFilter != null && mLoginIDView != null) { mTextInputFilter.setFilter(mLoginIDView.content, TextInputFilter.EDIT_TYPE.NONE); } if (mTextInputFilter != null && mUserIDView != null) { mTextInputFilter.setFilter(mUserIDView.content, TextInputFilter.EDIT_TYPE.NONE); } super.onDestroy(); if (mBmpBlur != null) { mBmpBlur.recycle(); mBmpBlur = null; } if (mBmpRound != null) { mBmpRound.recycle(); mBmpRound = null; } } private void resetUserIDFilter() { if (mIsNewUser) { mUserIDView.enableEdit(true); if (VersionData.getCloudVersion(mActivity) > 1) { if (mCommonDataProvider.isAlphaNumericUserID()) { mUserIDView.setInputType(InputType.TYPE_CLASS_TEXT); } else { mUserIDView.setInputType(InputType.TYPE_CLASS_NUMBER); } } else { mUserIDView.setInputType(InputType.TYPE_CLASS_NUMBER); } mTextInputFilter.setFilter(mUserIDView.content, TextInputFilter.EDIT_TYPE.USER_ID); mUserIDView.setOnClickListener(mClickListener); } else { mUserIDView.enableEdit(false); mUserIDView.setOnClickListener(null); } } @Override public boolean onOptionsItemSelected(MenuItem item) { if (super.onOptionsItemSelected(item)) { return true; } switch (item.getItemId()) { case R.id.action_save: if (VersionData.getCloudVersion(mActivity) > 1) { mPopup.showWait(mCancelStayListener); request(mCommonDataProvider.getBioStarSetting(mSaveListener)); } else { save(); } return true; default: break; } return false; } private void setProfileImage() { mSummaryUserView.setBlurBackGroudDefault(); mSummaryUserView.setUserPhotoDefault(); if (mUserInfo.photo != null && !mUserInfo.photo.isEmpty()) { if (mBmpRound != null) { mBmpRound.recycle(); mBmpRound = null; } if (mBmpBlur != null) { mBmpBlur.recycle(); mBmpBlur = null; } byte[] photoByte = Base64.decode(mUserInfo.photo, 0); Bitmap bmp = ImageUtil.byteArrayToBitmap(photoByte); if (bmp != null) { mBmpBlur = ImageUtil.fastBlur(bmp, 
32); mSummaryUserView.setBlurBackGroud(mBmpBlur); mBmpRound = ImageUtil.getRoundedBitmap(bmp, true); mSummaryUserView.setUserPhoto(mBmpRound); } } } @Override protected void registerBroadcast() { if (mReceiver == null) { mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { final String action = intent.getAction(); if (mIsDestroy) { return; } if (action.equals(Setting.BROADCAST_UPDATE_FINGER)) { User user = getExtraData(Setting.BROADCAST_UPDATE_FINGER, intent); if (user == null || user.fingerprint_templates == null) { return; } if (mUserInfo != null) { mUserInfo.fingerprint_templates = user.fingerprint_templates; mUserInfo.fingerprint_template_count = user.fingerprint_templates.size(); mUserInfo.fingerprint_count = user.fingerprint_templates.size(); } setFingerCount(); return; } if (action.equals(Setting.BROADCAST_UPDATE_CARD)) { User user = getExtraData(Setting.BROADCAST_UPDATE_CARD, intent); if (user == null || user.cards == null) { return; } if (mUserInfo != null) { mUserInfo.cards = user.cards; mUserInfo.card_count = user.cards.size(); } setCardCount(); return; } if (action.equals(Setting.BROADCAST_UPDATE_FACE)) { User user = getExtraData(Setting.BROADCAST_UPDATE_FACE, intent); if (user == null) { return; } if (mUserInfo != null) { mUserInfo.face_template_count = user.face_template_count; } setFaceCount(); if (mUserInfo.last_modify != null && !mUserInfo.last_modify.equals(user.last_modify)) { if (user.photo != null && !user.photo.isEmpty()) { mPhotoStatus = PhotoStatus.MODIFY; mUserInfo.photo = user.photo; mUserInfo.photo_exist = true; mBackupPhoto = user.photo; setProfileImage(); } } return; } if (action.equals(Setting.BROADCAST_UPDATE_USER_ACCESS_GROUP)) { ArrayList<ListAccessGroup> accessGroups = getExtraData(Setting.BROADCAST_UPDATE_USER_ACCESS_GROUP, intent); if (accessGroups == null) { return; } if (mUserInfo != null) { mUserInfo.access_groups = accessGroups; } setAccessGroupCount(); return; } if 
(action.equals(Setting.BROADCAST_PREFRENCE_REFRESH)) { if (mActivity == null) { return; } mDateStartView.setText(mUserInfo.getTimeFormmat(mDateTimeDataProvider, User.UserTimeType.start_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE)); mDateEndView.setText(mUserInfo.getTimeFormmat(mDateTimeDataProvider, User.UserTimeType.expiry_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE)); return; } if (action.equals(Setting.BROADCAST_UPDATE_PERMISSION)) { ArrayList<CloudRole> permissions = getExtraData(Setting.BROADCAST_UPDATE_PERMISSION, intent); if (permissions == null) { return; } if (mUserInfo != null) { mUserInfo.roles = (ArrayList<CloudRole>) permissions.clone(); } setPermission(); return; } } }; IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(Setting.BROADCAST_UPDATE_FINGER); intentFilter.addAction(Setting.BROADCAST_UPDATE_CARD); intentFilter.addAction(Setting.BROADCAST_UPDATE_FACE); intentFilter.addAction(Setting.BROADCAST_UPDATE_USER_ACCESS_GROUP); intentFilter.addAction(Setting.BROADCAST_PREFRENCE_REFRESH); intentFilter.addAction(Setting.BROADCAST_UPDATE_PERMISSION); LocalBroadcastManager.getInstance(getActivity()).registerReceiver(mReceiver, intentFilter); } } private void selectDatePicker() { SelectPopup<SelectCustomData> selectPopup = new SelectPopup<SelectCustomData>(mActivity, mPopup); ArrayList<SelectCustomData> linkType = new ArrayList<SelectCustomData>(); linkType.add(new SelectCustomData(mActivity.getString(R.string.start_date), 1, false)); linkType.add(new SelectCustomData(mActivity.getString(R.string.end_date), 2, false)); selectPopup.show(SelectPopup.SelectType.CUSTOM, new SelectPopup.OnSelectResultListener<SelectCustomData>() { @Override public void OnResult(ArrayList<SelectCustomData> selectedItem, boolean isPositive) { if (isInValidCheck()) { return; } if (selectedItem == null) { return; } switch (selectedItem.get(0).getIntId()) { case 1: { mDateTimePicker.showDatePicker(mStartDateListener, mStartYear, mStartMonth, 
mStartDay); break; } case 2: { mDateTimePicker.showDatePicker(mEndDateListener, mEndYear, mEndMonth, mEndDay); break; } default: break; } } }, linkType, mActivity.getString(R.string.select_link), false, false); } private void setAccessGroupCount() { if (mUserInfo.access_groups != null) { mAccessGroupView.content.setText(String.valueOf(mUserInfo.access_groups.size())); } else { mAccessGroupView.content.setText("0"); } } private void setCardCount() { int count = 0; if (VersionData.getCloudVersion(mActivity) > 1) { count = mUserInfo.card_count; } else { if (mUserInfo.cards != null) { count = mUserInfo.cards.size(); } } mCardView.content.setText(String.valueOf(count)); mSummaryUserView.setCardCount(String.valueOf(count)); } private void setFaceCount() { int count = mUserInfo.face_template_count; mFaceView.content.setText(String.valueOf(count)); mSummaryUserView.setFaceCount(String.valueOf(count)); } private void setFingerCount() { int count = 0; if (VersionData.getCloudVersion(mActivity) < 2) { if (mUserInfo.fingerprint_templates != null) { count = mUserInfo.fingerprint_templates.size(); } } else { count = mUserInfo.fingerprint_template_count; } mFingerPrintView.content.setText(String.valueOf(count)); mSummaryUserView.setFingerCount(String.valueOf(count)); } private void setImage(Bitmap bmp) { if (bmp == null) { return; } mSummaryUserView.setBlurBackGroudDefault(); mSummaryUserView.setUserPhotoDefault(); if (mBmpRound != null) { mBmpRound.recycle(); mBmpRound = null; } if (mBmpBlur != null) { mBmpBlur.recycle(); mBmpBlur = null; } mBmpBlur = ImageUtil.fastBlur(bmp, 32); mSummaryUserView.setBlurBackGroud(mBmpBlur); mPhotoStatus = PhotoStatus.MODIFY; Bitmap bmp2 = null; byte[] reSizeByte = ImageUtil.bitmapToByteArray(bmp, 20); if (BuildConfig.DEBUG) { Log.e(TAG, "reSizeByte:" + reSizeByte.length); } if (reSizeByte.length > Setting.USER_PROFILE_IMAGE_SIZE_BYTE) { Log.e(TAG, "reSizeByte2:" + reSizeByte.length); reSizeByte = ImageUtil.bitmapToByteArray(bmp, 0); if 
(reSizeByte.length > Setting.USER_PROFILE_IMAGE_SIZE_BYTE) { bmp2 = ImageUtil.resizeBitmap(bmp, Setting.USER_PROFILE_IMAGE_SIZE / 2, false); reSizeByte = ImageUtil.bitmapToByteArray(bmp2, 0); Log.e(TAG, "reSizeByte3:" + reSizeByte.length); } } mUserInfo.photo = Base64.encodeToString(reSizeByte, 0); mUserInfo.photo = mUserInfo.photo.replaceAll("\n", ""); mBackupPhoto = mUserInfo.photo; mBmpRound = ImageUtil.getRoundedBitmap(bmp, true); mSummaryUserView.setUserPhoto(mBmpRound); if (bmp2 != null) { bmp2.recycle(); bmp2 = null; } } private void setPermission() { if (VersionData.getCloudVersion(mActivity) > 1) { if (mUserInfo.permission != null) { mLoginIDView.setVisibility(View.VISIBLE); mLoginPasswordView.setVisibility(View.VISIBLE); mOperatorView.content.setText(mUserInfo.permission.name); if (mUserInfo.password_exist || (mPasswordData != null && !mPasswordData.isEmpty())) { mLoginPasswordView.content.setText(getString(R.string.password_display)); } else { mLoginPasswordView.content.setText(""); } } else { mOperatorView.content.setText(getString(R.string.none)); mUserInfo.password_exist = false; mLoginPasswordView.content.setText(""); mLoginIDView.setVisibility(View.GONE); mLoginPasswordView.setVisibility(View.GONE); } } else { if (mUserInfo.roles == null || mUserInfo.roles.size() < 1) { mOperatorView.content.setText(getString(R.string.none)); mUserInfo.password_exist = false; mLoginPasswordView.content.setText(""); mLoginIDView.setVisibility(View.GONE); mLoginPasswordView.setVisibility(View.GONE); } else { mLoginIDView.setVisibility(View.VISIBLE); mLoginPasswordView.setVisibility(View.VISIBLE); int size = mUserInfo.roles.size(); if (size == 1) { mOperatorView.content.setText(mUserInfo.roles.get(0).description); } else if (size > 1) { mOperatorView.content.setText(mUserInfo.roles.get(size).description + " + " + mUserInfo.roles.size()); } if (mUserInfo.password_exist || (mPasswordData != null && !mPasswordData.isEmpty())) { 
mLoginPasswordView.content.setText(getString(R.string.password_display)); } else { mLoginPasswordView.content.setText(""); } } } } private void setView() { mSummaryUserView.setUserID(mUserInfo.user_id); mSummaryUserView.setUserName(mUserInfo.name); mSummaryUserView.showPin(mUserInfo.pin_exist); mUserIDView.setContentText(mUserInfo.user_id); resetUserIDFilter(); mUserNameView.setContentText(mUserInfo.name); mUserNameView.setOnClickListener(mClickListener); mEmailView.setContentText(mUserInfo.email); mEmailView.setOnClickListener(mClickListener); mTelephoneView.setContentText(mUserInfo.phone_number); mTelephoneView.setOnClickListener(mClickListener); mOperatorView.enableLink(true, mClickListener); setPermission(); mLoginIDView.setContentText(mUserInfo.login_id); mLoginIDView.setOnClickListener(mClickListener); mLoginPasswordView.enableLink(true, mClickListener); mUserGroupView.enableLink(true, mClickListener); if (mUserInfo.user_group != null) { mUserGroupView.content.setText(mUserInfo.user_group.name); mUserGroupView.content.setTag(mUserInfo.user_group.id); } else { mUserGroupView.content.setText(getString(R.string.all_users)); mUserGroupView.content.setTag(String.valueOf(1)); } mStatusView.mContent.setVisibility(View.GONE); mStatusView.setOnClickListener(mClickListener); if (mUserInfo.isActive()) { mStatusView.mIndex.setText(getString(R.string.status) + " " + getString(R.string.active)); } else { mStatusView.mIndex.setText(getString(R.string.status) + " " + getString(R.string.inactive)); } mStatusSwitch = mStatusView.mSwitchView; mStatusSwitch.init(getActivity(), new SwitchView.OnChangeListener() { @Override public boolean onChange(boolean on) { if (BuildConfig.DEBUG) { Log.e(TAG, "status :" + on); } if (on) { mUserInfo.setActive(true); mStatusView.mIndex.setText(getString(R.string.status) + " " + getString(R.string.active)); } else { mUserInfo.setActive(false); mStatusView.mIndex.setText(getString(R.string.status) + " " + getString(R.string.inactive)); } return 
true; } }, mUserInfo.isActive()); mStatusSwitch.setSwitch(mUserInfo.isActive()); mRootView.findViewById(R.id.date_edit).setOnClickListener(mClickListener); mRootView.findViewById(R.id.date_arrow).setOnClickListener(mClickListener); mDateStartView.setOnClickListener(mClickListener); mDateEndView.setOnClickListener(mClickListener); mDateStartView.setText(mUserInfo.getTimeFormmat(mDateTimeDataProvider, User.UserTimeType.start_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE)); mDateEndView.setText(mUserInfo.getTimeFormmat(mDateTimeDataProvider, User.UserTimeType.expiry_datetime, DateTimeDataProvider.DATE_TYPE.FORMAT_DATE)); mAccessGroupView.enableLink(true, mClickListener); mFingerPrintView.enableLink(true, mClickListener); mCardView.enableLink(true, mClickListener); mFaceView.enableLink(true, mClickListener); setAccessGroupCount(); setFingerCount(); setCardCount(); setFaceCount(); mPinView.setOnClickListener(mClickListener); mPinSwitch = mPinView.mSwitchView; if (mUserInfo.pin_exist || (mPinData != null && !mPinData.isEmpty())) { mPinView.mContent.setText(getString(R.string.password_display)); } else { mPinView.mContent.setText(""); } mPinSwitch.init(getActivity(), new SwitchView.OnChangeListener() { @Override public boolean onChange(boolean on) { if (BuildConfig.DEBUG) { Log.e(TAG, "pin :" + on); } if (on) { mHandler.post(new Runnable() { @Override public void run() { showPinPasswodPopup(); } }); } else { mPinView.mContent.setText(""); mPinData = ""; } return true; } }, (mUserInfo.pin_exist || (mPinData != null && !mPinData.isEmpty()))); mPinSwitch.setSwitch((mUserInfo.pin_exist || (mPinData != null && !mPinData.isEmpty()))); mSummaryUserView.setUserPhotoDefault(); if (mBmpRound != null) { mSummaryUserView.setUserPhoto(mBmpRound); } else { setProfileImage(); } isExistImageCheck(); } private void showPasswodPopup() { PasswordPopup passwordPopup = new PasswordPopup(mActivity); passwordPopup.show(false, getString(R.string.password), new 
PasswordPopup.OnPasswordResult() { @Override public void OnResult(String data) { if (isInValidCheck()) { return; } if (data == null) { if (mUserInfo.password_exist || mPasswordData != null) { mLoginPasswordView.content.setText(getString(R.string.password_display)); } else { mLoginPasswordView.content.setText(""); } return; } mLoginPasswordView.content.setText(getString(R.string.password_display)); mPasswordData = data; } }); } private void showPinPasswodPopup() { PasswordPopup passwordPopup = new PasswordPopup(mActivity); passwordPopup.show(true, getString(R.string.pin_upper), new PasswordPopup.OnPasswordResult() { @Override public void OnResult(String data) { if (isInValidCheck()) { return; } if (data == null) { if (mUserInfo.pin_exist || (mPinData != null && !mPinData.isEmpty())) { mPinView.mContent.setText(getString(R.string.password_display)); } else { mPinSwitch.setSwitch(false); } return; } mPinView.mContent.setText(getString(R.string.password_display)); mPinData = data; } }); } private void showUserViewLog() { User bundleItem = null; try { bundleItem = (User) mUserInfo.clone(); } catch (CloneNotSupportedException e) { e.printStackTrace(); return; } Bundle bundle = new Bundle(); bundle.putSerializable(User.TAG, bundleItem); mScreenControl.addScreen(ScreenType.MONITOR, bundle); } private enum PhotoStatus { NOT_MODIFY, MODIFY, DELETE } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.saga; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Expression; import org.apache.camel.RuntimeCamelException; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A in-memory implementation of a saga coordinator. 
*/ public class InMemorySagaCoordinator implements CamelSagaCoordinator { private enum Status { RUNNING, COMPENSATING, COMPENSATED, COMPLETING, COMPLETED } private static final Logger LOG = LoggerFactory.getLogger(InMemorySagaCoordinator.class); private CamelContext camelContext; private InMemorySagaService sagaService; private String sagaId; private List<CamelSagaStep> steps; private Map<CamelSagaStep, Map<String, Object>> optionValues; private AtomicReference<Status> currentStatus; public InMemorySagaCoordinator(CamelContext camelContext, InMemorySagaService sagaService, String sagaId) { this.camelContext = ObjectHelper.notNull(camelContext, "camelContext"); this.sagaService = ObjectHelper.notNull(sagaService, "sagaService"); this.sagaId = ObjectHelper.notNull(sagaId, "sagaId"); this.steps = new CopyOnWriteArrayList<>(); this.optionValues = new ConcurrentHashMap<>(); this.currentStatus = new AtomicReference<>(Status.RUNNING); } @Override public String getId() { return sagaId; } @Override public CompletableFuture<Void> beginStep(Exchange exchange, CamelSagaStep step) { this.steps.add(step); if (!step.getOptions().isEmpty()) { optionValues.putIfAbsent(step, new ConcurrentHashMap<>()); Map<String, Object> values = optionValues.get(step); for (String option : step.getOptions().keySet()) { Expression expression = step.getOptions().get(option); try { values.put(option, expression.evaluate(exchange, Object.class)); } catch (Exception ex) { return CompletableFuture.supplyAsync(() -> { throw new RuntimeCamelException("Cannot evaluate saga option '" + option + "'", ex); }); } } } if (step.getTimeoutInMilliseconds().isPresent()) { sagaService.getExecutorService().schedule(() -> { boolean doAction = currentStatus.compareAndSet(Status.RUNNING, Status.COMPENSATING); if (doAction) { doCompensate(); } }, step.getTimeoutInMilliseconds().get(), TimeUnit.MILLISECONDS); } return CompletableFuture.completedFuture(null); } @Override public CompletableFuture<Void> compensate() { 
boolean doAction = currentStatus.compareAndSet(Status.RUNNING, Status.COMPENSATING); if (doAction) { doCompensate(); } else { Status status = currentStatus.get(); if (status != Status.COMPENSATING && status != Status.COMPENSATED) { CompletableFuture<Void> res = new CompletableFuture<>(); res.completeExceptionally(new IllegalStateException("Cannot compensate: status is " + status)); return res; } } return CompletableFuture.completedFuture(null); } @Override public CompletableFuture<Void> complete() { boolean doAction = currentStatus.compareAndSet(Status.RUNNING, Status.COMPLETING); if (doAction) { doComplete(); } else { Status status = currentStatus.get(); if (status != Status.COMPLETING && status != Status.COMPLETED) { CompletableFuture<Void> res = new CompletableFuture<>(); res.completeExceptionally(new IllegalStateException("Cannot complete: status is " + status)); return res; } } return CompletableFuture.completedFuture(null); } public CompletableFuture<Boolean> doCompensate() { return doFinalize(CamelSagaStep::getCompensation, "compensation") .thenApply(res -> { currentStatus.set(Status.COMPENSATED); return res; }); } public CompletableFuture<Boolean> doComplete() { return doFinalize(CamelSagaStep::getCompletion, "completion") .thenApply(res -> { currentStatus.set(Status.COMPLETED); return res; }); } public CompletableFuture<Boolean> doFinalize( Function<CamelSagaStep, Optional<Endpoint>> endpointExtractor, String description) { CompletableFuture<Boolean> result = CompletableFuture.completedFuture(true); for (CamelSagaStep step : reversed(steps)) { Optional<Endpoint> endpoint = endpointExtractor.apply(step); if (endpoint.isPresent()) { result = result.thenCompose( prevResult -> doFinalize(endpoint.get(), step, 0, description).thenApply(res -> prevResult && res)); } } return result.whenComplete((done, ex) -> { if (ex != null) { LOG.error("Cannot finalize " + description + " the saga", ex); } else if (!done) { LOG.warn("Unable to finalize " + description + " for 
all required steps of the saga " + sagaId); } }); } private CompletableFuture<Boolean> doFinalize(Endpoint endpoint, CamelSagaStep step, int doneAttempts, String description) { Exchange exchange = createExchange(endpoint, step); return CompletableFuture.supplyAsync(() -> { Exchange res = camelContext.createFluentProducerTemplate().to(endpoint).withExchange(exchange).send(); Exception ex = res.getException(); if (ex != null) { throw new RuntimeCamelException(res.getException()); } return true; }, sagaService.getExecutorService()).exceptionally(ex -> { LOG.warn("Exception thrown during " + description + " at " + endpoint.getEndpointUri() + ". Attempt " + (doneAttempts + 1) + " of " + sagaService.getMaxRetryAttempts(), ex); return false; }).thenCompose(executed -> { int currentAttempt = doneAttempts + 1; if (executed) { return CompletableFuture.completedFuture(true); } else if (currentAttempt >= sagaService.getMaxRetryAttempts()) { return CompletableFuture.completedFuture(false); } else { CompletableFuture<Boolean> future = new CompletableFuture<>(); sagaService.getExecutorService().schedule(() -> { doFinalize(endpoint, step, currentAttempt, description).whenComplete((res, ex) -> { if (ex != null) { future.completeExceptionally(ex); } else { future.complete(res); } }); }, sagaService.getRetryDelayInMilliseconds(), TimeUnit.MILLISECONDS); return future; } }); } private Exchange createExchange(Endpoint endpoint, CamelSagaStep step) { Exchange exchange = endpoint.createExchange(); exchange.getIn().setHeader(Exchange.SAGA_LONG_RUNNING_ACTION, getId()); Map<String, Object> values = optionValues.get(step); if (values != null) { for (Map.Entry<String, Object> entry : values.entrySet()) { exchange.getIn().setHeader(entry.getKey(), entry.getValue()); } } return exchange; } private <T> List<T> reversed(List<T> list) { List<T> reversed = new ArrayList<>(list); Collections.reverse(reversed); return reversed; } }
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Roger Lawrence * Mike McCabe * Igor Bukanov * Ethan Hugg * Bob Jervis * Terry Lucas * Milen Nankov * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. * * ***** END LICENSE BLOCK ***** */ package org.mozilla.javascript; import java.io.*; /** * This class implements the JavaScript scanner. * * It is based on the C source files jsscan.c and jsscan.h * in the jsref package. 
* * @see Parser * * @author Mike McCabe * @author Brendan Eich */ class TokenStream { /* * For chars - because we need something out-of-range * to check. (And checking EOF by exception is annoying.) * Note distinction from EOF token type! */ private final static int EOF_CHAR = -1; TokenStream(Parser parser, Reader sourceReader, String sourceString, int lineno) { this.parser = parser; this.lineno = lineno; if (sourceReader != null) { if (sourceString != null) Kit.codeBug(); this.sourceReader = sourceReader; this.sourceBuffer = new char[512]; this.sourceEnd = 0; } else { if (sourceString == null) Kit.codeBug(); this.sourceString = sourceString; this.sourceEnd = sourceString.length(); } this.sourceCursor = 0; } /* This function uses the cached op, string and number fields in * TokenStream; if getToken has been called since the passed token * was scanned, the op or string printed may be incorrect. */ String tokenToString(int token) { if (Token.printTrees) { String name = Token.name(token); switch (token) { case Token.STRING: case Token.REGEXP: case Token.NAME: return name + " `" + this.string + "'"; case Token.NUMBER: return "NUMBER " + this.number; } return name; } return ""; } static boolean isKeyword(String s) { return Token.EOF != stringToKeyword(s); } private static int stringToKeyword(String name) { // #string_id_map# // The following assumes that Token.EOF == 0 final int Id_break = Token.BREAK, Id_case = Token.CASE, Id_continue = Token.CONTINUE, Id_default = Token.DEFAULT, Id_delete = Token.DELPROP, Id_do = Token.DO, Id_else = Token.ELSE, Id_export = Token.EXPORT, Id_false = Token.FALSE, Id_for = Token.FOR, Id_function = Token.FUNCTION, Id_if = Token.IF, Id_in = Token.IN, Id_let = Token.LET, Id_new = Token.NEW, Id_null = Token.NULL, Id_return = Token.RETURN, Id_switch = Token.SWITCH, Id_this = Token.THIS, Id_true = Token.TRUE, Id_typeof = Token.TYPEOF, Id_var = Token.VAR, Id_void = Token.VOID, Id_while = Token.WHILE, Id_with = Token.WITH, Id_yield = Token.YIELD, 
// the following are #ifdef RESERVE_JAVA_KEYWORDS in jsscan.c Id_abstract = Token.RESERVED, Id_boolean = Token.RESERVED, Id_byte = Token.RESERVED, Id_catch = Token.CATCH, Id_char = Token.RESERVED, Id_class = Token.RESERVED, Id_const = Token.CONST, Id_debugger = Token.DEBUGGER, Id_double = Token.RESERVED, Id_enum = Token.RESERVED, Id_extends = Token.RESERVED, Id_final = Token.RESERVED, Id_finally = Token.FINALLY, Id_float = Token.RESERVED, Id_goto = Token.RESERVED, Id_implements = Token.RESERVED, Id_import = Token.IMPORT, Id_instanceof = Token.INSTANCEOF, Id_int = Token.RESERVED, Id_interface = Token.RESERVED, Id_long = Token.RESERVED, Id_native = Token.RESERVED, Id_package = Token.RESERVED, Id_private = Token.RESERVED, Id_protected = Token.RESERVED, Id_public = Token.RESERVED, Id_short = Token.RESERVED, Id_static = Token.RESERVED, Id_super = Token.RESERVED, Id_synchronized = Token.RESERVED, Id_throw = Token.THROW, Id_throws = Token.RESERVED, Id_transient = Token.RESERVED, Id_try = Token.TRY, Id_volatile = Token.RESERVED; int id; String s = name; // #generated# Last update: 2007-04-18 13:53:30 PDT L0: { id = 0; String X = null; int c; L: switch (s.length()) { case 2: c=s.charAt(1); if (c=='f') { if (s.charAt(0)=='i') {id=Id_if; break L0;} } else if (c=='n') { if (s.charAt(0)=='i') {id=Id_in; break L0;} } else if (c=='o') { if (s.charAt(0)=='d') {id=Id_do; break L0;} } break L; case 3: switch (s.charAt(0)) { case 'f': if (s.charAt(2)=='r' && s.charAt(1)=='o') {id=Id_for; break L0;} break L; case 'i': if (s.charAt(2)=='t' && s.charAt(1)=='n') {id=Id_int; break L0;} break L; case 'l': if (s.charAt(2)=='t' && s.charAt(1)=='e') {id=Id_let; break L0;} break L; case 'n': if (s.charAt(2)=='w' && s.charAt(1)=='e') {id=Id_new; break L0;} break L; case 't': if (s.charAt(2)=='y' && s.charAt(1)=='r') {id=Id_try; break L0;} break L; case 'v': if (s.charAt(2)=='r' && s.charAt(1)=='a') {id=Id_var; break L0;} break L; } break L; case 4: switch (s.charAt(0)) { case 'b': 
X="byte";id=Id_byte; break L; case 'c': c=s.charAt(3); if (c=='e') { if (s.charAt(2)=='s' && s.charAt(1)=='a') {id=Id_case; break L0;} } else if (c=='r') { if (s.charAt(2)=='a' && s.charAt(1)=='h') {id=Id_char; break L0;} } break L; case 'e': c=s.charAt(3); if (c=='e') { if (s.charAt(2)=='s' && s.charAt(1)=='l') {id=Id_else; break L0;} } else if (c=='m') { if (s.charAt(2)=='u' && s.charAt(1)=='n') {id=Id_enum; break L0;} } break L; case 'g': X="goto";id=Id_goto; break L; case 'l': X="long";id=Id_long; break L; case 'n': X="null";id=Id_null; break L; case 't': c=s.charAt(3); if (c=='e') { if (s.charAt(2)=='u' && s.charAt(1)=='r') {id=Id_true; break L0;} } else if (c=='s') { if (s.charAt(2)=='i' && s.charAt(1)=='h') {id=Id_this; break L0;} } break L; case 'v': X="void";id=Id_void; break L; case 'w': X="with";id=Id_with; break L; } break L; case 5: switch (s.charAt(2)) { case 'a': X="class";id=Id_class; break L; case 'e': c=s.charAt(0); if (c=='b') { X="break";id=Id_break; } else if (c=='y') { X="yield";id=Id_yield; } break L; case 'i': X="while";id=Id_while; break L; case 'l': X="false";id=Id_false; break L; case 'n': c=s.charAt(0); if (c=='c') { X="const";id=Id_const; } else if (c=='f') { X="final";id=Id_final; } break L; case 'o': c=s.charAt(0); if (c=='f') { X="float";id=Id_float; } else if (c=='s') { X="short";id=Id_short; } break L; case 'p': X="super";id=Id_super; break L; case 'r': X="throw";id=Id_throw; break L; case 't': X="catch";id=Id_catch; break L; } break L; case 6: switch (s.charAt(1)) { case 'a': X="native";id=Id_native; break L; case 'e': c=s.charAt(0); if (c=='d') { X="delete";id=Id_delete; } else if (c=='r') { X="return";id=Id_return; } break L; case 'h': X="throws";id=Id_throws; break L; case 'm': X="import";id=Id_import; break L; case 'o': X="double";id=Id_double; break L; case 't': X="static";id=Id_static; break L; case 'u': X="public";id=Id_public; break L; case 'w': X="switch";id=Id_switch; break L; case 'x': X="export";id=Id_export; break L; 
case 'y': X="typeof";id=Id_typeof; break L; } break L; case 7: switch (s.charAt(1)) { case 'a': X="package";id=Id_package; break L; case 'e': X="default";id=Id_default; break L; case 'i': X="finally";id=Id_finally; break L; case 'o': X="boolean";id=Id_boolean; break L; case 'r': X="private";id=Id_private; break L; case 'x': X="extends";id=Id_extends; break L; } break L; case 8: switch (s.charAt(0)) { case 'a': X="abstract";id=Id_abstract; break L; case 'c': X="continue";id=Id_continue; break L; case 'd': X="debugger";id=Id_debugger; break L; case 'f': X="function";id=Id_function; break L; case 'v': X="volatile";id=Id_volatile; break L; } break L; case 9: c=s.charAt(0); if (c=='i') { X="interface";id=Id_interface; } else if (c=='p') { X="protected";id=Id_protected; } else if (c=='t') { X="transient";id=Id_transient; } break L; case 10: c=s.charAt(1); if (c=='m') { X="implements";id=Id_implements; } else if (c=='n') { X="instanceof";id=Id_instanceof; } break L; case 12: X="synchronized";id=Id_synchronized; break L; } if (X!=null && X!=s && !X.equals(s)) id = 0; } // #/generated# // #/string_id_map# if (id == 0) { return Token.EOF; } return id & 0xff; } final int getLineno() { return lineno; } final String getString() { return string; } final double getNumber() { return number; } final boolean eof() { return hitEOF; } final int getToken() throws IOException { int c; retry: for (;;) { // Eat whitespace, possibly sensitive to newlines. for (;;) { c = getChar(); if (c == EOF_CHAR) { return Token.EOF; } else if (c == '\n') { dirtyLine = false; return Token.EOL; } else if (!isJSSpace(c)) { if (c != '-') { dirtyLine = true; } break; } } if (c == '@') return Token.XMLATTR; // identifier/keyword/instanceof? 
// watch out for starting with a <backslash> boolean identifierStart; boolean isUnicodeEscapeStart = false; if (c == '\\') { c = getChar(); if (c == 'u') { identifierStart = true; isUnicodeEscapeStart = true; stringBufferTop = 0; } else { identifierStart = false; ungetChar(c); c = '\\'; } } else { identifierStart = Character.isJavaIdentifierStart((char)c); if (identifierStart) { stringBufferTop = 0; addToString(c); } } if (identifierStart) { boolean containsEscape = isUnicodeEscapeStart; for (;;) { if (isUnicodeEscapeStart) { // strictly speaking we should probably push-back // all the bad characters if the <backslash>uXXXX // sequence is malformed. But since there isn't a // correct context(is there?) for a bad Unicode // escape sequence in an identifier, we can report // an error here. int escapeVal = 0; for (int i = 0; i != 4; ++i) { c = getChar(); escapeVal = Kit.xDigitToInt(c, escapeVal); // Next check takes care about c < 0 and bad escape if (escapeVal < 0) { break; } } if (escapeVal < 0) { parser.addError("msg.invalid.escape"); return Token.ERROR; } addToString(escapeVal); isUnicodeEscapeStart = false; } else { c = getChar(); if (c == '\\') { c = getChar(); if (c == 'u') { isUnicodeEscapeStart = true; containsEscape = true; } else { parser.addError("msg.illegal.character"); return Token.ERROR; } } else { if (c == EOF_CHAR || !Character.isJavaIdentifierPart((char)c)) { break; } addToString(c); } } } ungetChar(c); String str = getStringFromBuffer(); if (!containsEscape) { // OPT we shouldn't have to make a string (object!) to // check if it's a keyword. // Return the corresponding token if it's a keyword int result = stringToKeyword(str); if (result != Token.EOF) { if ((result == Token.LET || result == Token.YIELD) && parser.compilerEnv.getLanguageVersion() < Context.VERSION_1_7) { // LET and YIELD are tokens only in 1.7 and later result = Token.NAME; } if (result != Token.RESERVED) { return result; } else if (!parser.compilerEnv. 
isReservedKeywordAsIdentifier()) { return result; } else { // If implementation permits to use future reserved // keywords in violation with the EcmaScript, // treat it as name but issue warning parser.addWarning("msg.reserved.keyword", str); } } } this.string = (String)allStrings.intern(str); return Token.NAME; } // is it a number? if (isDigit(c) || (c == '.' && isDigit(peekChar()))) { stringBufferTop = 0; int base = 10; if (c == '0') { c = getChar(); if (c == 'x' || c == 'X') { base = 16; c = getChar(); } else if (isDigit(c)) { base = 8; } else { addToString('0'); } } if (base == 16) { while (0 <= Kit.xDigitToInt(c, 0)) { addToString(c); c = getChar(); } } else { while ('0' <= c && c <= '9') { /* * We permit 08 and 09 as decimal numbers, which * makes our behavior a superset of the ECMA * numeric grammar. We might not always be so * permissive, so we warn about it. */ if (base == 8 && c >= '8') { parser.addWarning("msg.bad.octal.literal", c == '8' ? "8" : "9"); base = 10; } addToString(c); c = getChar(); } } boolean isInteger = true; if (base == 10 && (c == '.' || c == 'e' || c == 'E')) { isInteger = false; if (c == '.') { do { addToString(c); c = getChar(); } while (isDigit(c)); } if (c == 'e' || c == 'E') { addToString(c); c = getChar(); if (c == '+' || c == '-') { addToString(c); c = getChar(); } if (!isDigit(c)) { parser.addError("msg.missing.exponent"); return Token.ERROR; } do { addToString(c); c = getChar(); } while (isDigit(c)); } } ungetChar(c); String numString = getStringFromBuffer(); double dval; if (base == 10 && !isInteger) { try { // Use Java conversion to number from string... dval = Double.valueOf(numString).doubleValue(); } catch (NumberFormatException ex) { parser.addError("msg.caught.nfe"); return Token.ERROR; } } else { dval = ScriptRuntime.stringToNumber(numString, 0, base); } this.number = dval; return Token.NUMBER; } // is it a string? 
if (c == '"' || c == '\'') { // We attempt to accumulate a string the fast way, by // building it directly out of the reader. But if there // are any escaped characters in the string, we revert to // building it out of a StringBuffer. int quoteChar = c; stringBufferTop = 0; c = getChar(); strLoop: while (c != quoteChar) { if (c == '\n' || c == EOF_CHAR) { ungetChar(c); parser.addError("msg.unterminated.string.lit"); return Token.ERROR; } if (c == '\\') { // We've hit an escaped character int escapeVal; c = getChar(); switch (c) { case 'b': c = '\b'; break; case 'f': c = '\f'; break; case 'n': c = '\n'; break; case 'r': c = '\r'; break; case 't': c = '\t'; break; // \v a late addition to the ECMA spec, // it is not in Java, so use 0xb case 'v': c = 0xb; break; case 'u': // Get 4 hex digits; if the u escape is not // followed by 4 hex digits, use 'u' + the // literal character sequence that follows. int escapeStart = stringBufferTop; addToString('u'); escapeVal = 0; for (int i = 0; i != 4; ++i) { c = getChar(); escapeVal = Kit.xDigitToInt(c, escapeVal); if (escapeVal < 0) { continue strLoop; } addToString(c); } // prepare for replace of stored 'u' sequence // by escape value stringBufferTop = escapeStart; c = escapeVal; break; case 'x': // Get 2 hex digits, defaulting to 'x'+literal // sequence, as above. 
c = getChar(); escapeVal = Kit.xDigitToInt(c, 0); if (escapeVal < 0) { addToString('x'); continue strLoop; } else { int c1 = c; c = getChar(); escapeVal = Kit.xDigitToInt(c, escapeVal); if (escapeVal < 0) { addToString('x'); addToString(c1); continue strLoop; } else { // got 2 hex digits c = escapeVal; } } break; case '\n': // Remove line terminator after escape to follow // SpiderMonkey and C/C++ c = getChar(); continue strLoop; default: if ('0' <= c && c < '8') { int val = c - '0'; c = getChar(); if ('0' <= c && c < '8') { val = 8 * val + c - '0'; c = getChar(); if ('0' <= c && c < '8' && val <= 037) { // c is 3rd char of octal sequence only // if the resulting val <= 0377 val = 8 * val + c - '0'; c = getChar(); } } ungetChar(c); c = val; } } } addToString(c); c = getChar(); } String str = getStringFromBuffer(); this.string = (String)allStrings.intern(str); return Token.STRING; } switch (c) { case ';': return Token.SEMI; case '[': return Token.LB; case ']': return Token.RB; case '{': return Token.LC; case '}': return Token.RC; case '(': return Token.LP; case ')': return Token.RP; case ',': return Token.COMMA; case '?': return Token.HOOK; case ':': if (matchChar(':')) { return Token.COLONCOLON; } else { return Token.COLON; } case '.': if (matchChar('.')) { return Token.DOTDOT; } else if (matchChar('(')) { return Token.DOTQUERY; } else { return Token.DOT; } case '|': if (matchChar('|')) { return Token.OR; } else if (matchChar('=')) { return Token.ASSIGN_BITOR; } else { return Token.BITOR; } case '^': if (matchChar('=')) { return Token.ASSIGN_BITXOR; } else { return Token.BITXOR; } case '&': if (matchChar('&')) { return Token.AND; } else if (matchChar('=')) { return Token.ASSIGN_BITAND; } else { return Token.BITAND; } case '=': if (matchChar('=')) { if (matchChar('=')) return Token.SHEQ; else return Token.EQ; } else { return Token.ASSIGN; } case '!': if (matchChar('=')) { if (matchChar('=')) return Token.SHNE; else return Token.NE; } else { return Token.NOT; } case 
'<': /* NB:treat HTML begin-comment as comment-till-eol */ if (matchChar('!')) { if (matchChar('-')) { if (matchChar('-')) { skipLine(); continue retry; } ungetChar('-'); } ungetChar('!'); } if (matchChar('<')) { if (matchChar('=')) { return Token.ASSIGN_LSH; } else { return Token.LSH; } } else { if (matchChar('=')) { return Token.LE; } else { return Token.LT; } } case '>': if (matchChar('>')) { if (matchChar('>')) { if (matchChar('=')) { return Token.ASSIGN_URSH; } else { return Token.URSH; } } else { if (matchChar('=')) { return Token.ASSIGN_RSH; } else { return Token.RSH; } } } else { if (matchChar('=')) { return Token.GE; } else { return Token.GT; } } case '*': if (matchChar('=')) { return Token.ASSIGN_MUL; } else { return Token.MUL; } case '/': // is it a // comment? if (matchChar('/')) { skipLine(); continue retry; } if (matchChar('*')) { boolean lookForSlash = false; /*yuicompressor patch start*/ StringBuffer sb = new StringBuffer(); for (;;) { c = getChar(); if (c == EOF_CHAR) { parser.addError("msg.unterminated.comment"); return Token.ERROR; } sb.append((char) c); if (c == '*') { lookForSlash = true; } else if (c == '/') { if (lookForSlash) { sb.delete(sb.length()-2, sb.length()); String s = sb.toString(); if (s.startsWith("!") || s.startsWith("@cc_on") || s.startsWith("@if") || s.startsWith("@elif") || s.startsWith("@else") || s.startsWith("@end")) { if (s.startsWith("!")) { // Remove the leading '!' 
this.string = s.substring(1); } else { this.string = s; } return Token.SPECIALCOMMENT; } else { continue retry; } } } else { lookForSlash = false; } } /*yuicompressor patch end*/ } if (matchChar('=')) { return Token.ASSIGN_DIV; } else { return Token.DIV; } case '%': if (matchChar('=')) { return Token.ASSIGN_MOD; } else { return Token.MOD; } case '~': return Token.BITNOT; case '+': if (matchChar('=')) { return Token.ASSIGN_ADD; } else if (matchChar('+')) { return Token.INC; } else { return Token.ADD; } case '-': if (matchChar('=')) { c = Token.ASSIGN_SUB; } else if (matchChar('-')) { if (!dirtyLine) { // treat HTML end-comment after possible whitespace // after line start as comment-utill-eol if (matchChar('>')) { skipLine(); continue retry; } } c = Token.DEC; } else { c = Token.SUB; } dirtyLine = true; return c; default: parser.addError("msg.illegal.character"); return Token.ERROR; } } } private static boolean isAlpha(int c) { // Use 'Z' < 'a' if (c <= 'Z') { return 'A' <= c; } else { return 'a' <= c && c <= 'z'; } } static boolean isDigit(int c) { return '0' <= c && c <= '9'; } /* As defined in ECMA. jsscan.c uses C isspace() (which allows * \v, I think.) note that code in getChar() implicitly accepts * '\r' == \u000D as well. */ static boolean isJSSpace(int c) { if (c <= 127) { return c == 0x20 || c == 0x9 || c == 0xC || c == 0xB; } else { return c == 0xA0 || Character.getType((char)c) == Character.SPACE_SEPARATOR; } } private static boolean isJSFormatChar(int c) { return c > 127 && Character.getType((char)c) == Character.FORMAT; } /** * Parser calls the method when it gets / or /= in literal context. 
 */
    /**
     * Re-scans the characters following a '/' (or '/=') as a regular
     * expression literal. The pattern body is collected into the string
     * buffer, followed by any 'g', 'i', 'm' flags; the two pieces are
     * stored in {@code this.string} and {@code this.regExpFlags}.
     *
     * @param startToken either Token.DIV or Token.ASSIGN_DIV (the token the
     *        parser mis-scanned before deciding this is a regexp)
     */
    void readRegExp(int startToken) throws IOException
    {
        stringBufferTop = 0;
        if (startToken == Token.ASSIGN_DIV) {
            // Miss-scanned /=  — the '=' is really the first pattern char
            addToString('=');
        } else {
            if (startToken != Token.DIV) Kit.codeBug();
        }
        int c;
        // Collect the pattern body up to the closing, unescaped '/'
        while ((c = getChar()) != '/') {
            if (c == '\n' || c == EOF_CHAR) {
                // Regexp literals may not span lines
                ungetChar(c);
                throw parser.reportError("msg.unterminated.re.lit");
            }
            if (c == '\\') {
                // Keep the backslash and take the next char verbatim,
                // so an escaped '/' does not terminate the literal
                addToString(c);
                c = getChar();
            }
            addToString(c);
        }
        int reEnd = stringBufferTop;

        // Collect the (possibly empty) flag characters after the pattern
        while (true) {
            if (matchChar('g'))
                addToString('g');
            else if (matchChar('i'))
                addToString('i');
            else if (matchChar('m'))
                addToString('m');
            else
                break;
        }

        // Any other letter directly after the flags is an error
        if (isAlpha(peekChar())) {
            throw parser.reportError("msg.invalid.re.flag");
        }

        this.string = new String(stringBuffer, 0, reEnd);
        this.regExpFlags = new String(stringBuffer, reEnd,
                                      stringBufferTop - reEnd);
    }

    /** Returns whether the XML tokenizer is currently inside an attribute. */
    boolean isXMLAttribute()
    {
        return xmlIsAttribute;
    }

    /**
     * Resets the XML tokenizer state and scans the first XML token,
     * starting from the '<' that the caller already consumed.
     */
    int getFirstXMLToken() throws IOException
    {
        xmlOpenTagsCount = 0;
        xmlIsAttribute = false;
        xmlIsTagContent = false;
        ungetChar('<');
        return getNextXMLToken();
    }

    /**
     * Scans the next chunk of an E4X XML literal. Returns Token.XML when a
     * '{' (embedded JS expression) is hit, Token.XMLEND when all opened tags
     * are balanced and closed, or Token.ERROR on malformed XML / EOF.
     */
    int getNextXMLToken() throws IOException
    {
        stringBufferTop = 0; // remember the XML

        for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
            if (xmlIsTagContent) {
                // Inside <...>: track attributes, tag end, self-closing tags
                switch (c) {
                case '>':
                    addToString(c);
                    xmlIsTagContent = false;
                    xmlIsAttribute = false;
                    break;
                case '/':
                    addToString(c);
                    if (peekChar() == '>') {
                        // Self-closing tag: "/>" ends the tag content
                        c = getChar();
                        addToString(c);
                        xmlIsTagContent = false;
                        xmlOpenTagsCount--;
                    }
                    break;
                case '{':
                    // Embedded JS expression: hand back what we have so far
                    ungetChar(c);
                    this.string = getStringFromBuffer();
                    return Token.XML;
                case '\'':
                case '"':
                    addToString(c);
                    if (!readQuotedString(c)) return Token.ERROR;
                    break;
                case '=':
                    addToString(c);
                    xmlIsAttribute = true;
                    break;
                case ' ':
                case '\t':
                case '\r':
                case '\n':
                    addToString(c);
                    break;
                default:
                    addToString(c);
                    xmlIsAttribute = false;
                    break;
                }

                if (!xmlIsTagContent && xmlOpenTagsCount == 0) {
                    // All tags balanced: the XML literal is complete
                    this.string = getStringFromBuffer();
                    return Token.XMLEND;
                }
            } else {
                // Outside tag content: look for tag starts, comments,
                // CDATA, entities, processing instructions, or '{'
                switch (c) {
                case '<':
                    addToString(c);
                    c = peekChar();
                    switch (c) {
                    case '!':
                        c = getChar(); // Skip !
                        addToString(c);
                        c = peekChar();
                        switch (c) {
                        case '-':
                            // Expect "<!--" comment
                            c = getChar(); // Skip -
                            addToString(c);
                            c = getChar();
                            if (c == '-') {
                                addToString(c);
                                if (!readXmlComment()) return Token.ERROR;
                            } else {
                                // throw away the string in progress
                                stringBufferTop = 0;
                                this.string = null;
                                parser.addError("msg.XML.bad.form");
                                return Token.ERROR;
                            }
                            break;
                        case '[':
                            // Expect "<![CDATA[" section
                            c = getChar(); // Skip [
                            addToString(c);
                            if (getChar() == 'C' &&
                                getChar() == 'D' &&
                                getChar() == 'A' &&
                                getChar() == 'T' &&
                                getChar() == 'A' &&
                                getChar() == '[')
                            {
                                addToString('C');
                                addToString('D');
                                addToString('A');
                                addToString('T');
                                addToString('A');
                                addToString('[');
                                if (!readCDATA()) return Token.ERROR;
                            } else {
                                // throw away the string in progress
                                stringBufferTop = 0;
                                this.string = null;
                                parser.addError("msg.XML.bad.form");
                                return Token.ERROR;
                            }
                            break;
                        default:
                            // "<!DOCTYPE ..." style declaration
                            if (!readEntity()) return Token.ERROR;
                            break;
                        }
                        break;
                    case '?':
                        // "<?...?>" processing instruction
                        c = getChar(); // Skip ?
                        addToString(c);
                        if (!readPI()) return Token.ERROR;
                        break;
                    case '/':
                        // End tag
                        c = getChar(); // Skip /
                        addToString(c);
                        if (xmlOpenTagsCount == 0) {
                            // Closing tag with nothing open: malformed
                            // throw away the string in progress
                            stringBufferTop = 0;
                            this.string = null;
                            parser.addError("msg.XML.bad.form");
                            return Token.ERROR;
                        }
                        xmlIsTagContent = true;
                        xmlOpenTagsCount--;
                        break;
                    default:
                        // Start tag
                        xmlIsTagContent = true;
                        xmlOpenTagsCount++;
                        break;
                    }
                    break;
                case '{':
                    // Embedded JS expression in text content
                    ungetChar(c);
                    this.string = getStringFromBuffer();
                    return Token.XML;
                default:
                    addToString(c);
                    break;
                }
            }
        }

        // EOF reached before the literal was complete
        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return Token.ERROR;
    }

    /**
     * Copies characters up to and including the closing quote into the
     * string buffer. Returns false (and reports an error) on EOF.
     */
    private boolean readQuotedString(int quote) throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
            addToString(c);
            if (c == quote) return true;
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }

    /**
     * Consumes the body of an XML comment up to and including the closing
     * "-->". Returns false (and reports an error) on EOF.
     */
    private boolean readXmlComment() throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR;) {
            addToString(c);
            if (c == '-' && peekChar() == '-') {
                c = getChar();
                addToString(c);
                if (peekChar() == '>') {
                    c = getChar(); // Skip >
                    addToString(c);
                    return true;
                } else {
                    continue;
                }
            }
            c = getChar();
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }

    /**
     * Consumes a CDATA section up to and including the closing "]]>".
     * Returns false (and reports an error) on EOF.
     */
    private boolean readCDATA() throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR;) {
            addToString(c);
            if (c == ']' && peekChar() == ']') {
                c = getChar();
                addToString(c);
                if (peekChar() == '>') {
                    c = getChar(); // Skip >
                    addToString(c);
                    return true;
                } else {
                    continue;
                }
            }
            c = getChar();
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }

    /**
     * Consumes a "<!...>" declaration, tracking nested '<'/'>' pairs.
     * Called after "<!" has been read. Returns false on EOF.
     */
    private boolean readEntity() throws IOException
    {
        int declTags = 1;
        for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
            addToString(c);
            switch (c) {
            case '<':
                declTags++;
                break;
            case '>':
                declTags--;
                if (declTags == 0) return true;
                break;
            }
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }

    /**
     * Consumes a processing instruction up to and including the closing
     * "?>". Returns false (and reports an error) on EOF.
     */
    private boolean readPI() throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
            addToString(c);
            if (c == '?' && peekChar() == '>') {
                c = getChar(); // Skip >
                addToString(c);
                return true;
            }
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }

    /** Returns the accumulated buffer contents as a String. */
    private String getStringFromBuffer()
    {
        return new String(stringBuffer, 0, stringBufferTop);
    }

    /** Appends one char to the growable string buffer, doubling as needed. */
    private void addToString(int c)
    {
        int N = stringBufferTop;
        if (N == stringBuffer.length) {
            char[] tmp = new char[stringBuffer.length * 2];
            System.arraycopy(stringBuffer, 0, tmp, 0, N);
            stringBuffer = tmp;
        }
        stringBuffer[N] = (char)c;
        stringBufferTop = N + 1;
    }

    /** Pushes a char back onto the input (up to ungetBuffer.length deep). */
    private void ungetChar(int c)
    {
        // can not unread past across line boundary
        if (ungetCursor != 0 && ungetBuffer[ungetCursor - 1] == '\n')
            Kit.codeBug();
        ungetBuffer[ungetCursor++] = c;
    }

    /** Consumes the next char only if it equals {@code test}. */
    private boolean matchChar(int test) throws IOException
    {
        int c = getChar();
        if (c == test) {
            return true;
        } else {
            ungetChar(c);
            return false;
        }
    }

    /** Returns the next char without consuming it. */
    private int peekChar() throws IOException
    {
        int c = getChar();
        ungetChar(c);
        return c;
    }

    /**
     * Returns the next input char, serving the unget buffer first.
     * Normalizes all line terminators to '\n' (a "\r\n" pair yields a
     * single '\n'), skips Unicode FORMAT chars, and maintains the
     * lineStart/lineno bookkeeping used for error reporting.
     */
    private int getChar() throws IOException
    {
        if (ungetCursor != 0) {
            return ungetBuffer[--ungetCursor];
        }

        for(;;) {
            int c;
            if (sourceString != null) {
                // String-backed input
                if (sourceCursor == sourceEnd) {
                    hitEOF = true;
                    return EOF_CHAR;
                }
                c = sourceString.charAt(sourceCursor++);
            } else {
                // Reader-backed input: refill the buffer on demand
                if (sourceCursor == sourceEnd) {
                    if (!fillSourceBuffer()) {
                        hitEOF = true;
                        return EOF_CHAR;
                    }
                }
                c = sourceBuffer[sourceCursor++];
            }

            if (lineEndChar >= 0) {
                // Previous char ended a line; fold "\r\n" into one '\n'
                if (lineEndChar == '\r' && c == '\n') {
                    lineEndChar = '\n';
                    continue;
                }
                lineEndChar = -1;
                lineStart = sourceCursor - 1;
                lineno++;
            }

            if (c <= 127) {
                if (c == '\n' || c == '\r') {
                    lineEndChar = c;
                    c = '\n';
                }
            } else {
                if (isJSFormatChar(c)) {
                    // Cf-category chars are transparent to the scanner
                    continue;
                }
                if (ScriptRuntime.isJSLineTerminator(c)) {
                    lineEndChar = c;
                    c = '\n';
                }
            }
            return c;
        }
    }

    /** Consumes chars up to (but not including) the next '\n' or EOF. */
    private void skipLine() throws IOException
    {
        // skip to end of line
        int c;
        while ((c = getChar()) != EOF_CHAR && c != '\n') { }
        ungetChar(c);
    }

    /** Returns the 0-based column offset of the cursor within the line. */
    final int getOffset()
    {
        int n = sourceCursor - lineStart;
        if (lineEndChar >= 0) { --n; }
        return n;
    }
    /**
     * Returns the text of the current source line, for error messages.
     * Works for both the String-backed and Reader-backed input modes;
     * in the Reader case it may read ahead to find the line end.
     */
    final String getLine()
    {
        if (sourceString != null) {
            // String case
            int lineEnd = sourceCursor;
            if (lineEndChar >= 0) {
                --lineEnd;
            } else {
                for (; lineEnd != sourceEnd; ++lineEnd) {
                    int c = sourceString.charAt(lineEnd);
                    if (ScriptRuntime.isJSLineTerminator(c)) {
                        break;
                    }
                }
            }
            return sourceString.substring(lineStart, lineEnd);
        } else {
            // Reader case
            int lineLength = sourceCursor - lineStart;
            if (lineEndChar >= 0) {
                --lineLength;
            } else {
                // Read until the end of line
                for (;; ++lineLength) {
                    int i = lineStart + lineLength;
                    if (i == sourceEnd) {
                        try {
                            if (!fillSourceBuffer()) { break; }
                        } catch (IOException ioe) {
                            // ignore it, we're already displaying an error...
                            break;
                        }
                        // i recalculation as fillSourceBuffer can move saved
                        // line buffer and change lineStart
                        i = lineStart + lineLength;
                    }
                    int c = sourceBuffer[i];
                    if (ScriptRuntime.isJSLineTerminator(c)) {
                        break;
                    }
                }
            }
            return new String(sourceBuffer, lineStart, lineLength);
        }
    }

    /**
     * Reads more chars from the Reader into sourceBuffer. When the buffer
     * is full, it first compacts it (discarding chars before lineStart) or,
     * if the current line fills the whole buffer, doubles its size.
     *
     * @return false when the Reader is exhausted, true otherwise
     */
    private boolean fillSourceBuffer() throws IOException
    {
        if (sourceString != null) Kit.codeBug();
        if (sourceEnd == sourceBuffer.length) {
            if (lineStart != 0) {
                System.arraycopy(sourceBuffer, lineStart, sourceBuffer, 0,
                                 sourceEnd - lineStart);
                sourceEnd -= lineStart;
                sourceCursor -= lineStart;
                lineStart = 0;
            } else {
                char[] tmp = new char[sourceBuffer.length * 2];
                System.arraycopy(sourceBuffer, 0, tmp, 0, sourceEnd);
                sourceBuffer = tmp;
            }
        }
        int n = sourceReader.read(sourceBuffer, sourceEnd,
                                  sourceBuffer.length - sourceEnd);
        if (n < 0) { return false; }
        sourceEnd += n;
        return true;
    }

    // stuff other than whitespace since start of line
    private boolean dirtyLine;

    String regExpFlags;

    // Set this to an initial non-null value so that the Parser has
    // something to retrieve even if an error has occurred and no
    // string is found.  Fosters one class of error, but saves lots of
    // code.
    private String string = "";
    private double number;

    // Growable scratch buffer used while scanning strings/regexps/XML
    private char[] stringBuffer = new char[128];
    private int stringBufferTop;
    // Interning table so identical literals share one String instance
    private ObjToIntMap allStrings = new ObjToIntMap(50);

    // Room to backtrace from to < on failed match of the last - in <!--
    private final int[] ungetBuffer = new int[3];
    private int ungetCursor;

    private boolean hitEOF = false;

    // Position bookkeeping for error reporting (see getChar/getLine)
    private int lineStart = 0;
    private int lineno;
    private int lineEndChar = -1;

    // Exactly one of sourceString / sourceReader is non-null
    private String sourceString;
    private Reader sourceReader;
    private char[] sourceBuffer;
    private int sourceEnd;
    private int sourceCursor;

    // for xml tokenizer
    private boolean xmlIsAttribute;
    private boolean xmlIsTagContent;
    private int xmlOpenTagsCount;

    private Parser parser;
}
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */

package org.oep.dossiermgt.model;

import com.liferay.portal.kernel.bean.AutoEscape;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.CacheModel;
import com.liferay.portal.model.StagedModel;
import com.liferay.portal.service.ServiceContext;
import com.liferay.portlet.expando.model.ExpandoBridge;

import java.io.Serializable;

import java.util.Date;

/**
 * The base model interface for the DocFile service. Represents a row in the
 * &quot;oep_dossiermgt_docfile&quot; database table, with each column mapped
 * to a property of this class.
 *
 * <p>
 * This interface and its corresponding implementation
 * {@link org.oep.dossiermgt.model.impl.DocFileModelImpl} exist only as a
 * container for the default property accessors generated by ServiceBuilder.
 * Helper methods and all application logic should be put in
 * {@link org.oep.dossiermgt.model.impl.DocFileImpl}.
 * </p>
 *
 * @author trungdk
 * @see DocFile
 * @see org.oep.dossiermgt.model.impl.DocFileImpl
 * @see org.oep.dossiermgt.model.impl.DocFileModelImpl
 * @generated
 */
public interface DocFileModel extends BaseModel<DocFile>, StagedModel {
	/*
	 * NOTE FOR DEVELOPERS:
	 *
	 * Never modify or reference this interface directly. All methods that
	 * expect a doc file model instance should use the {@link DocFile}
	 * interface instead.
	 */

	/** Returns the primary key of this doc file. */
	public long getPrimaryKey();

	/** Sets the primary key of this doc file. */
	public void setPrimaryKey(long primaryKey);

	/** Returns the uuid of this doc file (HTML-escaped on output). */
	@AutoEscape
	@Override
	public String getUuid();

	/** Sets the uuid of this doc file. */
	@Override
	public void setUuid(String uuid);

	/** Returns the doc file ID of this doc file. */
	public long getDocFileId();

	/** Sets the doc file ID of this doc file. */
	public void setDocFileId(long docFileId);

	/** Returns the user ID of this doc file. */
	public long getUserId();

	/** Sets the user ID of this doc file. */
	public void setUserId(long userId);

	/**
	 * Returns the user uuid of this doc file.
	 *
	 * @throws SystemException if a system exception occurred
	 */
	public String getUserUuid() throws SystemException;

	/** Sets the user uuid of this doc file. */
	public void setUserUuid(String userUuid);

	/** Returns the group ID of this doc file. */
	public long getGroupId();

	/** Sets the group ID of this doc file. */
	public void setGroupId(long groupId);

	/** Returns the company ID of this doc file. */
	@Override
	public long getCompanyId();

	/** Sets the company ID of this doc file. */
	@Override
	public void setCompanyId(long companyId);

	/** Returns the create date of this doc file. */
	@Override
	public Date getCreateDate();

	/** Sets the create date of this doc file. */
	@Override
	public void setCreateDate(Date createDate);

	/** Returns the modified date of this doc file. */
	@Override
	public Date getModifiedDate();

	/** Sets the modified date of this doc file. */
	@Override
	public void setModifiedDate(Date modifiedDate);

	/** Returns the dossier ID of this doc file. */
	public long getDossierId();

	/** Sets the dossier ID of this doc file. */
	public void setDossierId(long dossierId);

	/** Returns the dossier doc ID of this doc file. */
	public long getDossierDocId();

	/** Sets the dossier doc ID of this doc file. */
	public void setDossierDocId(long dossierDocId);

	/** Returns the doc template ID of this doc file. */
	public long getDocTemplateId();

	/** Sets the doc template ID of this doc file. */
	public void setDocTemplateId(long docTemplateId);

	/** Returns the doc file version ID of this doc file. */
	public long getDocFileVersionId();

	/** Sets the doc file version ID of this doc file. */
	public void setDocFileVersionId(long docFileVersionId);

	/** Returns the doc file name of this doc file (HTML-escaped on output). */
	@AutoEscape
	public String getDocFileName();

	/** Sets the doc file name of this doc file. */
	public void setDocFileName(String docFileName);

	/** Returns the doc file type of this doc file. */
	public long getDocFileType();

	/** Sets the doc file type of this doc file. */
	public void setDocFileType(long docFileType);

	/** Returns the verify status of this doc file. */
	public int getVerifyStatus();

	/** Sets the verify status of this doc file. */
	public void setVerifyStatus(int verifyStatus);

	/** Returns the note of this doc file (HTML-escaped on output). */
	@AutoEscape
	public String getNote();

	/** Sets the note of this doc file. */
	public void setNote(String note);

	/** Returns the approve by of this doc file (HTML-escaped on output). */
	@AutoEscape
	public String getApproveBy();

	/** Sets the approve by of this doc file. */
	public void setApproveBy(String approveBy);

	/** Returns the approve date of this doc file. */
	public Date getApproveDate();

	/** Sets the approve date of this doc file. */
	public void setApproveDate(Date approveDate);

	/** Returns the premier of this doc file. */
	public int getPremier();

	/** Sets the premier of this doc file. */
	public void setPremier(int premier);

	// Standard BaseModel plumbing inherited from Liferay ServiceBuilder.

	@Override
	public boolean isNew();

	@Override
	public void setNew(boolean n);

	@Override
	public boolean isCachedModel();

	@Override
	public void setCachedModel(boolean cachedModel);

	@Override
	public boolean isEscapedModel();

	@Override
	public Serializable getPrimaryKeyObj();

	@Override
	public void setPrimaryKeyObj(Serializable primaryKeyObj);

	@Override
	public ExpandoBridge getExpandoBridge();

	@Override
	public void setExpandoBridgeAttributes(BaseModel<?> baseModel);

	@Override
	public void setExpandoBridgeAttributes(ExpandoBridge expandoBridge);

	@Override
	public void setExpandoBridgeAttributes(ServiceContext serviceContext);

	@Override
	public Object clone();

	@Override
	public int compareTo(DocFile docFile);

	@Override
	public int hashCode();

	@Override
	public CacheModel<DocFile> toCacheModel();

	@Override
	public DocFile toEscapedModel();

	@Override
	public DocFile toUnescapedModel();

	@Override
	public String toString();

	@Override
	public String toXmlString();
}
/* * www.javagl.de - JglTF * * Copyright 2015-2016 Marco Hutter - http://www.javagl.de * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. 
*/ package de.javagl.jgltf.viewer.jogl; import static com.jogamp.opengl.GL.GL_ELEMENT_ARRAY_BUFFER; import static com.jogamp.opengl.GL.GL_STATIC_DRAW; import static com.jogamp.opengl.GL.GL_TEXTURE0; import static com.jogamp.opengl.GL.GL_TEXTURE_2D; import static com.jogamp.opengl.GL.GL_TEXTURE_MAG_FILTER; import static com.jogamp.opengl.GL.GL_TEXTURE_MIN_FILTER; import static com.jogamp.opengl.GL.GL_TEXTURE_WRAP_S; import static com.jogamp.opengl.GL.GL_TEXTURE_WRAP_T; import static com.jogamp.opengl.GL.GL_TRUE; import static com.jogamp.opengl.GL2ES2.GL_COMPILE_STATUS; import static com.jogamp.opengl.GL2ES2.GL_FRAGMENT_SHADER; import static com.jogamp.opengl.GL2ES2.GL_INFO_LOG_LENGTH; import static com.jogamp.opengl.GL2ES2.GL_VALIDATE_STATUS; import static com.jogamp.opengl.GL2ES2.GL_VERTEX_SHADER; import static com.jogamp.opengl.GL2ES3.GL_TEXTURE_BASE_LEVEL; import static com.jogamp.opengl.GL2ES3.GL_TEXTURE_MAX_LEVEL; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.IntBuffer; import java.nio.charset.Charset; import java.util.logging.Logger; import com.jogamp.opengl.GL3; import de.javagl.jgltf.model.GltfConstants; import de.javagl.jgltf.viewer.GlContext; /** * Implementation of a {@link GlContext} based on JOGL */ class GlContextJogl implements GlContext { /** * The logger used in this class */ private static final Logger logger = Logger.getLogger(GlContextJogl.class.getName()); /** * The actual GL context */ private GL3 gl; /** * Set the GL context to be used internally * * @param gl The GL context */ void setGL(GL3 gl) { this.gl = gl; } @Override public Integer createGlProgram( String vertexShaderSource, String fragmentShaderSource) { if (vertexShaderSource == null) { logger.warning("The vertexShaderSource is null"); return null; } if (fragmentShaderSource == null) { logger.warning("The fragmentShaderSource is null"); return null; } logger.fine("Creating vertex shader..."); Integer glVertexShader = createGlShader( GL_VERTEX_SHADER, 
vertexShaderSource); if (glVertexShader == null) { logger.warning("Creating vertex shader FAILED"); return null; } logger.fine("Creating vertex shader DONE"); logger.fine("Creating fragment shader..."); Integer glFragmentShader = createGlShader( GL_FRAGMENT_SHADER, fragmentShaderSource); if (glFragmentShader == null) { logger.warning("Creating fragment shader FAILED"); return null; } logger.fine("Creating fragment shader DONE"); int glProgram = gl.glCreateProgram(); gl.glAttachShader(glProgram, glVertexShader); gl.glDeleteShader(glVertexShader); gl.glAttachShader(glProgram, glFragmentShader); gl.glDeleteShader(glFragmentShader); gl.glLinkProgram(glProgram); gl.glValidateProgram(glProgram); int validateStatus[] = { 0 }; gl.glGetProgramiv(glProgram, GL_VALIDATE_STATUS, validateStatus, 0); if (validateStatus[0] != GL_TRUE) { printProgramLogInfo(glProgram); return null; } return glProgram; } @Override public void useGlProgram(int glProgram) { gl.glUseProgram(glProgram); } @Override public void deleteGlProgram(int glProgram) { gl.glDeleteProgram(glProgram); } @Override public void enable(Iterable<? extends Number> states) { if (states != null) { for (Number state : states) { if (state != null) { gl.glEnable(state.intValue()); } } } } @Override public void disable(Iterable<? extends Number> states) { if (states != null) { for (Number state : states) { if (state != null) { gl.glDisable(state.intValue()); } } } } /** * Creates an OpenGL shader with the given type, from the given source * code, and returns the GL shader object. If the shader cannot be * compiled, then <code>null</code> will be returned. 
* * @param shaderType The shader type * @param shaderSource The shader source code * @return The GL shader */ private Integer createGlShader(int shaderType, String shaderSource) { Integer glShader = createGlShaderImpl(shaderType, shaderSource); if (glShader != null) { return glShader; } // If the shader source code does not contain a #version number, // then, depending on the com.jogamp.opengl.GLProfile that was // chosen for the viewer, certain warnings may be treated as // errors. As a workaround, pragmatically insert a version // number and try again... // (Also see https://github.com/javagl/JglTF/issues/12) if (!shaderSource.contains("#version")) { String versionString = "#version 120"; logger.warning("Inserting GLSL version specifier \"" + versionString + "\" in shader code"); String shaderSourceWithVersion = versionString + "\n" + shaderSource; return createGlShaderImpl(shaderType, shaderSourceWithVersion); } return null; } /** * Implementation for {@link #createGlShader(int, String)}. 
* * @param shaderType The shader type * @param shaderSource The shader source code * @return The GL shader, or <code>null</code> if it cannot be compiled */ private Integer createGlShaderImpl(int shaderType, String shaderSource) { int glShader = gl.glCreateShader(shaderType); gl.glShaderSource( glShader, 1, new String[]{shaderSource}, null); gl.glCompileShader(glShader); int compileStatus[] = { 0 }; gl.glGetShaderiv(glShader, GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] != GL_TRUE) { printShaderLogInfo(glShader); return null; } return glShader; } @Override public int getUniformLocation(int glProgram, String uniformName) { gl.glUseProgram(glProgram); return gl.glGetUniformLocation(glProgram, uniformName); } @Override public int getAttributeLocation(int glProgram, String attributeName) { gl.glUseProgram(glProgram); return gl.glGetAttribLocation(glProgram, attributeName); } @Override public void setUniformiv(int type, int location, int count, int value[]) { if (value == null) { logger.warning("Invalid uniform value: " + value); return; } switch (type) { case GltfConstants.GL_INT: case GltfConstants.GL_UNSIGNED_INT: { gl.glUniform1iv(location, count, value, 0); break; } case GltfConstants.GL_INT_VEC2: { gl.glUniform2iv(location, count, value, 0); break; } case GltfConstants.GL_INT_VEC3: { gl.glUniform3iv(location, count, value, 0); break; } case GltfConstants.GL_INT_VEC4: { gl.glUniform4iv(location, count, value, 0); break; } default: logger.warning("Invalid uniform type: " + GltfConstants.stringFor(type)); } } @Override public void setUniformfv(int type, int location, int count, float value[]) { if (value == null) { logger.warning("Invalid uniform value: " + value); return; } switch (type) { case GltfConstants.GL_FLOAT: { gl.glUniform1fv(location, count, value, 0); break; } case GltfConstants.GL_FLOAT_VEC2: { gl.glUniform2fv(location, count, value, 0); break; } case GltfConstants.GL_FLOAT_VEC3: { gl.glUniform3fv(location, count, value, 0); break; } case 
GltfConstants.GL_FLOAT_VEC4: { gl.glUniform4fv(location, count, value, 0); break; } default: logger.warning("Invalid uniform type: " + GltfConstants.stringFor(type)); } } @Override public void setUniformMatrixfv( int type, int location, int count, float value[]) { if (value == null) { logger.warning("Invalid uniform value: " + value); return; } switch (type) { case GltfConstants.GL_FLOAT_MAT2: { gl.glUniformMatrix2fv(location, count, false, value, 0); break; } case GltfConstants.GL_FLOAT_MAT3: { gl.glUniformMatrix3fv(location, count, false, value, 0); break; } case GltfConstants.GL_FLOAT_MAT4: { gl.glUniformMatrix4fv(location, count, false, value, 0); break; } default: logger.warning("Invalid uniform type: " + GltfConstants.stringFor(type)); } } @Override public void setUniformSampler(int location, int textureIndex, int glTexture) { gl.glActiveTexture(GL_TEXTURE0+textureIndex); gl.glBindTexture(GL_TEXTURE_2D, glTexture); gl.glUniform1i(location, textureIndex); } @Override public int createGlVertexArray() { int vertexArrayArray[] = {0}; gl.glGenVertexArrays(1, vertexArrayArray, 0); int glVertexArray = vertexArrayArray[0]; return glVertexArray; } @Override public void deleteGlVertexArray(int glVertexArray) { gl.glDeleteVertexArrays(1, new int[] { glVertexArray }, 0); } @Override public int createGlBufferView( int target, int byteLength, ByteBuffer bufferViewData) { int bufferViewArray[] = {0}; gl.glGenBuffers(1, bufferViewArray, 0); int glBufferView = bufferViewArray[0]; gl.glBindBuffer(target, glBufferView); gl.glBufferData(target, byteLength, bufferViewData, GL_STATIC_DRAW); return glBufferView; } @Override public void createVertexAttribute(int glVertexArray, int target, int glBufferView, int attributeLocation, int size, int type, int stride, int offset) { gl.glBindVertexArray(glVertexArray); gl.glBindBuffer(target, glBufferView); gl.glVertexAttribPointer( attributeLocation, size, type, false, stride, offset); gl.glEnableVertexAttribArray(attributeLocation); } 
@Override public void updateVertexAttribute(int glVertexArray, int target, int glBufferView, int offset, int size, ByteBuffer data) { gl.glBindVertexArray(glVertexArray); gl.glBindBuffer(target, glBufferView); gl.glBufferSubData(target, offset, size, data); } @Override public void deleteGlBufferView(int glBufferView) { gl.glDeleteBuffers(1, new int[] { glBufferView }, 0); } @Override public int createGlTexture( ByteBuffer pixelData, int internalFormat, int width, int height, int format, int type) { int textureArray[] = {0}; gl.glGenTextures(1, textureArray, 0); int glTexture = textureArray[0]; gl.glBindTexture(GL_TEXTURE_2D, glTexture); gl.glTexImage2D( GL_TEXTURE_2D, 0, internalFormat, width, height, 0, format, type, pixelData); return glTexture; } @Override public void setGlTextureParameters(int glTexture, int minFilter, int magFilter, int wrapS, int wrapT) { gl.glBindTexture(GL_TEXTURE_2D, glTexture); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, minFilter); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, magFilter); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrapS); gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrapT); } @Override public void deleteGlTexture(int glTexture) { gl.glDeleteTextures(1, new int[] { glTexture }, 0); } @Override public void renderIndexed( int glVertexArray, int mode, int glIndicesBuffer, int numIndices, int indicesType, int offset) { gl.glBindVertexArray(glVertexArray); gl.glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, glIndicesBuffer); gl.glDrawElements(mode, numIndices, indicesType, offset); } @Override public void renderNonIndexed(int glVertexArray, int mode, int numVertices) { gl.glBindVertexArray(glVertexArray); gl.glDrawArrays(mode, 0, numVertices); } @Override public void setBlendColor(float r, float g, float b, float a) { gl.glBlendColor(r, g, b, a); } @Override public void 
setBlendEquationSeparate(int modeRgb, int modeAlpha) { gl.glBlendEquationSeparate(modeRgb, modeAlpha); } @Override public void setBlendFuncSeparate( int srcRgb, int dstRgb, int srcAlpha, int dstAlpha) { gl.glBlendFuncSeparate(srcRgb, dstRgb, srcAlpha, dstAlpha); } @Override public void setColorMask(boolean r, boolean g, boolean b, boolean a) { gl.glColorMask(r, g, b, a); } @Override public void setCullFace(int mode) { gl.glCullFace(mode); } @Override public void setDepthFunc(int func) { gl.glDepthFunc(func); } @Override public void setDepthMask(boolean mask) { gl.glDepthMask(mask); } @Override public void setDepthRange(float zNear, float zFar) { gl.glDepthRange(zNear, zFar); } @Override public void setFrontFace(int mode) { gl.glFrontFace(mode); } @Override public void setLineWidth(float width) { gl.glLineWidth(width); } @Override public void setPolygonOffset(float factor, float units) { gl.glPolygonOffset(factor, units); } @Override public void setScissor(int x, int y, int width, int height) { gl.glScissor(x, y, width, height); } /** * For debugging: Print shader log info * * @param id shader ID */ private void printShaderLogInfo(int id) { IntBuffer infoLogLength = ByteBuffer .allocateDirect(4) .order(ByteOrder.nativeOrder()) .asIntBuffer(); gl.glGetShaderiv(id, GL_INFO_LOG_LENGTH, infoLogLength); if (infoLogLength.get(0) > 0) { infoLogLength.put(0, infoLogLength.get(0) - 1); } ByteBuffer infoLog = ByteBuffer .allocateDirect(infoLogLength.get(0)) .order(ByteOrder.nativeOrder()); gl.glGetShaderInfoLog(id, infoLogLength.get(0), null, infoLog); String infoLogString = Charset.forName("US-ASCII").decode(infoLog).toString(); if (infoLogString.trim().length() > 0) { logger.warning("shader log:\n"+infoLogString); } } /** * For debugging: Print program log info * * @param id program ID */ private void printProgramLogInfo(int id) { IntBuffer infoLogLength = ByteBuffer .allocateDirect(4) .order(ByteOrder.nativeOrder()) .asIntBuffer(); gl.glGetProgramiv(id, GL_INFO_LOG_LENGTH, 
infoLogLength); if (infoLogLength.get(0) > 0) { infoLogLength.put(0, infoLogLength.get(0) - 1); } ByteBuffer infoLog = ByteBuffer .allocateDirect(infoLogLength.get(0)) .order(ByteOrder.nativeOrder()); gl.glGetProgramInfoLog(id, infoLogLength.get(0), null, infoLog); String infoLogString = Charset.forName("US-ASCII").decode(infoLog).toString(); if (infoLogString.trim().length() > 0) { logger.warning("program log:\n"+infoLogString); } } }
package com.hazelcast.simulator.utils;

import org.junit.Test;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.hazelcast.simulator.utils.CommonUtils.await;
import static com.hazelcast.simulator.utils.CommonUtils.getSimulatorVersion;
import static com.hazelcast.simulator.utils.CommonUtils.joinThread;
import static com.hazelcast.simulator.utils.CommonUtils.rethrow;
import static com.hazelcast.simulator.utils.CommonUtils.sleepMillis;
import static com.hazelcast.simulator.utils.CommonUtils.throwableToString;
import static com.hazelcast.simulator.utils.ReflectionUtils.invokePrivateConstructor;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests for the miscellaneous helpers in {@link CommonUtils}: version lookup,
 * exception rethrow/stringify, interruption-safe thread joining and latch awaiting.
 */
public class CommonUtils_MiscTest {

    // Upper bound (ms) for the thread-coordination tests so a broken
    // join/await cannot hang the build.
    private static final int DEFAULT_TEST_TIMEOUT = 5000;

    @Test
    public void testConstructor() throws Exception {
        // Utility class: invoking the private constructor covers it without
        // exposing a public way to instantiate it.
        invokePrivateConstructor(CommonUtils.class);
    }

    @Test
    public void testGetSimulatorVersion() {
        assertEquals("SNAPSHOT", getSimulatorVersion());
    }

    @Test
    public void testRethrow_RuntimeException() {
        // A RuntimeException must be rethrown as-is, not wrapped.
        Throwable throwable = new RuntimeException();
        try {
            throw rethrow(throwable);
        } catch (RuntimeException e) {
            assertEquals(throwable, e);
        }
    }

    @Test
    public void testRethrow_Throwable() {
        // A checked/raw Throwable must be wrapped, with the original as cause.
        Throwable throwable = new Throwable();
        try {
            throw rethrow(throwable);
        } catch (RuntimeException e) {
            assertEquals(throwable, e.getCause());
        }
    }

    @Test
    public void testThrowableToString() {
        String marker = "#*+*#";
        Throwable throwable = new Throwable(marker);
        String actual = throwableToString(throwable);
        // The rendered stack trace must contain the exception message.
        assertTrue(format("Expected throwable string to contain marker %s, but was %s", marker, actual),
                actual.contains(marker));
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testJoinThread() {
        Thread thread = new Thread() {
            @Override
            public void run() {
                sleepMillis(500);
            }
        };
        thread.start();

        joinThread(thread);
    }

    @Test
    public void testJoinThread_withNull() {
        // Null threads are silently ignored.
        joinThread(null);
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testJoinThread_interrupted() {
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean isInterrupted = new AtomicBoolean();

        // Blocks on the latch until the end of the test, so the joiner below
        // is guaranteed to still be joining when it gets interrupted.
        final Thread thread = new Thread() {
            @Override
            public void run() {
                await(latch);
            }
        };
        // Records whether joinThread() restored its interrupted flag after
        // being interrupted while joining.
        Thread joiner = new Thread() {
            @Override
            public void run() {
                joinThread(thread);
                isInterrupted.set(Thread.currentThread().isInterrupted());
            }
        };

        thread.start();
        joiner.start();
        joiner.interrupt();
        joinThread(joiner);

        latch.countDown();
        joinThread(thread);

        assertTrue(isInterrupted.get());
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testJoinThread_withTimeout() {
        Thread thread = new Thread() {
            @Override
            public void run() {
                sleepMillis(500);
            }
        };
        thread.start();

        // Timeout is far larger than the thread's runtime, so the join
        // completes normally well inside the test timeout.
        joinThread(thread, HOURS.toMillis(1));
    }

    @Test
    public void testJoinThread_withTimeout_withNull() {
        joinThread(null, HOURS.toMillis(1));
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testJoinThread_withTimeout_interrupted() {
        // Same choreography as testJoinThread_interrupted, but for the
        // timeout-taking overload.
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean isInterrupted = new AtomicBoolean();

        final Thread thread = new Thread() {
            @Override
            public void run() {
                await(latch);
            }
        };
        Thread joiner = new Thread() {
            @Override
            public void run() {
                joinThread(thread, HOURS.toMillis(1));
                isInterrupted.set(Thread.currentThread().isInterrupted());
            }
        };

        thread.start();
        joiner.start();
        joiner.interrupt();
        joinThread(joiner);

        latch.countDown();
        joinThread(thread);

        assertTrue(isInterrupted.get());
    }

    @Test
    public void testJoinThread_whenThreadIsNull_thenNothingHappens() {
        // NOTE(review): behaviorally identical to testJoinThread_withNull above;
        // one of the two could be removed.
        joinThread(null);
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testAwait() {
        final CountDownLatch latch = new CountDownLatch(1);
        Thread thread = new Thread() {
            @Override
            public void run() {
                sleepMillis(100);
                latch.countDown();
            }
        };
        thread.start();

        await(latch);
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testAwait_whenInterrupted_thenRestoreInterruptedFlag() {
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean isInterrupted = new AtomicBoolean();

        Thread waiter = new Thread() {
            @Override
            public void run() {
                await(latch);
                isInterrupted.set(Thread.currentThread().isInterrupted());
            }
        };
        waiter.start();
        waiter.interrupt();
        joinThread(waiter);

        // The latch was never counted down: await() must have returned because
        // of the interrupt, with the interrupted flag restored.
        assertEquals(1, latch.getCount());
        assertTrue(isInterrupted.get());
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testAwait_withTimeUnit() {
        final CountDownLatch latch = new CountDownLatch(1);
        Thread thread = new Thread() {
            @Override
            public void run() {
                sleepMillis(100);
                latch.countDown();
            }
        };
        thread.start();

        boolean success = await(latch, 10, SECONDS);
        assertTrue(success);
    }

    @Test(timeout = DEFAULT_TEST_TIMEOUT)
    public void testAwait_withTimeUnit_whenInterrupted_thenRestoreInterruptedFlag() {
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean isInterrupted = new AtomicBoolean();
        final AtomicBoolean isSuccess = new AtomicBoolean();

        Thread waiter = new Thread() {
            @Override
            public void run() {
                boolean success = await(latch, 10, SECONDS);
                isInterrupted.set(Thread.currentThread().isInterrupted());
                isSuccess.set(success);
            }
        };
        waiter.start();
        waiter.interrupt();
        joinThread(waiter);

        // Interrupted wait: latch untouched, flag restored, await reports failure.
        assertEquals(1, latch.getCount());
        assertTrue(isInterrupted.get());
        assertFalse(isSuccess.get());
    }
}
/*
 * Copyright 2013-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.netflix.eureka;

import java.util.Objects;

import com.netflix.discovery.shared.transport.EurekaTransportConfig;

/**
 * Mutable bean-style {@link EurekaTransportConfig} implementation with sensible
 * defaults for the Eureka client transport (resolver refresh intervals, async
 * executor sizing, cluster VIPs and bootstrap-resolver behavior).
 *
 * @author Spencer Gibb
 * @author Gregor Zurowski
 */
public class CloudEurekaTransportConfig implements EurekaTransportConfig {

	private int sessionedClientReconnectIntervalSeconds = 20 * 60;

	private double retryableClientQuarantineRefreshPercentage = 0.66;

	private int bootstrapResolverRefreshIntervalSeconds = 5 * 60;

	private int applicationsResolverDataStalenessThresholdSeconds = 5 * 60;

	private int asyncResolverRefreshIntervalMs = 5 * 60 * 1000;

	private int asyncResolverWarmUpTimeoutMs = 5000;

	private int asyncExecutorThreadPoolSize = 5;

	private String readClusterVip;

	private String writeClusterVip;

	private boolean bootstrapResolverForQuery = true;

	private String bootstrapResolverStrategy;

	private boolean applicationsResolverUseIp = false;

	/** {@inheritDoc} */
	@Override
	public boolean useBootstrapResolverForQuery() {
		return this.bootstrapResolverForQuery;
	}

	/** {@inheritDoc} */
	@Override
	public boolean applicationsResolverUseIp() {
		return this.applicationsResolverUseIp;
	}

	public int getSessionedClientReconnectIntervalSeconds() {
		return this.sessionedClientReconnectIntervalSeconds;
	}

	public void setSessionedClientReconnectIntervalSeconds(int sessionedClientReconnectIntervalSeconds) {
		this.sessionedClientReconnectIntervalSeconds = sessionedClientReconnectIntervalSeconds;
	}

	public double getRetryableClientQuarantineRefreshPercentage() {
		return this.retryableClientQuarantineRefreshPercentage;
	}

	public void setRetryableClientQuarantineRefreshPercentage(double retryableClientQuarantineRefreshPercentage) {
		this.retryableClientQuarantineRefreshPercentage = retryableClientQuarantineRefreshPercentage;
	}

	public int getBootstrapResolverRefreshIntervalSeconds() {
		return this.bootstrapResolverRefreshIntervalSeconds;
	}

	public void setBootstrapResolverRefreshIntervalSeconds(int bootstrapResolverRefreshIntervalSeconds) {
		this.bootstrapResolverRefreshIntervalSeconds = bootstrapResolverRefreshIntervalSeconds;
	}

	public int getApplicationsResolverDataStalenessThresholdSeconds() {
		return this.applicationsResolverDataStalenessThresholdSeconds;
	}

	public void setApplicationsResolverDataStalenessThresholdSeconds(
			int applicationsResolverDataStalenessThresholdSeconds) {
		this.applicationsResolverDataStalenessThresholdSeconds = applicationsResolverDataStalenessThresholdSeconds;
	}

	public int getAsyncResolverRefreshIntervalMs() {
		return this.asyncResolverRefreshIntervalMs;
	}

	public void setAsyncResolverRefreshIntervalMs(int asyncResolverRefreshIntervalMs) {
		this.asyncResolverRefreshIntervalMs = asyncResolverRefreshIntervalMs;
	}

	public int getAsyncResolverWarmUpTimeoutMs() {
		return this.asyncResolverWarmUpTimeoutMs;
	}

	public void setAsyncResolverWarmUpTimeoutMs(int asyncResolverWarmUpTimeoutMs) {
		this.asyncResolverWarmUpTimeoutMs = asyncResolverWarmUpTimeoutMs;
	}

	public int getAsyncExecutorThreadPoolSize() {
		return this.asyncExecutorThreadPoolSize;
	}

	public void setAsyncExecutorThreadPoolSize(int asyncExecutorThreadPoolSize) {
		this.asyncExecutorThreadPoolSize = asyncExecutorThreadPoolSize;
	}

	public String getReadClusterVip() {
		return this.readClusterVip;
	}

	public void setReadClusterVip(String readClusterVip) {
		this.readClusterVip = readClusterVip;
	}

	public String getWriteClusterVip() {
		return this.writeClusterVip;
	}

	public void setWriteClusterVip(String writeClusterVip) {
		this.writeClusterVip = writeClusterVip;
	}

	public boolean isBootstrapResolverForQuery() {
		return this.bootstrapResolverForQuery;
	}

	public void setBootstrapResolverForQuery(boolean bootstrapResolverForQuery) {
		this.bootstrapResolverForQuery = bootstrapResolverForQuery;
	}

	public String getBootstrapResolverStrategy() {
		return this.bootstrapResolverStrategy;
	}

	public void setBootstrapResolverStrategy(String bootstrapResolverStrategy) {
		this.bootstrapResolverStrategy = bootstrapResolverStrategy;
	}

	public boolean isApplicationsResolverUseIp() {
		return this.applicationsResolverUseIp;
	}

	public void setApplicationsResolverUseIp(boolean applicationsResolverUseIp) {
		this.applicationsResolverUseIp = applicationsResolverUseIp;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		// Exact-class comparison (not instanceof) so a subclass never equals us.
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		CloudEurekaTransportConfig that = (CloudEurekaTransportConfig) o;
		// Guard-clause style: bail out on the first differing property.
		if (this.sessionedClientReconnectIntervalSeconds != that.sessionedClientReconnectIntervalSeconds) {
			return false;
		}
		if (Double.compare(this.retryableClientQuarantineRefreshPercentage,
				that.retryableClientQuarantineRefreshPercentage) != 0) {
			return false;
		}
		if (this.bootstrapResolverRefreshIntervalSeconds != that.bootstrapResolverRefreshIntervalSeconds) {
			return false;
		}
		if (this.applicationsResolverDataStalenessThresholdSeconds
				!= that.applicationsResolverDataStalenessThresholdSeconds) {
			return false;
		}
		if (this.asyncResolverRefreshIntervalMs != that.asyncResolverRefreshIntervalMs) {
			return false;
		}
		if (this.asyncResolverWarmUpTimeoutMs != that.asyncResolverWarmUpTimeoutMs) {
			return false;
		}
		if (this.asyncExecutorThreadPoolSize != that.asyncExecutorThreadPoolSize) {
			return false;
		}
		if (!Objects.equals(this.readClusterVip, that.readClusterVip)) {
			return false;
		}
		if (!Objects.equals(this.writeClusterVip, that.writeClusterVip)) {
			return false;
		}
		if (this.bootstrapResolverForQuery != that.bootstrapResolverForQuery) {
			return false;
		}
		if (!Objects.equals(this.bootstrapResolverStrategy, that.bootstrapResolverStrategy)) {
			return false;
		}
		return this.applicationsResolverUseIp == that.applicationsResolverUseIp;
	}

	@Override
	public int hashCode() {
		// Same fields, same order as equals() — keeps the equals/hashCode contract.
		return Objects.hash(this.sessionedClientReconnectIntervalSeconds,
				this.retryableClientQuarantineRefreshPercentage, this.bootstrapResolverRefreshIntervalSeconds,
				this.applicationsResolverDataStalenessThresholdSeconds, this.asyncResolverRefreshIntervalMs,
				this.asyncResolverWarmUpTimeoutMs, this.asyncExecutorThreadPoolSize, this.readClusterVip,
				this.writeClusterVip, this.bootstrapResolverForQuery, this.bootstrapResolverStrategy,
				this.applicationsResolverUseIp);
	}

	@Override
	public String toString() {
		// Plain concatenation; the output (including the trailing ", }") is
		// byte-identical to the historical StringBuilder-based rendering.
		return "CloudEurekaTransportConfig{"
				+ "sessionedClientReconnectIntervalSeconds=" + this.sessionedClientReconnectIntervalSeconds + ", "
				+ "retryableClientQuarantineRefreshPercentage=" + this.retryableClientQuarantineRefreshPercentage + ", "
				+ "bootstrapResolverRefreshIntervalSeconds=" + this.bootstrapResolverRefreshIntervalSeconds + ", "
				+ "applicationsResolverDataStalenessThresholdSeconds="
				+ this.applicationsResolverDataStalenessThresholdSeconds + ", "
				+ "asyncResolverRefreshIntervalMs=" + this.asyncResolverRefreshIntervalMs + ", "
				+ "asyncResolverWarmUpTimeoutMs=" + this.asyncResolverWarmUpTimeoutMs + ", "
				+ "asyncExecutorThreadPoolSize=" + this.asyncExecutorThreadPoolSize + ", "
				+ "readClusterVip='" + this.readClusterVip + "', "
				+ "writeClusterVip='" + this.writeClusterVip + "', "
				+ "bootstrapResolverForQuery=" + this.bootstrapResolverForQuery + ", "
				+ "bootstrapResolverStrategy='" + this.bootstrapResolverStrategy + "', "
				+ "applicationsResolverUseIp=" + this.applicationsResolverUseIp + ", "
				+ "}";
	}

}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2012, Red Hat, Inc., and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.keycloak.protocol.oidc;

import org.jboss.logging.Logger;
import org.keycloak.OAuth2Constants;
import org.keycloak.events.Details;
import org.keycloak.events.EventBuilder;
import org.keycloak.events.EventType;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientSessionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.protocol.LoginProtocol;
import org.keycloak.protocol.RestartLoginCookie;
import org.keycloak.services.managers.ClientSessionCode;
import org.keycloak.services.managers.ResourceAdminManager;

import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;

/**
 * {@link LoginProtocol} implementation for the OpenID Connect protocol.
 *
 * <p>Success and error outcomes are delivered to the client as HTTP 302
 * redirects back to the client-session's registered redirect URI, with the
 * {@code state} value echoed back whenever the client supplied one.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
 */
public class OIDCLoginProtocol implements LoginProtocol {

    // Protocol identifier used to register/look up this LoginProtocol.
    public static final String LOGIN_PROTOCOL = "openid-connect";

    // Standard OAuth2/OIDC request parameter names, plus the note keys
    // (LOGOUT_*) under which logout data is stashed on the user session.
    public static final String STATE_PARAM = "state";
    public static final String LOGOUT_STATE_PARAM = "OIDC_LOGOUT_STATE_PARAM";
    public static final String SCOPE_PARAM = "scope";
    public static final String CODE_PARAM = "code";
    public static final String RESPONSE_TYPE_PARAM = "response_type";
    public static final String GRANT_TYPE_PARAM = "grant_type";
    public static final String REDIRECT_URI_PARAM = "redirect_uri";
    public static final String CLIENT_ID_PARAM = "client_id";
    public static final String NONCE_PARAM = "nonce";
    public static final String PROMPT_PARAM = "prompt";
    public static final String LOGIN_HINT_PARAM = "login_hint";
    public static final String LOGOUT_REDIRECT_URI = "OIDC_LOGOUT_REDIRECT_URI";
    public static final String ISSUER = "iss";

    private static final Logger log = Logger.getLogger(OIDCLoginProtocol.class);

    // Per-request collaborators, injected either via the constructor or the
    // fluent setters below (the no-arg constructor path).
    protected KeycloakSession session;

    protected RealmModel realm;

    protected UriInfo uriInfo;

    protected HttpHeaders headers;

    protected EventBuilder event;

    public OIDCLoginProtocol(KeycloakSession session, RealmModel realm, UriInfo uriInfo, HttpHeaders headers, EventBuilder event) {
        this.session = session;
        this.realm = realm;
        this.uriInfo = uriInfo;
        this.headers = headers;
        this.event = event;
    }

    public OIDCLoginProtocol(){

    }

    @Override
    public OIDCLoginProtocol setSession(KeycloakSession session) {
        this.session = session;
        return this;
    }

    @Override
    public OIDCLoginProtocol setRealm(RealmModel realm) {
        this.realm = realm;
        return this;
    }

    @Override
    public OIDCLoginProtocol setUriInfo(UriInfo uriInfo) {
        this.uriInfo = uriInfo;
        return this;
    }

    @Override
    public OIDCLoginProtocol setHttpHeaders(HttpHeaders headers){
        this.headers = headers;
        return this;
    }

    @Override
    public OIDCLoginProtocol setEventBuilder(EventBuilder event) {
        this.event = event;
        return this;
    }

    /**
     * User cancelled the login: redirect back to the client with
     * {@code error=access_denied} (and {@code state} if present), tearing down
     * the client session and the restart-login cookie.
     */
    @Override
    public Response cancelLogin(ClientSessionModel clientSession) {
        String redirect = clientSession.getRedirectUri();
        String state = clientSession.getNote(OIDCLoginProtocol.STATE_PARAM);
        UriBuilder redirectUri = UriBuilder.fromUri(redirect).queryParam(OAuth2Constants.ERROR, "access_denied");
        if (state != null) {
            redirectUri.queryParam(OAuth2Constants.STATE, state);
        }
        session.sessions().removeClientSession(realm, clientSession);
        RestartLoginCookie.expireRestartCookie(realm, session.getContext().getConnection(), uriInfo);
        return Response.status(302).location(redirectUri.build()).build();
    }

    /**
     * Successful authentication: issue the authorization code and redirect
     * back to the client with {@code code} (and {@code state} if present).
     * The client session is advanced to the code-to-token exchange step.
     */
    @Override
    public Response authenticated(UserSessionModel userSession, ClientSessionCode accessCode) {
        ClientSessionModel clientSession = accessCode.getClientSession();
        String redirect = clientSession.getRedirectUri();
        String state = clientSession.getNote(OIDCLoginProtocol.STATE_PARAM);
        accessCode.setAction(ClientSessionModel.Action.CODE_TO_TOKEN.name());
        UriBuilder redirectUri = UriBuilder.fromUri(redirect).queryParam(OAuth2Constants.CODE, accessCode.getCode());
        log.debugv("redirectAccessCode: state: {0}", state);
        if (state != null)
            redirectUri.queryParam(OAuth2Constants.STATE, state);
        Response.ResponseBuilder location = Response.status(302).location(redirectUri.build());
        return location.build();
    }

    /**
     * User refused the consent screen. Effectively the same error redirect and
     * cleanup as {@link #cancelLogin(ClientSessionModel)}.
     */
    public Response consentDenied(ClientSessionModel clientSession) {
        String redirect = clientSession.getRedirectUri();
        String state = clientSession.getNote(OIDCLoginProtocol.STATE_PARAM);
        UriBuilder redirectUri = UriBuilder.fromUri(redirect).queryParam(OAuth2Constants.ERROR, "access_denied");
        if (state != null)
            redirectUri.queryParam(OAuth2Constants.STATE, state);
        session.sessions().removeClientSession(realm, clientSession);
        RestartLoginCookie.expireRestartCookie(realm, session.getContext().getConnection(), uriInfo);
        Response.ResponseBuilder location = Response.status(302).location(redirectUri.build());
        return location.build();
    }

    /**
     * Session no longer valid: redirect back with {@code error=access_denied}.
     * Unlike cancel/consent-denied, no session cleanup happens here.
     */
    public Response invalidSessionError(ClientSessionModel clientSession) {
        String redirect = clientSession.getRedirectUri();
        String state = clientSession.getNote(OIDCLoginProtocol.STATE_PARAM);
        UriBuilder redirectUri = UriBuilder.fromUri(redirect).queryParam(OAuth2Constants.ERROR, "access_denied");
        if (state != null) {
            redirectUri.queryParam(OAuth2Constants.STATE, state);
        }
        return Response.status(302).location(redirectUri.build()).build();
    }

    /**
     * Server-side (back-channel) logout of one client session via the admin
     * resource manager.
     */
    @Override
    public void backchannelLogout(UserSessionModel userSession, ClientSessionModel clientSession) {
        // NOTE(review): getClient() is declared to return ClientModel, so this
        // instanceof also acts as a null guard — confirm whether that is the
        // only case it filters.
        if (!(clientSession.getClient() instanceof ClientModel)) return;
        ClientModel app = clientSession.getClient();
        new ResourceAdminManager(session).logoutClientSession(uriInfo.getRequestUri(), realm, app, clientSession);
    }

    @Override
    public Response frontchannelLogout(UserSessionModel userSession, ClientSessionModel clientSession) {
        // todo oidc redirect support
        throw new RuntimeException("NOT IMPLEMENTED");
    }

    /**
     * Final step of logout: fire the LOGOUT event and, if a post-logout
     * redirect URI was stored on the user session, 302 back to it (echoing the
     * stored {@code state}); otherwise respond 200 with no body.
     */
    @Override
    public Response finishLogout(UserSessionModel userSession) {
        String redirectUri = userSession.getNote(OIDCLoginProtocol.LOGOUT_REDIRECT_URI);
        String state = userSession.getNote(OIDCLoginProtocol.LOGOUT_STATE_PARAM);
        event.event(EventType.LOGOUT);
        if (redirectUri != null) {
            event.detail(Details.REDIRECT_URI, redirectUri);
        }
        event.user(userSession.getUser()).session(userSession).success();
        if (redirectUri != null) {
            UriBuilder uriBuilder = UriBuilder.fromUri(redirectUri);
            if (state != null)
                uriBuilder.queryParam(STATE_PARAM, state);
            return Response.status(302).location(uriBuilder.build()).build();
        } else {
            return Response.ok().build();
        }
    }

    @Override
    public void close() {

    }

}
/*
 * Copyright 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.pysrc.internal;

import static com.google.template.soy.pysrc.internal.SoyExprForPySubject.assertThatSoyExpr;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.template.soy.exprtree.Operator;
import com.google.template.soy.pysrc.restricted.PyExpr;
import com.google.template.soy.pysrc.restricted.PyListExpr;
import com.google.template.soy.pysrc.restricted.PyStringExpr;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Unit tests for TranslateToPyExprVisitor.
 *
 * <p>Each assertion translates a Soy expression and checks the generated Python source together
 * with its precedence ({@code Integer.MAX_VALUE} for atomic expressions, otherwise the outermost
 * operator's precedence).
 */
@RunWith(JUnit4.class)
public class TranslateToPyExprVisitorTest {

  @Test
  public void testNullLiteral() {
    assertThatSoyExpr("null").translatesTo("None", Integer.MAX_VALUE);
  }

  @Test
  public void testBooleanLiteral() {
    assertThatSoyExpr("true").translatesTo("True", Integer.MAX_VALUE);
    assertThatSoyExpr("false").translatesTo("False", Integer.MAX_VALUE);
  }

  @Test
  public void testStringLiteral() {
    assertThatSoyExpr("'waldo'")
        .translatesTo(new PyExpr("'waldo'", Integer.MAX_VALUE), PyStringExpr.class);
  }

  @Test
  public void testListLiteral() {
    assertThatSoyExpr("[]").translatesTo(new PyExpr("[]", Integer.MAX_VALUE), PyListExpr.class);
    assertThatSoyExpr("['blah', 123, $foo]")
        .translatesTo(
            new PyExpr("['blah', 123, data.get('foo')]", Integer.MAX_VALUE), PyListExpr.class);
  }

  @Test
  public void testMapLiteral() {
    // Unquoted keys.
    assertThatSoyExpr("[:]").translatesTo("collections.OrderedDict([])", Integer.MAX_VALUE);
    assertThatSoyExpr("['aaa': 123, 'bbb': 'blah']")
        .translatesTo(
            "collections.OrderedDict([('aaa', 123), ('bbb', 'blah')])", Integer.MAX_VALUE);
    assertThatSoyExpr("['aaa': $foo, 'bbb': 'blah']")
        .translatesTo(
            "collections.OrderedDict([('aaa', data.get('foo')), ('bbb', 'blah')])",
            Integer.MAX_VALUE);

    // Non-string keys are allowed in Python.
    assertThatSoyExpr("[1: 'blah', 0: 123]")
        .translatesTo("collections.OrderedDict([(1, 'blah'), (0, 123)])", Integer.MAX_VALUE);
  }

  @Test
  public void testMapLiteral_quotedKeysIfJS() {
    // quoteKeysIfJs should change nothing in Python.
    assertThatSoyExpr("quoteKeysIfJs([:])")
        .translatesTo("collections.OrderedDict([])", Integer.MAX_VALUE);
    assertThatSoyExpr("quoteKeysIfJs( ['aaa': $foo, 'bbb': 'blah'] )")
        .translatesTo(
            "collections.OrderedDict([('aaa', data.get('foo')), ('bbb', 'blah')])",
            Integer.MAX_VALUE);
  }

  @Test
  public void testGlobals() {
    ImmutableMap<String, Object> globals =
        ImmutableMap.<String, Object>builder()
            .put("STR", "Hello World")
            .put("NUM", 55)
            .put("BOOL", true)
            .build();

    assertThatSoyExpr("STR").withGlobals(globals).translatesTo("'Hello World'", Integer.MAX_VALUE);
    assertThatSoyExpr("NUM").withGlobals(globals).translatesTo("55", Integer.MAX_VALUE);
    assertThatSoyExpr("BOOL").withGlobals(globals).translatesTo("True", Integer.MAX_VALUE);
  }

  @Test
  public void testDataRef() {
    assertThatSoyExpr("$boo").translatesTo("data.get('boo')", Integer.MAX_VALUE);
    assertThatSoyExpr("$boo.goo").translatesTo("data.get('boo').get('goo')", Integer.MAX_VALUE);
    assertThatSoyExpr("$boo['goo']")
        .translatesTo("runtime.key_safe_data_access(data.get('boo'), 'goo')", Integer.MAX_VALUE);
    // Fixed: this assertion was previously duplicated verbatim.
    assertThatSoyExpr("$boo[0]")
        .translatesTo("runtime.key_safe_data_access(data.get('boo'), 0)", Integer.MAX_VALUE);
    assertThatSoyExpr("$boo[$foo][$foo+1]")
        .translatesTo(
            "runtime.key_safe_data_access("
                + "runtime.key_safe_data_access(data.get('boo'), data.get('foo')), "
                + "runtime.type_safe_add(data.get('foo'), 1))",
            Integer.MAX_VALUE);

    // Null-safe access short-circuits to None when the base is None.
    assertThatSoyExpr("$boo?.goo")
        .translatesTo(
            "None if data.get('boo') is None else data.get('boo').get('goo')",
            Operator.CONDITIONAL);
    assertThatSoyExpr("$boo?[0]?[1]")
        .translatesTo(
            "None if data.get('boo') is None else "
                + "None if runtime.key_safe_data_access(data.get('boo'), 0) is None else "
                + "runtime.key_safe_data_access("
                + "runtime.key_safe_data_access(data.get('boo'), 0), 1)",
            Operator.CONDITIONAL);
  }

  @Test
  public void testDataRef_localVars() {
    // Local variables resolve through the frame map instead of data.get(...).
    Map<String, PyExpr> frame = Maps.newHashMap();
    frame.put("zoo", new PyExpr("zooData8", Integer.MAX_VALUE));

    assertThatSoyExpr("$zoo").with(frame).translatesTo("zooData8", Integer.MAX_VALUE);
    assertThatSoyExpr("$zoo.boo")
        .with(frame)
        .translatesTo("zooData8.get('boo')", Integer.MAX_VALUE);
  }

  @Test
  public void testBasicOperators() {
    assertThatSoyExpr("not $boo or true and $foo")
        .translatesTo("not data.get('boo') or True and data.get('foo')", Operator.OR);
  }

  @Test
  public void testEqualOperator() {
    assertThatSoyExpr("'5' == 5 ? 1 : 0")
        .translatesTo("1 if runtime.type_safe_eq('5', 5) else 0", 1);
    assertThatSoyExpr("'5' == $boo ? 1 : 0")
        .translatesTo("1 if runtime.type_safe_eq('5', data.get('boo')) else 0", 1);
  }

  @Test
  public void testNotEqualOperator() {
    assertThatSoyExpr("'5' != 5").translatesTo("not runtime.type_safe_eq('5', 5)", Operator.NOT);
  }

  @Test
  public void testPlusOperator() {
    assertThatSoyExpr("( (8-4) + (2-1) )")
        .translatesTo("runtime.type_safe_add(8 - 4, 2 - 1)", Integer.MAX_VALUE);
  }

  @Test
  public void testNullCoalescingOperator() {
    assertThatSoyExpr("$boo ?: 5")
        .translatesTo(
            "data.get('boo') if data.get('boo') is not None else 5", Operator.CONDITIONAL);
  }

  @Test
  public void testConditionalOperator() {
    assertThatSoyExpr("$boo ? 5 : 6")
        .translatesTo("5 if data.get('boo') else 6", Operator.CONDITIONAL);
  }

  @Test
  public void testCheckNotNull() {
    assertThatSoyExpr("checkNotNull($boo) ? 1 : 0")
        .translatesTo("1 if runtime.check_not_null(data.get('boo')) else 0", Operator.CONDITIONAL);
  }

  @Test
  public void testCss() {
    assertThatSoyExpr("css('foo')").translatesTo("runtime.get_css_name('foo')", Integer.MAX_VALUE);
    assertThatSoyExpr("css($foo, 'base')")
        .translatesTo("runtime.get_css_name(data.get('foo'), 'base')", Integer.MAX_VALUE);
  }

  @Test
  public void testXid() {
    assertThatSoyExpr("xid('foo')").translatesTo("runtime.get_xid_name('foo')", Integer.MAX_VALUE);
  }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.managedidentities.v1; import static com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceClient.ListDomainsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.managedidentities.v1.stub.ManagedIdentitiesServiceStubSettings; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ManagedIdentitiesServiceClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (managedidentities.googleapis.com) and default port (443) are * used. * <li>Credentials are acquired automatically through Application Default Credentials. 
 * <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of resetAdminPassword to 30 seconds:
 *
 * <pre>{@code
 * ManagedIdentitiesServiceSettings.Builder managedIdentitiesServiceSettingsBuilder =
 *     ManagedIdentitiesServiceSettings.newBuilder();
 * managedIdentitiesServiceSettingsBuilder
 *     .resetAdminPasswordSettings()
 *     .setRetrySettings(
 *         managedIdentitiesServiceSettingsBuilder
 *             .resetAdminPasswordSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * ManagedIdentitiesServiceSettings managedIdentitiesServiceSettings =
 *     managedIdentitiesServiceSettingsBuilder.build();
 * }</pre>
 */
// Generated facade: every per-RPC accessor below simply delegates to the corresponding
// accessor on ManagedIdentitiesServiceStubSettings. Do not edit by hand; regenerate instead.
@Generated("by gapic-generator-java")
public class ManagedIdentitiesServiceSettings
    extends ClientSettings<ManagedIdentitiesServiceSettings> {

  /** Returns the object with the settings used for calls to createMicrosoftAdDomain. */
  public UnaryCallSettings<CreateMicrosoftAdDomainRequest, Operation>
      createMicrosoftAdDomainSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .createMicrosoftAdDomainSettings();
  }

  /** Returns the object with the settings used for calls to createMicrosoftAdDomain. */
  public OperationCallSettings<CreateMicrosoftAdDomainRequest, Domain, OpMetadata>
      createMicrosoftAdDomainOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .createMicrosoftAdDomainOperationSettings();
  }

  /** Returns the object with the settings used for calls to resetAdminPassword. */
  public UnaryCallSettings<ResetAdminPasswordRequest, ResetAdminPasswordResponse>
      resetAdminPasswordSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).resetAdminPasswordSettings();
  }

  /** Returns the object with the settings used for calls to listDomains. */
  public PagedCallSettings<ListDomainsRequest, ListDomainsResponse, ListDomainsPagedResponse>
      listDomainsSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).listDomainsSettings();
  }

  /** Returns the object with the settings used for calls to getDomain. */
  public UnaryCallSettings<GetDomainRequest, Domain> getDomainSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).getDomainSettings();
  }

  /** Returns the object with the settings used for calls to updateDomain. */
  public UnaryCallSettings<UpdateDomainRequest, Operation> updateDomainSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).updateDomainSettings();
  }

  /** Returns the object with the settings used for calls to updateDomain. */
  public OperationCallSettings<UpdateDomainRequest, Domain, OpMetadata>
      updateDomainOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .updateDomainOperationSettings();
  }

  /** Returns the object with the settings used for calls to deleteDomain. */
  public UnaryCallSettings<DeleteDomainRequest, Operation> deleteDomainSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).deleteDomainSettings();
  }

  /** Returns the object with the settings used for calls to deleteDomain. */
  public OperationCallSettings<DeleteDomainRequest, Empty, OpMetadata>
      deleteDomainOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .deleteDomainOperationSettings();
  }

  /** Returns the object with the settings used for calls to attachTrust. */
  public UnaryCallSettings<AttachTrustRequest, Operation> attachTrustSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).attachTrustSettings();
  }

  /** Returns the object with the settings used for calls to attachTrust. */
  public OperationCallSettings<AttachTrustRequest, Domain, OpMetadata>
      attachTrustOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .attachTrustOperationSettings();
  }

  /** Returns the object with the settings used for calls to reconfigureTrust. */
  public UnaryCallSettings<ReconfigureTrustRequest, Operation> reconfigureTrustSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).reconfigureTrustSettings();
  }

  /** Returns the object with the settings used for calls to reconfigureTrust. */
  public OperationCallSettings<ReconfigureTrustRequest, Domain, OpMetadata>
      reconfigureTrustOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .reconfigureTrustOperationSettings();
  }

  /** Returns the object with the settings used for calls to detachTrust. */
  public UnaryCallSettings<DetachTrustRequest, Operation> detachTrustSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).detachTrustSettings();
  }

  /** Returns the object with the settings used for calls to detachTrust. */
  public OperationCallSettings<DetachTrustRequest, Domain, OpMetadata>
      detachTrustOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .detachTrustOperationSettings();
  }

  /** Returns the object with the settings used for calls to validateTrust. */
  public UnaryCallSettings<ValidateTrustRequest, Operation> validateTrustSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings()).validateTrustSettings();
  }

  /** Returns the object with the settings used for calls to validateTrust. */
  public OperationCallSettings<ValidateTrustRequest, Domain, OpMetadata>
      validateTrustOperationSettings() {
    return ((ManagedIdentitiesServiceStubSettings) getStubSettings())
        .validateTrustOperationSettings();
  }

  /** Wraps an already-built stub settings object in a settings facade. */
  public static final ManagedIdentitiesServiceSettings create(
      ManagedIdentitiesServiceStubSettings stub) throws IOException {
    return new ManagedIdentitiesServiceSettings.Builder(stub.toBuilder()).build();
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return ManagedIdentitiesServiceStubSettings.defaultExecutorProviderBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return ManagedIdentitiesServiceStubSettings.getDefaultEndpoint();
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return ManagedIdentitiesServiceStubSettings.getDefaultServiceScopes();
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return ManagedIdentitiesServiceStubSettings.defaultCredentialsProviderBuilder();
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return ManagedIdentitiesServiceStubSettings.defaultGrpcTransportProviderBuilder();
  }

  /** Returns the default transport channel provider for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return ManagedIdentitiesServiceStubSettings.defaultTransportChannelProvider();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ManagedIdentitiesServiceStubSettings.defaultApiClientHeaderProviderBuilder();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected ManagedIdentitiesServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }

  /** Builder for ManagedIdentitiesServiceSettings. */
  public static class Builder
      extends ClientSettings.Builder<ManagedIdentitiesServiceSettings, Builder> {

    protected Builder() throws IOException {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(ManagedIdentitiesServiceStubSettings.newBuilder(clientContext));
    }

    protected Builder(ManagedIdentitiesServiceSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }

    protected Builder(ManagedIdentitiesServiceStubSettings.Builder stubSettings) {
      super(stubSettings);
    }

    private static Builder createDefault() {
      return new Builder(ManagedIdentitiesServiceStubSettings.newBuilder());
    }

    public ManagedIdentitiesServiceStubSettings.Builder getStubSettingsBuilder() {
      return ((ManagedIdentitiesServiceStubSettings.Builder) getStubSettings());
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }

    /** Returns the builder for the settings used for calls to createMicrosoftAdDomain. */
    public UnaryCallSettings.Builder<CreateMicrosoftAdDomainRequest, Operation>
        createMicrosoftAdDomainSettings() {
      return getStubSettingsBuilder().createMicrosoftAdDomainSettings();
    }

    /** Returns the builder for the settings used for calls to createMicrosoftAdDomain. */
    public OperationCallSettings.Builder<CreateMicrosoftAdDomainRequest, Domain, OpMetadata>
        createMicrosoftAdDomainOperationSettings() {
      return getStubSettingsBuilder().createMicrosoftAdDomainOperationSettings();
    }

    /** Returns the builder for the settings used for calls to resetAdminPassword. */
    public UnaryCallSettings.Builder<ResetAdminPasswordRequest, ResetAdminPasswordResponse>
        resetAdminPasswordSettings() {
      return getStubSettingsBuilder().resetAdminPasswordSettings();
    }

    /** Returns the builder for the settings used for calls to listDomains. */
    public PagedCallSettings.Builder<
            ListDomainsRequest, ListDomainsResponse, ListDomainsPagedResponse>
        listDomainsSettings() {
      return getStubSettingsBuilder().listDomainsSettings();
    }

    /** Returns the builder for the settings used for calls to getDomain. */
    public UnaryCallSettings.Builder<GetDomainRequest, Domain> getDomainSettings() {
      return getStubSettingsBuilder().getDomainSettings();
    }

    /** Returns the builder for the settings used for calls to updateDomain. */
    public UnaryCallSettings.Builder<UpdateDomainRequest, Operation> updateDomainSettings() {
      return getStubSettingsBuilder().updateDomainSettings();
    }

    /** Returns the builder for the settings used for calls to updateDomain. */
    public OperationCallSettings.Builder<UpdateDomainRequest, Domain, OpMetadata>
        updateDomainOperationSettings() {
      return getStubSettingsBuilder().updateDomainOperationSettings();
    }

    /** Returns the builder for the settings used for calls to deleteDomain. */
    public UnaryCallSettings.Builder<DeleteDomainRequest, Operation> deleteDomainSettings() {
      return getStubSettingsBuilder().deleteDomainSettings();
    }

    /** Returns the builder for the settings used for calls to deleteDomain. */
    public OperationCallSettings.Builder<DeleteDomainRequest, Empty, OpMetadata>
        deleteDomainOperationSettings() {
      return getStubSettingsBuilder().deleteDomainOperationSettings();
    }

    /** Returns the builder for the settings used for calls to attachTrust. */
    public UnaryCallSettings.Builder<AttachTrustRequest, Operation> attachTrustSettings() {
      return getStubSettingsBuilder().attachTrustSettings();
    }

    /** Returns the builder for the settings used for calls to attachTrust. */
    public OperationCallSettings.Builder<AttachTrustRequest, Domain, OpMetadata>
        attachTrustOperationSettings() {
      return getStubSettingsBuilder().attachTrustOperationSettings();
    }

    /** Returns the builder for the settings used for calls to reconfigureTrust. */
    public UnaryCallSettings.Builder<ReconfigureTrustRequest, Operation>
        reconfigureTrustSettings() {
      return getStubSettingsBuilder().reconfigureTrustSettings();
    }

    /** Returns the builder for the settings used for calls to reconfigureTrust. */
    public OperationCallSettings.Builder<ReconfigureTrustRequest, Domain, OpMetadata>
        reconfigureTrustOperationSettings() {
      return getStubSettingsBuilder().reconfigureTrustOperationSettings();
    }

    /** Returns the builder for the settings used for calls to detachTrust. */
    public UnaryCallSettings.Builder<DetachTrustRequest, Operation> detachTrustSettings() {
      return getStubSettingsBuilder().detachTrustSettings();
    }

    /** Returns the builder for the settings used for calls to detachTrust. */
    public OperationCallSettings.Builder<DetachTrustRequest, Domain, OpMetadata>
        detachTrustOperationSettings() {
      return getStubSettingsBuilder().detachTrustOperationSettings();
    }

    /** Returns the builder for the settings used for calls to validateTrust. */
    public UnaryCallSettings.Builder<ValidateTrustRequest, Operation> validateTrustSettings() {
      return getStubSettingsBuilder().validateTrustSettings();
    }

    /** Returns the builder for the settings used for calls to validateTrust. */
    public OperationCallSettings.Builder<ValidateTrustRequest, Domain, OpMetadata>
        validateTrustOperationSettings() {
      return getStubSettingsBuilder().validateTrustOperationSettings();
    }

    @Override
    public ManagedIdentitiesServiceSettings build() throws IOException {
      return new ManagedIdentitiesServiceSettings(this);
    }
  }
}
/*
 * Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.apm.client.opentracing;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.HashMap;
import java.util.Map;

import org.hawkular.apm.api.model.Constants;
import org.hawkular.apm.api.model.trace.Component;
import org.hawkular.apm.api.model.trace.Consumer;
import org.hawkular.apm.api.model.trace.CorrelationIdentifier;
import org.hawkular.apm.api.model.trace.CorrelationIdentifier.Scope;
import org.hawkular.apm.api.model.trace.Trace;
import org.junit.Test;

import io.opentracing.References;
import io.opentracing.Span;
import io.opentracing.SpanContext;
import io.opentracing.Tracer;
import io.opentracing.propagation.Format;
import io.opentracing.propagation.TextMapExtractAdapter;

/**
 * Tests how {@link APMTracer} maps OpenTracing span references (ChildOf, FollowsFrom,
 * extracted SpanContext) onto Hawkular APM trace fragments and correlation identifiers.
 *
 * <p>All {@code assertEquals} calls use the JUnit (expected, actual) argument order.
 *
 * @author gbrown
 */
public class APMTracerReferenceTest {

    private static final String TEST_TXN = "TestBTxn";
    private static final String TEST_APM_ID0 = "id0";
    private static final String TEST_APM_ID1 = "id1";
    private static final String TEST_APM_ID2 = "id2";
    private static final String TEST_APM_TRACEID = "xyz";

    /** A span with no references reports a single root Component with no correlation ids. */
    @Test
    public void testNoReferences() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        Span span = tracer.buildSpan("NoReferences")
                .start();
        span.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Component.class, trace.getNodes().get(0).getClass());
        assertTrue(trace.getNodes().get(0).getCorrelationIds().isEmpty());
        assertEquals(0, ((Component) trace.getNodes().get(0)).getNodes().size());
    }

    /** A ChildOf on an extracted context produces a Consumer correlated via Interaction scope. */
    @Test
    public void testSingleExtractedChildOf() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        SpanContext spanCtx = extractedTraceState(tracer, TEST_APM_ID1);

        Span span = tracer.buildSpan("SingleChildOfSpanContext")
                .asChildOf(spanCtx)
                .start();
        span.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Consumer.class, trace.getNodes().get(0).getClass());
        // JUnit convention: expected value first.
        assertEquals(new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID1),
                ((Consumer) trace.getNodes().get(0)).getCorrelationIds().get(0));
        assertEquals(0, ((Consumer) trace.getNodes().get(0)).getNodes().size());
    }

    /** A FollowsFrom on an extracted context behaves like ChildOf for correlation purposes. */
    @Test
    public void testSingleExtractedFollowsFrom() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        SpanContext spanCtx = extractedTraceState(tracer, TEST_APM_ID1);

        Span span = tracer.buildSpan("root")
                .addReference(References.FOLLOWS_FROM, spanCtx)
                .start();
        span.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Consumer.class, trace.getNodes().get(0).getClass());
        assertEquals(new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID1),
                ((Consumer) trace.getNodes().get(0)).getCorrelationIds().get(0));
        assertEquals(0, ((Consumer) trace.getNodes().get(0)).getNodes().size());
    }

    /** A ChildOf on an in-process Span nests the child node under the parent Component. */
    @Test
    public void testSingleChildOfSpan() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        Span parentSpan = tracer.buildSpan("ParentSpan")
                .start();
        Span childSpan = tracer.buildSpan("ChildSpan")
                .asChildOf(parentSpan)
                .start();
        childSpan.finish();
        parentSpan.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Component.class, trace.getNodes().get(0).getClass());

        Component parentComponent = (Component) trace.getNodes().get(0);
        assertTrue(parentComponent.getCorrelationIds().isEmpty());
        assertEquals(1, parentComponent.getNodes().size());
        assertEquals(Component.class, parentComponent.getNodes().get(0).getClass());

        Component childComponent = (Component) parentComponent.getNodes().get(0);
        assertTrue(childComponent.getCorrelationIds().isEmpty());
        assertEquals(0, childComponent.getNodes().size());
    }

    /** Same as {@link #testSingleChildOfSpan()} but referencing the parent via its context. */
    @Test
    public void testSingleChildOfSpanUsingContext() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        Span parentSpan = tracer.buildSpan("ParentSpan")
                .start();
        Span childSpan = tracer.buildSpan("ChildSpan")
                .asChildOf(parentSpan.context())
                .start();
        childSpan.finish();
        parentSpan.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Component.class, trace.getNodes().get(0).getClass());

        Component parentComponent = (Component) trace.getNodes().get(0);
        assertTrue(parentComponent.getCorrelationIds().isEmpty());
        assertEquals(1, parentComponent.getNodes().size());
        assertEquals(Component.class, parentComponent.getNodes().get(0).getClass());

        Component childComponent = (Component) parentComponent.getNodes().get(0);
        assertTrue(childComponent.getCorrelationIds().isEmpty());
        assertEquals(0, childComponent.getNodes().size());
    }

    /**
     * A FollowsFrom on a finished span produces a second trace fragment whose root Consumer
     * carries the referenced span's operation/URI and a CausedBy correlation id.
     */
    @Test
    public void testSingleFollowsFromRef() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        Span parentSpan = tracer.buildSpan("ParentSpan")
                .withTag("http.url", "http://localhost:8080/hello")
                .start();
        parentSpan.finish();

        Span followsFromSpan = tracer.buildSpan("FollowsFromSpan")
                .addReference(References.FOLLOWS_FROM, parentSpan.context())
                .start();
        followsFromSpan.finish();

        assertEquals(2, reporter.getTraces().size());

        Trace parentTrace = reporter.getTraces().get(0);
        assertEquals(1, parentTrace.getNodes().size());
        assertEquals(Component.class, parentTrace.getNodes().get(0).getClass());

        Component parentComponent = (Component) parentTrace.getNodes().get(0);
        assertTrue(parentComponent.getCorrelationIds().isEmpty());
        assertEquals(0, parentComponent.getNodes().size());

        Trace followsFromTrace = reporter.getTraces().get(1);
        assertEquals(parentTrace.getTraceId(), followsFromTrace.getTraceId());
        assertEquals(1, followsFromTrace.getNodes().size());
        // 'Consumer' introduced to link the followsFrom component to the referenced Span/Node
        assertEquals(Consumer.class, followsFromTrace.getNodes().get(0).getClass());

        Consumer followsFromConsumer = (Consumer) followsFromTrace.getNodes().get(0);
        assertEquals("ParentSpan", followsFromConsumer.getOperation());
        assertEquals("/hello", followsFromConsumer.getUri());
        assertEquals(new CorrelationIdentifier(Scope.CausedBy, parentTrace.getFragmentId() + ":0"),
                followsFromConsumer.getCorrelationIds().get(0));
        assertEquals(1, followsFromConsumer.getNodes().size());
        assertEquals(Component.class, followsFromConsumer.getNodes().get(0).getClass());

        Component followsFromComponent = (Component) followsFromConsumer.getNodes().get(0);
        assertTrue(followsFromComponent.getCorrelationIds().isEmpty());
        assertEquals(0, followsFromComponent.getNodes().size());
    }

    /**
     * This test shows multiple FollowsFrom references with just a single ChildOf(SpanContext).
     */
    @Test
    public void testSingleChildOfSpanContextWithOtherFollowsFromRefs() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        SpanContext spanCtx1 = extractedTraceState(tracer, TEST_APM_ID1);
        Span rootSpan = tracer.buildSpan("root")
                .asChildOf(spanCtx1)
                .start();
        Span refdSpan1 = tracer.buildSpan("ref1")
                .asChildOf(rootSpan)
                .start();
        refdSpan1.finish();
        Span refdSpan2 = tracer.buildSpan("ref2")
                .asChildOf(rootSpan)
                .start();
        refdSpan2.finish();
        rootSpan.finish();

        SpanContext spanCtx2 = extractedTraceState(tracer, TEST_APM_ID2);
        Span span = tracer.buildSpan("SingleChildOfSpanContext")
                .addReference(References.FOLLOWS_FROM, refdSpan1.context())
                .asChildOf(spanCtx2)
                .addReference(References.FOLLOWS_FROM, refdSpan2.context())
                .start();
        span.finish();

        assertEquals(2, reporter.getTraces().size());

        Trace trace1 = reporter.getTraces().get(0);
        Trace trace2 = reporter.getTraces().get(1);

        assertEquals(1, trace2.getNodes().size());
        assertEquals(Consumer.class, trace2.getNodes().get(0).getClass());
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID2)));
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:0")));
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:1")));
        assertEquals(0, ((Consumer) trace2.getNodes().get(0)).getNodes().size());
    }

    /**
     * This test shows multiple FollowsFrom references with just a single ChildOf(Span).
     */
    @Test
    public void testSingleChildOfSpanWithOtherFollowsFromRefs() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        Span rootSpan = tracer.buildSpan("root")
                .start();
        Span childSpan1 = tracer.buildSpan("child1")
                .asChildOf(rootSpan)
                .start();
        childSpan1.finish();
        Span childSpan2 = tracer.buildSpan("child2")
                .asChildOf(rootSpan)
                .start();

        Span referencingSpan = tracer.buildSpan("referencingSpan")
                .addReference(References.FOLLOWS_FROM, childSpan1.context())
                .asChildOf(childSpan2)
                .start();

        childSpan2.finish();
        rootSpan.finish();
        referencingSpan.finish();

        assertEquals(1, reporter.getTraces().size());

        Trace trace = reporter.getTraces().get(0);
        assertEquals(1, trace.getNodes().size());
        assertEquals(Component.class, trace.getNodes().get(0).getClass());

        Component rootComponent = (Component) trace.getNodes().get(0);
        assertEquals(2, rootComponent.getNodes().size());
        assertEquals(Component.class, rootComponent.getNodes().get(0).getClass());
        assertEquals(Component.class, rootComponent.getNodes().get(1).getClass());

        Component child1Component = (Component) rootComponent.getNodes().get(0);
        Component child2Component = (Component) rootComponent.getNodes().get(1);
        assertEquals(0, child1Component.getNodes().size());
        assertEquals(1, child2Component.getNodes().size());
        assertEquals(Component.class, child2Component.getNodes().get(0).getClass());

        Component referencingComponent = (Component) child2Component.getNodes().get(0);
        assertTrue(referencingComponent.getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace.getFragmentId() + ":0:0")));
    }

    /**
     * This test defines three references, 'refdSpan1' which is the FollowFrom,
     * 'refdSpan2' which is the ChildOf ref (so real parent), but it is overridden
     * by the extracted 'spanCtx' - and so the real parent/child relationship is
     * changed to a casual link.
     */
    @Test
    public void testSingleExtractedSpanContextWithOtherChildOfSpanAndFollowsFromRefs() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        SpanContext spanCtx1 = extractedTraceState(tracer, TEST_APM_ID1);
        Span rootSpan = tracer.buildSpan("root")
                .asChildOf(spanCtx1)
                .start();
        Span refdSpan1 = tracer.buildSpan("ref1")
                .asChildOf(rootSpan)
                .start();
        refdSpan1.finish();
        Span refdSpan2 = tracer.buildSpan("ref2")
                .asChildOf(rootSpan)
                .start();
        // NOTE(review): refdSpan2.finish() is invoked both here and after 'span' is started
        // below — a double finish. Preserved as-is since the assertions depend on the recorded
        // fragments; confirm whether the second call is intentional.
        refdSpan2.finish();
        rootSpan.finish();

        SpanContext spanCtx = extractedTraceState(tracer, TEST_APM_ID2);
        Span span = tracer.buildSpan("SingleChildOfSpanContext")
                .asChildOf(refdSpan2)
                .asChildOf(spanCtx)
                .addReference(References.FOLLOWS_FROM, refdSpan1.context())
                .start();
        refdSpan2.finish();
        span.finish();

        assertEquals(2, reporter.getTraces().size());

        Trace trace1 = reporter.getTraces().get(0);
        Trace trace2 = reporter.getTraces().get(1);

        assertEquals(1, trace2.getNodes().size());
        assertEquals(Consumer.class, trace2.getNodes().get(0).getClass());
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID2)));
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:0")));
        assertTrue(((Consumer) trace2.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:1")));
        assertEquals(0, ((Consumer) trace2.getNodes().get(0)).getNodes().size());
    }

    /**
     * Joins ChildOf(Span), two extracted ChildOf contexts and a FollowsFrom into one span,
     * verifying both fragments share a trace id and the join node carries all correlations.
     */
    @Test
    public void testMultipleVariousChildOfAndFollowsFromRefsSameTraceInstance() {
        APMTracerTest.TestTraceRecorder reporter = new APMTracerTest.TestTraceRecorder();
        Tracer tracer = new APMTracer(reporter);

        SpanContext parentSpanCtx = extractedTraceState(tracer, TEST_APM_ID0);
        Span refdParent = tracer.buildSpan("refParent")
                .asChildOf(parentSpanCtx)
                .start();
        Span refdSpan1 = tracer.buildSpan("ref1")
                .asChildOf(refdParent)
                .start();
        refdSpan1.finish();
        Span refdSpan2 = tracer.buildSpan("ref2")
                .asChildOf(refdParent)
                .start();
        refdParent.finish();

        SpanContext spanCtx1 = extractedTraceState(tracer, TEST_APM_ID1);
        SpanContext spanCtx2 = extractedTraceState(tracer, TEST_APM_ID2);

        Span span = tracer.buildSpan("JoinForSameTraceInstance")
                .asChildOf(refdSpan2)
                .asChildOf(spanCtx1)
                .addReference(References.FOLLOWS_FROM, refdSpan1.context())
                .asChildOf(spanCtx2)
                .start();
        refdSpan2.finish();
        span.finish();

        assertEquals(2, reporter.getTraces().size());

        Trace trace1 = reporter.getTraces().get(0);
        Trace trace2 = reporter.getTraces().get(1);

        assertNotNull(trace1.getTraceId());
        assertNotNull(trace2.getTraceId());
        // assertEquals gives a diagnostic diff on failure, unlike assertTrue(a.equals(b)).
        assertEquals(trace1.getTraceId(), trace2.getTraceId());

        // Verify trace1 structure of Consumer node with two Component children
        assertEquals(1, trace1.getNodes().size());
        assertEquals(Consumer.class, trace1.getNodes().get(0).getClass());
        assertTrue(((Consumer) trace1.getNodes().get(0)).getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID0)));
        assertEquals(2, ((Consumer) trace1.getNodes().get(0)).getNodes().size());
        assertEquals(Component.class,
                ((Consumer) trace1.getNodes().get(0)).getNodes().get(0).getClass());
        assertEquals(Component.class,
                ((Consumer) trace1.getNodes().get(0)).getNodes().get(1).getClass());

        // Verify trace2 structure as a 'join' of various ref types
        assertEquals(1, trace2.getNodes().size());
        assertEquals(Consumer.class, trace2.getNodes().get(0).getClass());

        Consumer joinConsumer = (Consumer) trace2.getNodes().get(0);
        assertTrue(joinConsumer.getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID1)));
        assertTrue(joinConsumer.getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.Interaction, TEST_APM_ID2)));
        assertTrue(joinConsumer.getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:0")));
        assertTrue(joinConsumer.getCorrelationIds().contains(
                new CorrelationIdentifier(Scope.CausedBy, trace1.getFragmentId() + ":0:1")));
        assertEquals(1, joinConsumer.getNodes().size());
        assertEquals(Component.class, joinConsumer.getNodes().get(0).getClass());

        Component joinComponent = (Component) joinConsumer.getNodes().get(0);
        assertEquals("JoinForSameTraceInstance", joinComponent.getOperation());
    }

    /**
     * Builds a SpanContext as if extracted from incoming carrier headers, carrying the shared
     * test trace id, the given interaction id and the test transaction name.
     *
     * @param tracer the tracer performing the extraction
     * @param id the interaction (correlation) id to embed
     * @return the extracted span context
     */
    protected SpanContext extractedTraceState(Tracer tracer, String id) {
        Map<String, String> headers = new HashMap<>();
        headers.put(Constants.HAWKULAR_APM_TRACEID, TEST_APM_TRACEID);
        headers.put(Constants.HAWKULAR_APM_ID, id);
        headers.put(Constants.HAWKULAR_APM_TXN, TEST_TXN);
        return tracer.extract(Format.Builtin.TEXT_MAP, new TextMapExtractAdapter(headers));
    }
}
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.cxx.platform.ObjectFileCommonModificationDate; import com.facebook.buck.cxx.platform.ObjectFileScrubbers; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.common.primitives.Ints; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.Map; public class Machos { // http://www.opensource.apple.com/source/xnu/xnu-1699.32.7/EXTERNAL_HEADERS/mach-o/loader.h // File magic static final byte[] MH_MAGIC = Ints.toByteArray(0xFEEDFACE); static final byte[] MH_MAGIC_64 = Ints.toByteArray(0xFEEDFACF); static final byte[] MH_CIGAM = Ints.toByteArray(0xCEFAEDFE); static final byte[] MH_CIGAM_64 = Ints.toByteArray(0xCFFAEDFE); // Map segment load command static final int LC_SEGMENT = 0x1; // Symbol table load command static final int LC_SYMTAB = 0x2; // UUID load command static final int LC_UUID = 0x1B; // Map 64 bit segment load command static final int LC_SEGMENT_64 = 0x19; // http://www.opensource.apple.com/source/xnu/xnu-1699.32.7/EXTERNAL_HEADERS/mach-o/stab.h // Description of object file STAB entries static final short N_OSO = (short) 0x66; private Machos() {} static void setUuid(MappedByteBuffer map, byte[] uuid) 
      throws MachoException {
    int commandsCount = getHeader(map).getCommandsCount();
    // Walk the load commands; each starts with (command, commandSize) as little-endian ints.
    for (int i = 0; i < commandsCount; i++) {
      int command = ObjectFileScrubbers.getLittleEndianInt(map);
      int commandSize = ObjectFileScrubbers.getLittleEndianInt(map);
      if (LC_UUID == command) {
        // Overwrite the UUID payload in place; command + size (8 bytes) already consumed.
        ObjectFileScrubbers.putBytes(map, uuid);
        return;
      } else {
        /* Command body */ ObjectFileScrubbers.getBytes(map, commandSize - 8);
      }
    }
    throw new MachoException("LC_UUID command not found");
  }

  /**
   * Returns true if {@code file} begins with any of the four Mach-O magic numbers
   * (32/64-bit, either byte order).
   *
   * @param file channel positioned over a candidate Mach-O file
   * @throws IOException if the file cannot be mapped
   */
  static boolean isMacho(FileChannel file) throws IOException {
    MappedByteBuffer map = file.map(FileChannel.MapMode.READ_ONLY, 0, MH_MAGIC.length);
    byte[] magic = ObjectFileScrubbers.getBytes(map, MH_MAGIC.length);
    return Arrays.equals(MH_MAGIC, magic)
        || Arrays.equals(MH_CIGAM, magic)
        || Arrays.equals(MH_MAGIC_64, magic)
        || Arrays.equals(MH_CIGAM_64, magic);
  }

  /**
   * Relativize paths in OSO entries.
   *
   * <p>OSO entries point to other files containing debug information. These are generated by the
   * linker as absolute paths.
   *
   * <p>Rewrites the string table in place: each N_OSO symbol string that starts with one of the
   * {@code cellRoots} keys has that absolute prefix replaced by the corresponding relative value,
   * the rewritten strings are appended into a newly compacted string table, and the LC_SYMTAB
   * string-table size plus the owning segment's file size are patched to match. The file is then
   * truncated to the end of the new string table. Assumes the string table is the last thing in
   * the file (checked below).
   *
   * @param file the Mach-O file, opened read-write
   * @param cellRoots map of absolute source roots to the relative paths that replace them
   * @throws MachoException if the file violates the layout this scrubber expects
   */
  static void relativizeOsoSymbols(FileChannel file, ImmutableMap<Path, Path> cellRoots)
      throws IOException, MachoException {
    cellRoots.forEach(
        (from, to) -> {
          Preconditions.checkArgument(from.isAbsolute());
          Preconditions.checkArgument(!to.isAbsolute());
        });

    long size = file.size();
    MappedByteBuffer map = file.map(FileChannel.MapMode.READ_WRITE, 0, size);

    MachoHeader header = getHeader(map);

    // Locations discovered while scanning load commands; 0 means "not found yet".
    int symbolTableOffset = 0;
    int symbolTableCount = 0;
    int stringTableOffset = 0;
    int stringTableSizePosition = 0;
    int stringTableSize = 0;
    boolean symbolTableSegmentFound = false;
    int segmentSizePosition = 0;
    int segmentSize = 0;

    int commandsCount = header.getCommandsCount();
    for (int i = 0; i < commandsCount; i++) {
      int commandStart = map.position(); // NOPMD
      int command = ObjectFileScrubbers.getLittleEndianInt(map);
      int commandSize = ObjectFileScrubbers.getLittleEndianInt(map); // NOPMD
      switch (command) {
        case LC_SYMTAB:
          // Record where the symbol table and string table live, and remember the byte
          // position of the string-table size field so it can be patched later.
          symbolTableOffset = ObjectFileScrubbers.getLittleEndianInt(map);
          symbolTableCount = ObjectFileScrubbers.getLittleEndianInt(map);
          stringTableOffset = ObjectFileScrubbers.getLittleEndianInt(map);
          stringTableSizePosition = map.position();
          stringTableSize = ObjectFileScrubbers.getLittleEndianInt(map);
          symbolTableSegmentFound = true;
          break;
        case LC_SEGMENT:
          /* segment name */ ObjectFileScrubbers.getBytes(map, 16);
          /* vm address */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* vm size */ ObjectFileScrubbers.getLittleEndianInt(map);
          int segmentFileOffset = ObjectFileScrubbers.getLittleEndianInt(map);
          int segmentFileSizePosition = map.position();
          int segmentFileSize = ObjectFileScrubbers.getLittleEndianInt(map);
          /* maximum vm protection */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* initial vm protection */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* number of sections */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* flags */ ObjectFileScrubbers.getLittleEndianInt(map);

          // A segment whose file extent ends exactly at EOF is taken to be the one
          // covering the string table; its size field must also be patched later.
          if (segmentFileOffset + segmentFileSize == size) {
            if (segmentSizePosition != 0) {
              throw new MachoException("multiple map segment commands map string table");
            }
            segmentSizePosition = segmentFileSizePosition;
            segmentSize = segmentFileSize;
          }
          break;
        case LC_SEGMENT_64:
          /* segment name */ ObjectFileScrubbers.getBytes(map, 16);
          /* vm address */ ObjectFileScrubbers.getLittleEndianLong(map);
          /* vm size */ ObjectFileScrubbers.getLittleEndianLong(map);
          long segment64FileOffset = ObjectFileScrubbers.getLittleEndianLong(map);
          int segment64FileSizePosition = map.position();
          long segment64FileSize = ObjectFileScrubbers.getLittleEndianLong(map);
          /* maximum vm protection */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* initial vm protection */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* number of sections */ ObjectFileScrubbers.getLittleEndianInt(map);
          /* flags */ ObjectFileScrubbers.getLittleEndianInt(map);

          if (segment64FileOffset + segment64FileSize == size) {
            if (segmentSizePosition != 0) {
              throw new MachoException("multiple map segment commands map string table");
            }
            segmentSizePosition = segment64FileSizePosition;
            // The size is patched via a 32-bit write below, so reject anything that
            // cannot be represented as a non-negative int.
            if (segment64FileSize > Ints.MAX_POWER_OF_TWO) {
              throw new MachoException("map segment file size too big");
            }
            segmentSize = (int) segment64FileSize;
          }
          break;
      }
      // Skip to the next command regardless of how much of this one was consumed.
      map.position(commandStart + commandSize);
    }

    if (!symbolTableSegmentFound) {
      throw new MachoException("LC_SYMTAB command not found");
    }
    // The compaction below truncates the file at the end of the rewritten string
    // table, so the original string table must already be the file's final region.
    if (stringTableOffset + stringTableSize != size) {
      throw new MachoException("String table does not end at end of file");
    }
    if (stringTableSize == 0) {
      return;
    }
    if (segmentSizePosition == 0 || segmentSize == 0) {
      throw new MachoException("LC_SEGMENT or LC_SEGMENT_64 command for string table not found");
    }

    map.position(stringTableOffset);
    // Sanity-check the conventional 2-byte string-table prologue (" \0"); rewritten
    // strings are appended starting right after it.
    if (map.get() != 0x20) {
      throw new MachoException("First character in the string table is not a space");
    }
    if (map.get() != 0x00) {
      throw new MachoException("Second character in the string table is not a NUL");
    }
    int currentStringTableOffset = map.position();

    // Snapshot the original string table so reads are unaffected by in-place rewrites.
    byte[] stringTableBytes = new byte[stringTableSize];
    map.position(stringTableOffset);
    map.get(stringTableBytes);
    ByteBuffer stringTable = ByteBuffer.wrap(stringTableBytes);

    map.position(symbolTableOffset);

    // Maps old string-table index -> new index, so duplicate references are written once.
    Map<Integer, Integer> strings = new HashMap<>();
    for (int i = 0; i < symbolTableCount; i++) {
      int stringTableIndexPosition = map.position();
      int stringTableIndex = ObjectFileScrubbers.getLittleEndianInt(map);
      byte type = map.get();
      /* section */ map.get();
      /* description */ ObjectFileScrubbers.getLittleEndianShort(map);
      int valuePosition = map.position();
      if (header.getIs64Bit()) {
        /* value */ ObjectFileScrubbers.getLittleEndianLong(map);
      } else {
        /* value */ ObjectFileScrubbers.getLittleEndianInt(map);
      }
      // Indices 0 and 1 address the " \0" prologue, not a real string.
      if (stringTableIndex < 2) {
        continue;
      }

      int position = map.position();
      try {
        int newStringTableIndex;
        if (strings.containsKey(stringTableIndex)) {
          newStringTableIndex = strings.get(stringTableIndex);
        } else {
          stringTable.position(stringTableIndex);
          String string = ObjectFileScrubbers.getAsciiString(stringTable);
          if (type == N_OSO) {
            // Replace any matching absolute cell-root prefix with its relative form.
            for (Map.Entry<Path, Path> root : cellRoots.entrySet()) {
              String rootPrefix = root.getKey() + "/";
              if (string.startsWith(rootPrefix)) {
                String replacementPrefix = root.getValue().toString();
                if (replacementPrefix.equals("")) {
                  replacementPrefix = ".";
                }
                string = replacementPrefix + "/" + string.substring(rootPrefix.length());
              }
            }
            // Normalize the N_OSO value field (a timestamp for OSO entries) to a
            // fixed constant so output is deterministic.
            map.position(valuePosition);
            int lastModifiedValue =
                ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP;
            if (header.getIs64Bit()) {
              ObjectFileScrubbers.putLittleEndianLong(map, lastModifiedValue);
            } else {
              ObjectFileScrubbers.putLittleEndianInt(map, lastModifiedValue);
            }
          }

          // Append the (possibly rewritten) string to the compacted table.
          map.position(currentStringTableOffset);
          ObjectFileScrubbers.putAsciiString(map, string);
          newStringTableIndex = currentStringTableOffset - stringTableOffset;
          currentStringTableOffset = map.position();
          strings.put(stringTableIndex, newStringTableIndex);
        }
        // Point the symbol at its string's new location.
        map.position(stringTableIndexPosition);
        ObjectFileScrubbers.putLittleEndianInt(map, newStringTableIndex);
      } finally {
        // Always restore the cursor to the next symbol-table entry.
        map.position(position);
      }
    }

    // Patch the LC_SYMTAB string-table size and the owning segment's file size,
    // then drop the now-unused tail of the original string table.
    map.position(stringTableSizePosition);
    int newStringTableSize = currentStringTableOffset - stringTableOffset;
    ObjectFileScrubbers.putLittleEndianInt(map, newStringTableSize);

    map.position(segmentSizePosition);
    ObjectFileScrubbers.putLittleEndianInt(
        map, segmentSize + (newStringTableSize - stringTableSize));

    file.truncate(currentStringTableOffset);
  }

  /**
   * Parses the Mach-O header at the buffer's current position, leaving the buffer
   * positioned at the first load command.
   *
   * @throws MachoException if the magic number is not one of the four recognized values
   */
  private static MachoHeader getHeader(MappedByteBuffer map) throws MachoException {
    byte[] magic = ObjectFileScrubbers.getBytes(map, MH_MAGIC.length);
    boolean is64bit;
    if (Arrays.equals(MH_MAGIC, magic) || Arrays.equals(MH_CIGAM, magic)) {
      is64bit = false;
    } else if (Arrays.equals(MH_MAGIC_64, magic) || Arrays.equals(MH_CIGAM_64, magic)) {
      is64bit = true;
    } else {
      throw new MachoException("invalid Mach-O magic");
    }
    /* CPU type */ ObjectFileScrubbers.getLittleEndianInt(map);
    /* CPU subtype */ ObjectFileScrubbers.getLittleEndianInt(map);
    /* File type */ ObjectFileScrubbers.getLittleEndianInt(map);
    int commandsCount = ObjectFileScrubbers.getLittleEndianInt(map);
    /* Commands size */ ObjectFileScrubbers.getLittleEndianInt(map);
    /* Flags */ ObjectFileScrubbers.getLittleEndianInt(map);
    if (is64bit) {
      // 64-bit headers carry one extra reserved word.
      /* reserved */ ObjectFileScrubbers.getLittleEndianInt(map);
    }
    return MachoHeader.of(commandsCount, is64bit);
  }

  /** Thrown when a file does not match the Mach-O layout this scrubber expects. */
  @SuppressWarnings("serial")
  public static class MachoException extends Exception {
    public MachoException(String msg) {
      super(msg);
    }
  }
}
/** */ package rgse.ttc17.emoflon.tgg.task2.impl; import gluemodel.CIM.IEC61970.Core.Terminal; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.moflon.tgg.runtime.impl.AbstractCorrespondenceImpl; import rgse.ttc17.emoflon.tgg.task2.Task2Package; import rgse.ttc17.emoflon.tgg.task2.TerminalToTerminal; // <-- [user defined imports] // [user defined imports] --> /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Terminal To Terminal</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link rgse.ttc17.emoflon.tgg.task2.impl.TerminalToTerminalImpl#getSource <em>Source</em>}</li> * <li>{@link rgse.ttc17.emoflon.tgg.task2.impl.TerminalToTerminalImpl#getTarget <em>Target</em>}</li> * </ul> * </p> * * @generated */ public class TerminalToTerminalImpl extends AbstractCorrespondenceImpl implements TerminalToTerminal { /** * The cached value of the '{@link #getSource() <em>Source</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSource() * @generated * @ordered */ protected Terminal source; /** * The cached value of the '{@link #getTarget() <em>Target</em>}' reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTarget() * @generated * @ordered */ protected outagePreventionJointarget.Terminal target; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TerminalToTerminalImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return Task2Package.Literals.TERMINAL_TO_TERMINAL; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Terminal getSource() { if (source != null && source.eIsProxy()) { InternalEObject oldSource = (InternalEObject) source; source = (Terminal) eResolveProxy(oldSource); if (source != oldSource) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, Task2Package.TERMINAL_TO_TERMINAL__SOURCE, oldSource, source)); } } return source; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Terminal basicGetSource() { return source; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setSource(Terminal newSource) { Terminal oldSource = source; source = newSource; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Task2Package.TERMINAL_TO_TERMINAL__SOURCE, oldSource, source)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public outagePreventionJointarget.Terminal getTarget() { if (target != null && target.eIsProxy()) { InternalEObject oldTarget = (InternalEObject) target; target = (outagePreventionJointarget.Terminal) eResolveProxy(oldTarget); if (target != oldTarget) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, Task2Package.TERMINAL_TO_TERMINAL__TARGET, oldTarget, target)); } } return target; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public outagePreventionJointarget.Terminal basicGetTarget() { return target; } /** * <!-- begin-user-doc --> * <!-- 
end-user-doc --> * @generated */ public void setTarget(outagePreventionJointarget.Terminal newTarget) { outagePreventionJointarget.Terminal oldTarget = target; target = newTarget; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Task2Package.TERMINAL_TO_TERMINAL__TARGET, oldTarget, target)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case Task2Package.TERMINAL_TO_TERMINAL__SOURCE: if (resolve) return getSource(); return basicGetSource(); case Task2Package.TERMINAL_TO_TERMINAL__TARGET: if (resolve) return getTarget(); return basicGetTarget(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case Task2Package.TERMINAL_TO_TERMINAL__SOURCE: setSource((Terminal) newValue); return; case Task2Package.TERMINAL_TO_TERMINAL__TARGET: setTarget((outagePreventionJointarget.Terminal) newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case Task2Package.TERMINAL_TO_TERMINAL__SOURCE: setSource((Terminal) null); return; case Task2Package.TERMINAL_TO_TERMINAL__TARGET: setTarget((outagePreventionJointarget.Terminal) null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case Task2Package.TERMINAL_TO_TERMINAL__SOURCE: return source != null; case Task2Package.TERMINAL_TO_TERMINAL__TARGET: return target != null; } return super.eIsSet(featureID); } // <-- [user code injected with eMoflon] // [user code injected with eMoflon] --> } //TerminalToTerminalImpl
/*
 * Copyright 2016 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.util;

import com.googlecode.junit.ext.JunitExtRunner;
import com.googlecode.junit.ext.RunIf;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.junitext.DatabaseChecker;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.File;
import java.util.Properties;

import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;

/**
 * Tests for {@code SystemEnvironment}. Many tests mutate global JVM system
 * properties; {@link #before()} snapshots them with a deep clone and
 * {@link #after()} restores the snapshot, so tests must run through the
 * fixture to avoid leaking state. Several "shouldCache..." tests pin the
 * behavior that a value, once read, is cached and ignores later property
 * changes on the same SystemEnvironment instance.
 */
@RunWith(JunitExtRunner.class)
public class SystemEnvironmentTest {
    // Deep-clones java.util.Properties so the restore in after() is exact.
    static final Cloner CLONER = new Cloner();
    private Properties original;
    private SystemEnvironment systemEnvironment;

    @Before
    public void before() {
        original = CLONER.deepClone(System.getProperties());
        systemEnvironment = new SystemEnvironment();
    }

    @After
    public void after() {
        System.setProperties(original);
        // Clears the cached config-merge flag so it is re-read by later tests.
        new SystemEnvironment().reset(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE);
    }

    @Test
    public void shouldDisableNewFeaturesByDefault() {
        assertThat(systemEnvironment.isFeatureEnabled("cruise.experimental.feature.some-feature"), is(false));
    }

    @Test
    public void shouldBeAbletoEnableAllNewFeatures() {
        Properties properties = new Properties();
        properties.setProperty(SystemEnvironment.CRUISE_EXPERIMENTAL_ENABLE_ALL, "true");
        SystemEnvironment systemEnvironment = new SystemEnvironment(properties);
        assertThat(systemEnvironment.isFeatureEnabled("cruise.experimental.feature.some-feature"), is(true));
    }

    @Test
    public void shouldFindJettyConfigInTheConfigDir() {
        assertThat(systemEnvironment.getJettyConfigFile(), is(new File(systemEnvironment.getConfigDir(), "jetty.xml")));
        systemEnvironment.set(SystemEnvironment.JETTY_XML_FILE_NAME, "jetty-old.xml");
        assertThat(systemEnvironment.getJettyConfigFile(), is(new File(systemEnvironment.getConfigDir(), "jetty-old.xml")));
    }

    @Test
    public void shouldUnderstandOperatingSystem() {
        assertThat(systemEnvironment.getOperatingSystemName(), is(System.getProperty("os.name")));
    }

    @Test
    public void shouldUnderstandWetherToUseCompressedJs() throws Exception {
        assertThat(systemEnvironment.useCompressedJs(), is(true));
        systemEnvironment.setProperty(GoConstants.USE_COMPRESSED_JAVASCRIPT, Boolean.FALSE.toString());
        assertThat(systemEnvironment.useCompressedJs(), is(false));
        systemEnvironment.setProperty(GoConstants.USE_COMPRESSED_JAVASCRIPT, Boolean.TRUE.toString());
        assertThat(systemEnvironment.useCompressedJs(), is(true));
    }

    @Test
    public void shouldHaveBaseUrl() {
        assertThat(systemEnvironment.getBaseUrlForShine(), is("http://localhost:8153/go"));
    }

    @Test
    public void shouldHaveBaseUrlSsl() {
        assertThat(systemEnvironment.getBaseSslUrlForShine(), is("https://localhost:8154/go"));
    }

    // The following "shouldCache..." tests assert first-read-wins caching:
    // changing the system property after the first access has no effect.
    @Test
    public void shouldCacheAgentConnectionSystemPropertyOnFirstAccess() {
        System.setProperty(SystemEnvironment.AGENT_CONNECTION_TIMEOUT_IN_SECONDS, "1");
        assertThat(systemEnvironment.getAgentConnectionTimeout(), is(1));
        System.setProperty(SystemEnvironment.AGENT_CONNECTION_TIMEOUT_IN_SECONDS, "2");
        assertThat(systemEnvironment.getAgentConnectionTimeout(), is(1));
    }

    @Test
    public void shouldCacheSslPortSystemPropertyOnFirstAccess() {
        System.setProperty(SystemEnvironment.CRUISE_SERVER_SSL_PORT, "8154");
        assertThat(systemEnvironment.getSslServerPort(), is(8154));
        System.setProperty(SystemEnvironment.CRUISE_SERVER_SSL_PORT, "20000");
        assertThat(systemEnvironment.getSslServerPort(), is(8154));
    }

    @Test
    public void shouldCacheConfigDirOnFirstAccess() {
        assertThat(systemEnvironment.getConfigDir(), is("config"));
        System.setProperty(SystemEnvironment.CONFIG_DIR_PROPERTY, "raghu");
        assertThat(systemEnvironment.getConfigDir(), is("config"));
    }

    @Test
    public void shouldCacheConfigFilePathOnFirstAccess() {
        assertThat(systemEnvironment.configDir(), is(new File("config")));
        System.setProperty(SystemEnvironment.CONFIG_FILE_PROPERTY, "foo");
        assertThat(systemEnvironment.getConfigDir(), is("config"));
    }

    @Test
    public void shouldCacheDatabaseDiskFullOnFirstAccess() {
        System.setProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT, "100");
        assertThat(systemEnvironment.getDatabaseDiskSpaceFullLimit(), is(100L));
        System.setProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT, "50M");
        assertThat(systemEnvironment.getDatabaseDiskSpaceFullLimit(), is(100L));
    }

    @Test
    public void shouldCacheArtifactDiskFullOnFirstAccess() {
        System.setProperty(SystemEnvironment.ARTIFACT_FULL_SIZE_LIMIT, "100");
        assertThat(systemEnvironment.getArtifactReposiotryFullLimit(), is(100L));
        System.setProperty(SystemEnvironment.ARTIFACT_FULL_SIZE_LIMIT, "50M");
        assertThat(systemEnvironment.getArtifactReposiotryFullLimit(), is(100L));
    }

    @Test
    public void shouldClearCachedValuesOnSettingNewProperty() {
        // setProperty on the SystemEnvironment itself (unlike System.setProperty)
        // invalidates the cached value.
        System.setProperty(SystemEnvironment.ARTIFACT_FULL_SIZE_LIMIT, "100");
        assertThat(systemEnvironment.getArtifactReposiotryFullLimit(), is(100L));
        systemEnvironment.setProperty(SystemEnvironment.ARTIFACT_FULL_SIZE_LIMIT, "50");
        assertThat(systemEnvironment.getArtifactReposiotryFullLimit(), is(50L));
    }

    @Test
    public void shouldPrefixApplicationPathWithContext() {
        assertThat(systemEnvironment.pathFor("foo/bar"), is("/go/foo/bar"));
        assertThat(systemEnvironment.pathFor("/baz/quux"), is("/go/baz/quux"));
    }

    @Test
    public void shouldUnderstandConfigRepoDir() {
        Properties properties = new Properties();
        SystemEnvironment systemEnvironment = new SystemEnvironment(properties);
        assertThat(systemEnvironment.getConfigRepoDir(), is(new File("db/config.git")));
        properties.setProperty(SystemEnvironment.CRUISE_CONFIG_REPO_DIR, "foo/bar.git");
        assertThat(systemEnvironment.getConfigRepoDir(), is(new File("foo/bar.git")));
    }

    @Test
    public void shouldUnderstandMaterialUpdateInterval() {
        assertThat(systemEnvironment.getMaterialUpdateIdleInterval(), is(60000L));
        systemEnvironment.setProperty(SystemEnvironment.MATERIAL_UPDATE_IDLE_INTERVAL_PROPERTY, "20");
        assertThat(systemEnvironment.getMaterialUpdateIdleInterval(), is(20L));
    }

    @Test
    public void shouldUnderstandH2CacheSize() {
        assertThat(systemEnvironment.getCruiseDbCacheSize(), is(String.valueOf(128 * 1024)));
        System.setProperty(SystemEnvironment.CRUISE_DB_CACHE_SIZE, String.valueOf(512 * 1024));
        assertThat(systemEnvironment.getCruiseDbCacheSize(), is(String.valueOf(512 * 1024)));
    }

    @Test
    public void shouldReturnTheJobWarningLimit() {
        // Threshold property is expressed in minutes; getter returns milliseconds.
        assertThat(systemEnvironment.getUnresponsiveJobWarningThreshold(), is(5 * 60 * 1000L));
        System.setProperty(SystemEnvironment.UNRESPONSIVE_JOB_WARNING_THRESHOLD, "30");
        assertThat(systemEnvironment.getUnresponsiveJobWarningThreshold(), is(30 * 60 * 1000L));
    }

    @Test
    public void shouldReturnTheDefaultValueForActiveMqUseJMX() {
        assertThat(systemEnvironment.getActivemqUseJmx(), is(false));
        System.setProperty(SystemEnvironment.ACTIVEMQ_USE_JMX, "true");
        assertThat(systemEnvironment.getActivemqUseJmx(), is(true));
    }

    @Test
    public void shouldGetPluginEnabledStatusAsFalseIfNoEnvironmentVariableSet() {
        assertThat(systemEnvironment.pluginStatus(), is(GoConstants.ENABLE_PLUGINS_RESPONSE_FALSE));
    }

    @Test
    public void shouldGetPluginEnabledStatusAsFalseIfPropertyIsSetToN() {
        System.setProperty(GoConstants.ENABLE_PLUGINS_PROPERTY, "N");
        assertThat(systemEnvironment.pluginStatus(), is(GoConstants.ENABLE_PLUGINS_RESPONSE_FALSE));
    }

    @Test
    public void shouldGetPluginEnabledStatusAsTrueIfPropertyIsSetToY() {
        System.setProperty(GoConstants.ENABLE_PLUGINS_PROPERTY, "Y");
        assertThat(systemEnvironment.pluginStatus(), is(GoConstants.ENABLE_PLUGINS_RESPONSE_TRUE));
    }

    @Test
    public void shouldReturnTrueWhenPluginsAreEnabled() {
        System.setProperty(GoConstants.ENABLE_PLUGINS_PROPERTY, "Y");
        assertThat(systemEnvironment.isPluginsEnabled(), is(true));
    }

    @Test
    public void shouldReturnFalseWhenPluginsAreNotEnabled() {
        System.setProperty(GoConstants.ENABLE_PLUGINS_PROPERTY, "N");
        assertThat(systemEnvironment.isPluginsEnabled(), is(false));
    }

    @Test
    public void shouldReadAgentBootstrapperVersion() {
        try {
            System.setProperty(GoConstants.AGENT_LAUNCHER_VERSION, "12.2");
            assertThat(systemEnvironment.getAgentLauncherVersion(), is("12.2"));
        } finally {
            System.setProperty(GoConstants.AGENT_LAUNCHER_VERSION, "");
        }
    }

    @Test
    public void shouldDefaultAgentBootstrapperVersionToEmptyString() {
        assertThat(systemEnvironment.getAgentLauncherVersion(), is(""));
    }

    @Test
    public void shouldResolveRevisionsForDependencyGraph_byDefault() {
        assertThat(System.getProperty(SystemEnvironment.RESOLVE_FANIN_REVISIONS), nullValue());
        assertThat(new SystemEnvironment().enforceRevisionCompatibilityWithUpstream(), is(true));
    }

    @Test
    public void should_NOT_resolveRevisionsForDependencyGraph_whenExplicitlyDisabled() {
        System.setProperty(SystemEnvironment.RESOLVE_FANIN_REVISIONS, SystemEnvironment.CONFIGURATION_NO);
        assertThat(new SystemEnvironment().enforceRevisionCompatibilityWithUpstream(), is(false));
    }

    @Test
    public void shouldResolveRevisionsForDependencyGraph_whenEnabledExplicitly() {
        System.setProperty(SystemEnvironment.RESOLVE_FANIN_REVISIONS, SystemEnvironment.CONFIGURATION_YES);
        assertThat(new SystemEnvironment().enforceRevisionCompatibilityWithUpstream(), is(true));
    }

    @Test
    public void should_cache_whetherToResolveRevisionsForDependencyGraph() {//because access to properties is synchronized
        assertThat(System.getProperty(SystemEnvironment.RESOLVE_FANIN_REVISIONS), nullValue());
        SystemEnvironment systemEnvironment = new SystemEnvironment();
        assertThat(systemEnvironment.enforceRevisionCompatibilityWithUpstream(), is(true));
        System.setProperty(SystemEnvironment.RESOLVE_FANIN_REVISIONS, SystemEnvironment.CONFIGURATION_NO);
        assertThat(systemEnvironment.enforceRevisionCompatibilityWithUpstream(), is(true));
    }

    @Test
    public void shouldTurnOnConfigMergeFeature_byDefault() {
        assertThat(System.getProperty(SystemEnvironment.ENABLE_CONFIG_MERGE_PROPERTY), nullValue());
        assertThat(new SystemEnvironment().get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE), is(true));
    }

    @Test
    public void should_NOT_TurnOnConfigMergeFeature_whenExplicitlyDisabled() {
        System.setProperty(SystemEnvironment.ENABLE_CONFIG_MERGE_PROPERTY, SystemEnvironment.CONFIGURATION_NO);
        assertThat(new SystemEnvironment().get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE), is(false));
    }

    @Test
    public void shouldTurnOnConfigMergeFeature_whenEnabledExplicitly() {
        System.setProperty(SystemEnvironment.ENABLE_CONFIG_MERGE_PROPERTY, SystemEnvironment.CONFIGURATION_YES);
        assertThat(new SystemEnvironment().get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE), is(true));
    }

    @Test
    public void should_cache_whetherToTurnOnConfigMergeFeature() {//because access to properties is synchronized
        assertThat(System.getProperty(SystemEnvironment.ENABLE_CONFIG_MERGE_PROPERTY), nullValue());
        assertThat(new SystemEnvironment().get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE), is(true));
        System.setProperty(SystemEnvironment.ENABLE_CONFIG_MERGE_PROPERTY, SystemEnvironment.CONFIGURATION_NO);
        assertThat(new SystemEnvironment().get(SystemEnvironment.ENABLE_CONFIG_MERGE_FEATURE), is(true));
    }

    @Test
    public void shouldGetTfsSocketTimeOut() {
        assertThat(systemEnvironment.getTfsSocketTimeout(), is(SystemEnvironment.TFS_SOCKET_TIMEOUT_IN_MILLISECONDS));
        System.setProperty(SystemEnvironment.TFS_SOCKET_TIMEOUT_PROPERTY, "100000000");
        assertThat(systemEnvironment.getTfsSocketTimeout(), is(100000000));
    }

    @Test
    public void shouldGiveINFOAsTheDefaultLevelOfAPluginWithoutALoggingLevelSet() throws Exception {
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-1"), is(Level.INFO));
    }

    @Test
    public void shouldGiveINFOAsTheDefaultLevelOfAPluginWithAnInvalidLoggingLevelSet() throws Exception {
        System.setProperty("plugin.some-plugin-2.log.level", "SOME-INVALID-LOG-LEVEL");
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-2"), is(Level.INFO));
    }

    @Test
    public void shouldGiveTheLevelOfAPluginWithALoggingLevelSet() throws Exception {
        System.setProperty("plugin.some-plugin-3.log.level", "DEBUG");
        System.setProperty("plugin.some-plugin-4.log.level", "INFO");
        System.setProperty("plugin.some-plugin-5.log.level", "WARN");
        System.setProperty("plugin.some-plugin-6.log.level", "ERROR");
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-3"), is(Level.DEBUG));
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-4"), is(Level.INFO));
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-5"), is(Level.WARN));
        assertThat(systemEnvironment.pluginLoggingLevel("some-plugin-6"), is(Level.ERROR));
    }

    // Only runs when the H2 database is the configured backend.
    @Test
    @RunIf(value = DatabaseChecker.class, arguments = {DatabaseChecker.H2})
    public void shouldGetGoDatabaseProvider() {
        assertThat("default provider should be h2db", systemEnvironment.getDatabaseProvider(), is("com.thoughtworks.go.server.database.H2Database"));
        System.setProperty("go.database.provider", "foo");
        assertThat(systemEnvironment.getDatabaseProvider(), is("foo"));
    }

    @Test
    public void shouldFindGoServerStatusToBeActiveByDefault() throws Exception {
        assertThat(systemEnvironment.isServerActive(), is(true));
    }

    @Test
    public void shouldPutServerInActiveMode() throws Exception {
        String key = "go.server.state";
        try {
            System.setProperty(key, "passive");
            systemEnvironment.switchToActiveState();
            assertThat(systemEnvironment.isServerActive(), is(true));
        } finally {
            System.clearProperty(key);
        }
    }

    @Test
    public void shouldPutServerInPassiveMode() throws Exception {
        String key = "go.server.state";
        try {
            System.setProperty(key, "active");
            systemEnvironment.switchToPassiveState();
            assertThat(systemEnvironment.isServerActive(), is(false));
        } finally {
            System.clearProperty(key);
        }
    }

    @Test
    public void shouldFindGoServerStatusToBePassive() throws Exception {
        try {
            SystemEnvironment systemEnvironment = new SystemEnvironment();
            System.setProperty("go.server.state", "passive");
            assertThat(systemEnvironment.isServerActive(), is(false));
        } finally {
            System.clearProperty("go.server.state");
        }
    }

    @Test
    public void shouldUseJetty9ByDefault() {
        assertThat(systemEnvironment.get(SystemEnvironment.APP_SERVER), is(SystemEnvironment.JETTY9));
        assertThat(systemEnvironment.usingJetty9(), is(true));
        systemEnvironment.set(SystemEnvironment.APP_SERVER, "JETTY6");
        assertThat(systemEnvironment.usingJetty9(), is(false));
    }

    @Test
    public void shouldGetDefaultLandingPageAsPipelines() throws Exception {
        String landingPage = systemEnvironment.landingPage();
        assertThat(landingPage, is("/pipelines"));
    }

    @Test
    public void shouldAbleToOverrideDefaultLandingPageAsPipelines() throws Exception {
        try {
            System.setProperty("go.landing.page", "/admin/pipelines");
            String landingPage = systemEnvironment.landingPage();
            assertThat(landingPage, is("/admin/pipelines"));
        } finally {
            System.clearProperty("go.landing.page");
        }
    }

    @Test
    public void shouldAllowSSLConfigurationByDefault() {
        assertThat(SystemEnvironment.GO_SSL_CONFIG_ALLOW.propertyName(), is("go.ssl.config.allow"));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_CONFIG_ALLOW), is(true));
        System.setProperty(SystemEnvironment.GO_SSL_CONFIG_ALLOW.propertyName(), "false");
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_CONFIG_ALLOW), is(false));
    }

    @Test
    public void shouldSetTLS1Dot2AsDefaultTransportProtocolForAgent() {
        assertThat(SystemEnvironment.GO_SSL_TRANSPORT_PROTOCOL_TO_BE_USED_BY_AGENT.propertyName(), is("go.ssl.agent.protocol"));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_TRANSPORT_PROTOCOL_TO_BE_USED_BY_AGENT), is("TLSv1.2"));
        System.setProperty(SystemEnvironment.GO_SSL_TRANSPORT_PROTOCOL_TO_BE_USED_BY_AGENT.propertyName(), "SSL");
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_TRANSPORT_PROTOCOL_TO_BE_USED_BY_AGENT), is("SSL"));
    }

    @Test
    public void shouldGetIncludedCiphersForSSLConfig() {
        assertThat(SystemEnvironment.GO_SSL_INCLUDE_CIPHERS.propertyName(), is("go.ssl.ciphers.include"));
        assertThat(SystemEnvironment.GO_SSL_INCLUDE_CIPHERS instanceof SystemEnvironment.GoStringArraySystemProperty, is(true));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_INCLUDE_CIPHERS), is(nullValue()));
    }

    @Test
    public void shouldGetExcludedCiphersForSSLConfig() {
        assertThat(SystemEnvironment.GO_SSL_EXCLUDE_CIPHERS.propertyName(), is("go.ssl.ciphers.exclude"));
        assertThat(SystemEnvironment.GO_SSL_EXCLUDE_CIPHERS instanceof SystemEnvironment.GoStringArraySystemProperty, is(true));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_EXCLUDE_CIPHERS), is(nullValue()));
    }

    @Test
    public void shouldGetExcludedProtocolsForSSLConfig() {
        assertThat(SystemEnvironment.GO_SSL_EXCLUDE_PROTOCOLS.propertyName(), is("go.ssl.protocols.exclude"));
        assertThat(SystemEnvironment.GO_SSL_EXCLUDE_PROTOCOLS instanceof SystemEnvironment.GoStringArraySystemProperty, is(true));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_EXCLUDE_PROTOCOLS), is(nullValue()));
    }

    @Test
    public void shouldGetIncludedProtocolsForSSLConfig() {
        assertThat(SystemEnvironment.GO_SSL_INCLUDE_PROTOCOLS.propertyName(), is("go.ssl.protocols.include"));
        assertThat(SystemEnvironment.GO_SSL_INCLUDE_PROTOCOLS instanceof SystemEnvironment.GoStringArraySystemProperty, is(true));
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_INCLUDE_PROTOCOLS), is(nullValue()));
    }

    @Test
    public void shouldGetRenegotiationAllowedFlagForSSLConfig() {
        assertThat(SystemEnvironment.GO_SSL_RENEGOTIATION_ALLOWED.propertyName(), is("go.ssl.renegotiation.allowed"));
        boolean defaultValue = true;
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_RENEGOTIATION_ALLOWED), is(defaultValue));
        System.clearProperty("go.ssl.renegotiation.allowed");
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_RENEGOTIATION_ALLOWED), is(defaultValue));
        System.setProperty("go.ssl.renegotiation.allowed", "false");
        assertThat(systemEnvironment.get(SystemEnvironment.GO_SSL_RENEGOTIATION_ALLOWED), is(false));
    }

    @Test
    public void ShouldRemoveWhiteSpacesForStringArraySystemProperties() {
        String[] defaultValue = {"junk", "funk"};
        String propertyName = "property.name";
        SystemEnvironment.GoStringArraySystemProperty property = new SystemEnvironment.GoStringArraySystemProperty(propertyName, defaultValue);
        System.setProperty(propertyName, " foo , bar ");
        assertThat(systemEnvironment.get(property).length, is(2));
        assertThat(systemEnvironment.get(property)[0], is("foo"));
        assertThat(systemEnvironment.get(property)[1], is("bar"));
    }

    @Test
    public void ShouldUseDefaultValueForStringArraySystemPropertiesWhenTheValueIsSetToEmptyString() {
        String[] defaultValue = {"junk", "funk"};
        String propertyName = "property.name";
        SystemEnvironment.GoStringArraySystemProperty property = new SystemEnvironment.GoStringArraySystemProperty(propertyName, defaultValue);
        System.clearProperty(propertyName);
        assertThat(systemEnvironment.get(property), is(defaultValue));
        System.setProperty(propertyName, " ");
        assertThat(systemEnvironment.get(property), is(defaultValue));
    }

    @Test
    public void shouldSetConfigRepoGCToBeAggressiveByDefault() {
        assertThat(new SystemEnvironment().get(SystemEnvironment.GO_CONFIG_REPO_GC_AGGRESSIVE), is(true));
    }

    @Test
    public void shouldTurnOffPeriodicGCByDefault() {
        assertThat(new SystemEnvironment().get(SystemEnvironment.GO_CONFIG_REPO_PERIODIC_GC), is(false));
    }

    @Test
    public void shouldGetUpdateServerPublicKeyFilePath() {
        assertThat(SystemEnvironment.GO_UPDATE_SERVER_PUBLIC_KEY_FILE_NAME.propertyName(), is("go.update.server.public.key.file.name"));
        System.setProperty("go.update.server.public.key.file.name", "public_key");
        assertThat(systemEnvironment.getUpdateServerPublicKeyPath(), is(systemEnvironment.getConfigDir() + "/public_key"));
    }

    @Test
    public void shouldGetUpdateServerUrl() {
        assertThat(SystemEnvironment.GO_UPDATE_SERVER_URL.propertyName(), is("go.update.server.url"));
        System.setProperty("go.update.server.url", "http://update_server_url");
        assertThat(systemEnvironment.getUpdateServerUrl(), is("http://update_server_url"));
    }

    @Test
    public void shouldCheckIfGOUpdatesIsEnabled() {
        assertThat(SystemEnvironment.GO_CHECK_UPDATES.propertyName(), is("go.check.updates"));
        assertTrue(systemEnvironment.isGOUpdateCheckEnabled());
        System.setProperty("go.check.updates", "false");
        assertFalse(systemEnvironment.isGOUpdateCheckEnabled());
    }

    @Test
    public void shouldReturnFalseWhenShineEnabledIsNotSet() {
        assertFalse(systemEnvironment.isShineEnabled());
    }
}
/** * Copyright 2015 Confluent Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package io.confluent.kafkarest; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.Vector; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import io.confluent.kafkarest.entities.TopicPartitionOffset; import kafka.common.MessageStreamsExistException; import kafka.consumer.KafkaStream; import kafka.javaapi.consumer.ConsumerConnector; import kafka.message.MessageAndMetadata; import kafka.serializer.Decoder; /** * Tracks all the state for a consumer. This class is abstract in order to support multiple * serialization formats. Implementations must provide decoders and a method to convert * {@code KafkaMessageAndMetadata<K,V>} values to ConsumerRecords that can be returned to the client * (including translation if the decoded Kafka consumer type and ConsumerRecord types differ). 
*/ public abstract class ConsumerState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> implements Comparable<ConsumerState> { private KafkaRestConfig config; private ConsumerInstanceId instanceId; private ConsumerConnector consumer; private Map<String, ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT>> topics; private long expiration; // A read/write lock on the ConsumerState allows concurrent readTopic calls, but allows // commitOffsets to safely lock the entire state in order to get correct information about all // the topic/stream's current offset state. All operations on individual TopicStates must be // synchronized at that level as well (so, e.g., readTopic may modify a single TopicState, but // only needs read access to the ConsumerState). private ReadWriteLock lock; public ConsumerState( KafkaRestConfig config, ConsumerInstanceId instanceId, ConsumerConnector consumer ) { this.config = config; this.instanceId = instanceId; this.consumer = consumer; this.topics = new HashMap<>(); this.expiration = config.getTime().milliseconds() + config.getInt(KafkaRestConfig.CONSUMER_INSTANCE_TIMEOUT_MS_CONFIG); this.lock = new ReentrantReadWriteLock(); } public ConsumerInstanceId getId() { return instanceId; } /** * Gets the key decoder for the Kafka consumer. */ protected abstract Decoder<KafkaKeyT> getKeyDecoder(); /** * Gets the value decoder for the Kafka consumer. */ protected abstract Decoder<KafkaValueT> getValueDecoder(); /** * Converts a MessageAndMetadata using the Kafka decoder types into a ConsumerRecord using the * client's requested types. While doing so, computes the approximate size of the message in * bytes, which is used to track the approximate total payload size for consumer read responses to * determine when to trigger the response. 
*/ public abstract ConsumerRecordAndSize<ClientKeyT, ClientValueT> createConsumerRecord( MessageAndMetadata<KafkaKeyT, KafkaValueT> msg ); /** * Start a read on the given topic, enabling a read lock on this ConsumerState and a full lock on * the ConsumerTopicState. */ public void startRead( ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> topicState ) { lock.readLock().lock(); topicState.lock(); } /** * Finish a read request, releasing the lock on the ConsumerTopicState and the read lock on this * ConsumerState. */ public void finishRead( ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> topicState ) { topicState.unlock(); lock.readLock().unlock(); } public List<TopicPartitionOffset> commitOffsets() { lock.writeLock().lock(); try { consumer.commitOffsets(); List<TopicPartitionOffset> result = getOffsets(true); return result; } finally { lock.writeLock().unlock(); } } public void close() { lock.writeLock().lock(); try { consumer.shutdown(); // Marks this state entry as no longer valid because the consumer group is being destroyed. 
consumer = null; topics = null; } finally { lock.writeLock().unlock(); } } public boolean expired(long nowMs) { return expiration <= nowMs; } public void updateExpiration() { this.expiration = config.getTime().milliseconds() + config.getInt(KafkaRestConfig.CONSUMER_INSTANCE_TIMEOUT_MS_CONFIG); } public long untilExpiration(long nowMs) { return this.expiration - nowMs; } public KafkaRestConfig getConfig() { return config; } public void setConfig(KafkaRestConfig config) { this.config = config; } @Override public int compareTo(ConsumerState o) { if (this.expiration < o.expiration) { return -1; } else if (this.expiration == o.expiration) { return 0; } else { return 1; } } public ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> getOrCreateTopicState( String topic ) { // Try getting the topic only using the read lock lock.readLock().lock(); try { if (topics == null) { return null; } ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> state = topics.get(topic); if (state != null) { return state; } } finally { lock.readLock().unlock(); } lock.writeLock().lock(); try { if (topics == null) { return null; } ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> state = topics.get(topic); if (state != null) { return state; } Map<String, Integer> subscriptions = new TreeMap<String, Integer>(); subscriptions.put(topic, 1); Map<String, List<KafkaStream<KafkaKeyT, KafkaValueT>>> streamsByTopic = consumer.createMessageStreams(subscriptions, getKeyDecoder(), getValueDecoder()); KafkaStream<KafkaKeyT, KafkaValueT> stream = streamsByTopic.get(topic).get(0); state = new ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT>(stream); topics.put(topic, state); return state; } catch (MessageStreamsExistException e) { throw Errors.consumerAlreadySubscribedException(); } finally { lock.writeLock().unlock(); } } /** * Gets a list of TopicPartitionOffsets describing the current state of consumer offsets, possibly * updating the 
commmitted offset record. This method is not synchronized. * * @param updateCommitOffsets if true, updates committed offsets to be the same as the consumed * offsets. */ private List<TopicPartitionOffset> getOffsets(boolean updateCommitOffsets) { List<TopicPartitionOffset> result = new Vector<TopicPartitionOffset>(); for (Map.Entry<String, ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT>> entry : topics.entrySet()) { ConsumerTopicState<KafkaKeyT, KafkaValueT, ClientKeyT, ClientValueT> state = entry.getValue(); state.lock(); try { for (Map.Entry<Integer, Long> partEntry : state.getConsumedOffsets().entrySet()) { Integer partition = partEntry.getKey(); Long offset = partEntry.getValue(); Long committedOffset = 0L; if (updateCommitOffsets) { state.getCommittedOffsets().put(partition, offset); committedOffset = offset; } else { committedOffset = state.getCommittedOffsets().get(partition); } result.add( new TopicPartitionOffset( entry.getKey(), partition, offset, (committedOffset == null ? -1 : committedOffset) ) ); } } finally { state.unlock(); } } return result; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.ml.math.primitives.matrix;

import java.io.Externalizable;
import java.util.Spliterator;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.Destroyable;
import org.apache.ignite.ml.math.MetaAttributes;
import org.apache.ignite.ml.math.StorageOpsMetrics;
import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException;
import org.apache.ignite.ml.math.exceptions.math.CardinalityException;
import org.apache.ignite.ml.math.exceptions.math.IndexException;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
import org.apache.ignite.ml.math.functions.IgniteFunction;
import org.apache.ignite.ml.math.functions.IgniteTriFunction;
import org.apache.ignite.ml.math.functions.IntIntToDoubleFunction;
import org.apache.ignite.ml.math.primitives.vector.Vector;

/**
 * A matrix interface.
 * <p>
 * Based on its flavor it can have vastly different implementations tailored
 * for different types of data (e.g. dense vs. sparse), different sizes of data or different operation
 * optimizations.</p>
 * <p>
 * Note also that not all operations can be supported by all underlying implementations.
If an operation is not * supported a {@link UnsupportedOperationException} is thrown. This exception can also be thrown in partial cases * where an operation is unsupported only in special cases, e.g. where a given operation cannot be deterministically * completed in polynomial time.</p> * <p> * Based on ideas from <a href="http://mahout.apache.org/">Apache Mahout</a>.</p> */ public interface Matrix extends MetaAttributes, Externalizable, StorageOpsMetrics, Destroyable { /** * Holder for matrix's element. */ interface Element { /** * Gets element's value. * * @return The value of this matrix element. */ double get(); /** * Gets element's row index. * * @return The row index of this element. */ int row(); /** * Gets element's column index. * * @return The column index of this element. */ int column(); /** * Sets element's value. * * @param val Value to set. */ void set(double val); } /** * Gets the maximum value in this matrix. * * @return Maximum value in this matrix. */ public double maxValue(); /** * Gets the minimum value in this matrix. * * @return Minimum value in this matrix. */ public double minValue(); /** * Gets the maximum element in this matrix. * * @return Maximum element in this matrix. */ public Element maxElement(); /** * Gets the minimum element in this matrix. * * @return Minimum element in this matrix. */ public Element minElement(); /** * Gets the matrix's element at the given coordinates. * * @param row Row index. * @param col Column index. * @return Element at the given coordinates. */ public Element getElement(int row, int col); /** * Swaps two rows in this matrix. * * @param row1 Row #1. * @param row2 Row #2. * @return This matrix. */ public Matrix swapRows(int row1, int row2); /** * Swaps two columns in this matrix. * * @param col1 Column #1. * @param col2 Column #2. * @return This matrix. */ public Matrix swapColumns(int col1, int col2); /** * Assigns given value to all elements of this matrix. 
* * @param val Value to assign to all elements. * @return This matrix. */ public Matrix assign(double val); /** * Assigns given values to this matrix. * * @param vals Values to assign. * @return This matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix assign(double[][] vals); /** * Assigns values from given matrix to this matrix. * * @param mtx Matrix to assign to this matrix. * @return This matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix assign(Matrix mtx); /** * Assigns each matrix element to the value generated by given function. * * @param fun Function that takes the row and column and returns the value to assign. * @return This matrix. */ public Matrix assign(IntIntToDoubleFunction fun); /** * Maps all values in this matrix through a given function. * * @param fun Mapping function. * @return This matrix. */ public Matrix map(IgniteDoubleFunction<Double> fun); /** * Maps all values in this matrix through a given function. * <p> * For this matrix {@code A}, argument matrix {@code B} and the * function {@code F} this method maps every cell {@code x, y} as: * {@code A(x,y) = fun(A(x,y), B(x,y))}.</p> * * @param mtx Argument matrix. * @param fun Mapping function. * @return This function. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix map(Matrix mtx, IgniteBiFunction<Double, Double, Double> fun); /** * Gets number of non-zero elements in this matrix. * * @return Number of non-zero elements in this matrix. */ public int nonZeroElements(); /** * Gets spliterator for all values in this matrix. * * @return Spliterator for all values. */ public Spliterator<Double> allSpliterator(); /** * Gets spliterator for all non-zero values in this matrix. * * @return Spliterator for all non-zero values. */ public Spliterator<Double> nonZeroSpliterator(); /** * Assigns values from given vector to the specified column in this matrix. * * @param col Column index. 
* @param vec Vector to get values from. * @return This matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix assignColumn(int col, Vector vec); /** * Assigns values from given vector to the specified row in this matrix. * * @param row Row index. * @param vec Vector to get values from. * @return This matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix assignRow(int row, Vector vec); /** * Collects the results of applying a given function to all rows in this matrix. * * @param fun Aggregating function. * @return Vector of row aggregates. */ public Vector foldRows(IgniteFunction<Vector, Double> fun); /** * Collects the results of applying a given function to all columns in this matrix. * * @param fun Aggregating function. * @return Vector of column aggregates. */ public Vector foldColumns(IgniteFunction<Vector, Double> fun); /** * Folds this matrix into a single value. * * @param foldFun Folding function that takes two parameters: accumulator and the current value. * @param mapFun Mapping function that is called on each matrix cell before its passed to the accumulator (as its * second parameter). * @param <T> Type of the folded value. * @param zeroVal Zero value for fold function. * @return Folded value of this matrix. */ public <T> T foldMap(IgniteBiFunction<T, Double, T> foldFun, IgniteDoubleFunction<Double> mapFun, T zeroVal); /** * Calculates the density of the matrix based on supplied criteria. * Returns {@code true} if this matrix is denser than threshold with at least 80% confidence. * * @param threshold the threshold value [0, 1] of non-zero elements above which the matrix is considered dense. */ public boolean density(double threshold); /** * Gets number of columns in this matrix. * * @return The number of columns in this matrix. */ public int columnSize(); /** * Gets number of rows in this matrix. * * @return The number of rows in this matrix. 
*/ public int rowSize(); /** * Divides each value in this matrix by the argument. * * @param x Divider value. * @return This matrix. */ public Matrix divide(double x); /** * Gets the matrix value at the provided location. * * @param row Row index. * @param col Column index. * @return Matrix value. * @throws IndexException Thrown in case of index is out of bound. */ public double get(int row, int col); /** * Gets the matrix value at the provided location without checking boundaries. * This method is marginally quicker than its {@link #get(int, int)} sibling. * * @param row Row index. * @param col Column index. * @return Matrix value. */ public double getX(int row, int col); /** * Gets matrix storage model. */ public MatrixStorage getStorage(); /** * Clones this matrix. * <p> * NOTE: new matrix will have the same flavor as the this matrix but a different ID.</p> * * @return New matrix of the same underlying class, the same size and the same values. */ public Matrix copy(); /** * Creates new empty matrix of the same underlying class but of different size. * <p> * NOTE: new matrix will have the same flavor as the this matrix but a different ID.</p> * * @param rows Number of rows for new matrix. * @param cols Number of columns for new matrix. * @return New matrix of the same underlying class and size. */ public Matrix like(int rows, int cols); /** * Creates new empty vector of compatible properties (similar or the same flavor) to this matrix. * * @param crd Cardinality of the vector. * @return Newly created empty vector "compatible" to this matrix. */ public Vector likeVector(int crd); /** * Creates new matrix where each value is a difference between corresponding value of this matrix and * passed in argument matrix. * * @param mtx Argument matrix. * @return New matrix of the same underlying class and size. * @throws CardinalityException Thrown if cardinalities mismatch. 
*/ public Matrix minus(Matrix mtx); /** * Creates new matrix where each value is a sum of the corresponding value of this matrix and * argument value. * * @param x Value to add. * @return New matrix of the same underlying class and size. */ public Matrix plus(double x); /** * Creates new matrix where each value is a sum of corresponding values of this matrix and * passed in argument matrix. * * @param mtx Argument matrix. * @return New matrix of the same underlying class and size. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix plus(Matrix mtx); /** * Auto-generated globally unique matrix ID. * * @return Matrix GUID. */ public IgniteUuid guid(); /** * Sets given value. * * @param row Row index. * @param col Column index. * @param val Value to set. * @return This matrix. * @throws IndexException Thrown in case of either index is out of bound. */ public Matrix set(int row, int col, double val); /** * Sets values for given row. * * @param row Row index. * @param data Row data to set. * @return This matrix. * @throws IndexException Thrown in case of index is out of bound. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix setRow(int row, double[] data); /** * Get a specific row from matrix. * * @param row Row index. * @return row. */ public Vector getRow(int row); /** * Sets values for given column. * * @param col Column index. * @param data Column data to set. * @return This matrix. * @throws IndexException Thrown in case of index is out of bound. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix setColumn(int col, double[] data); /** * Get a specific row from matrix. * * @param col Col index. * @return Col. */ public Vector getCol(int col); /** * Sets given value without checking for index bounds. This method is marginally faster * than its {@link #set(int, int, double)} sibling. * * @param row Row index. * @param col Column index. * @param val Value to set. 
* @return This matrix. */ public Matrix setX(int row, int col, double val); /** * Creates new matrix containing the product of given value and values in this matrix. * * @param x Value to multiply. * @return New matrix. */ public Matrix times(double x); /** * Creates new matrix that is the product of multiplying this matrix and the argument matrix. * * @param mtx Argument matrix. * @return New matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Matrix times(Matrix mtx); /** * Creates new matrix that is the product of multiplying this matrix and the argument vector. * * @param vec Argument vector. * @return New matrix. * @throws CardinalityException Thrown if cardinalities mismatch. */ public Vector times(Vector vec); /** * Gets sum of all elements in the matrix. * * @return Sum of all elements in this matrix. */ public double sum(); /** * Creates new matrix that is transpose of this matrix. * * @return New transposed matrix. */ public Matrix transpose(); /** * Creates new view into matrix row. Changes to the view will be propagated to this matrix. * * @param row Row index. * @return New view. * @throws IndexException Thrown in case of index is out of bound. */ public Vector viewRow(int row); /** * Creates new view into matrix column . Changes to the view will be propagated to this matrix. * * @param col Column index. * @return New view. * @throws IndexException Thrown in case of index is out of bound. */ public Vector viewColumn(int col); /** * Creates new view into matrix diagonal. Changes to the view will be propagated to this matrix. * * @return New view. */ public Vector viewDiagonal(); /** {@inheritDoc} */ @Override public default void destroy() { // No-op. } /** * Replace matrix entry with value oldVal at (row, col) with result of computing f(row, col, oldVal). * * @param row Row. * @param col Column. * @param f Function used for replacing. 
*/ public void compute(int row, int col, IgniteTriFunction<Integer, Integer, Double, Double> f); /** * Returns matrix determinant using Laplace theorem. * * @return A determinant for this matrix. * @throws CardinalityException Thrown if matrix is not square. */ public double determinant(); /** * Returns the inverse matrix of this matrix * * @return Inverse of this matrix */ public Matrix inverse(); /** {@inheritDoc} */ @Override public default boolean isNumeric() { return true; } }