gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.intellij.plugins.xpathView.search; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ContentIterator; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.OrderEnumerator; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.Conditions; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileVisitor; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.PsiSearchScopeUtil; import com.intellij.util.Processor; import com.intellij.util.xmlb.annotations.Attribute; import com.intellij.util.xmlb.annotations.Tag; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.Collections; import java.util.HashSet; /** * Copyright 2006 Sascha Weinreuter * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */
public final class SearchScope {

    /** The kind of scope the user can search in: whole project, one module, one directory, or a custom IDE scope. */
    public enum ScopeType {
        PROJECT, MODULE, DIRECTORY, CUSTOM
    }

    private ScopeType myScopeType;
    // Name of the module to search; only meaningful when myScopeType == MODULE.
    private String myModuleName;
    // Directory path to search; only meaningful when myScopeType == DIRECTORY.
    private String myPath;
    // Whether a DIRECTORY scope descends into subdirectories.
    private boolean myRecursive;
    // Display name of the custom scope; only meaningful when myScopeType == CUSTOM.
    private String myScopeName;
    // The resolved custom scope object. Not persisted via xmlb annotations;
    // it is injected at runtime through setCustomScope().
    private com.intellij.psi.search.SearchScope myCustomScope;

    /** Default scope: the whole project, recursive. */
    public SearchScope() {
        myScopeType = ScopeType.PROJECT;
        myRecursive = true;
    }

    /**
     * Creates a fully specified scope. Only the fields relevant to {@code scopeType}
     * need to carry meaningful values; the rest may be null.
     */
    public SearchScope(ScopeType scopeType, String directoryName, boolean recursive, String moduleName, String scopeName) {
        myScopeType = scopeType;
        myPath = directoryName;
        myRecursive = recursive;
        myModuleName = moduleName;
        myScopeName = scopeName;
    }

    /** Installs the resolved custom scope; required before using a CUSTOM scope (see isValid/iterateContent). */
    void setCustomScope(com.intellij.psi.search.SearchScope customScope) {
        myCustomScope = customScope;
    }

    /**
     * Human-readable name of this scope for display in the UI.
     * NOTE(review): for CUSTOM scopes this returns getScopeName(), which is
     * {@code @Nullable} — the {@code @NotNull} contract relies on the scope name
     * having been set; confirm callers only invoke this on valid scopes.
     */
    @NotNull
    public String getName() {
        switch(getScopeType()) {
            case PROJECT:
                return "Project";
            case MODULE:
                return "Module '" + getModuleName() + "'";
            case DIRECTORY:
                return "Directory '" + getPath() + "'";
            case CUSTOM:
                return getScopeName();
        }
        // All enum constants are handled above; this is unreachable.
        assert false;
        return null;
    }

    // ---- xmlb-serialized properties (persisted as attributes/tags in the settings XML) ----

    @NotNull
    @Attribute("type")
    public ScopeType getScopeType() {
        return myScopeType;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setScopeType(ScopeType scopeType) {
        myScopeType = scopeType;
    }

    @Tag
    public String getModuleName() {
        return myModuleName;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setModuleName(String moduleName) {
        myModuleName = moduleName;
    }

    @Nullable
    @Attribute("scope-name")
    public String getScopeName() {
        return myScopeName;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setScopeName(String scopeName) {
        myScopeName = scopeName;
    }

    @Nullable
    @Tag
    public String getPath() {
        return myPath;
    }

    public void setPath(String path) {
        myPath = path;
    }

    @Attribute
    public boolean isRecursive() {
        return myRecursive;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setRecursive(boolean recursive) {
        myRecursive = recursive;
    }

    /**
     * Whether this scope is sufficiently specified to be searched:
     * MODULE needs a non-empty module name, DIRECTORY needs an existing path,
     * CUSTOM needs an installed custom scope, PROJECT is always valid.
     */
    public boolean isValid() {
        final String dirName = getPath();
        final String moduleName = getModuleName();
        switch(getScopeType()) {
            case MODULE:
                return moduleName != null && !moduleName.isEmpty();
            case DIRECTORY:
                return dirName != null && !dirName.isEmpty() && findFile(dirName) != null;
            case CUSTOM:
                return myCustomScope != null;
            case PROJECT:
                return true;
        }
        return false;
    }

    /**
     * Feeds every (non-directory) file inside this scope to {@code processor}.
     * Precondition: {@link #isValid()} — the asserts below rely on it.
     */
    void iterateContent(@NotNull final Project project, @NotNull Processor<? super VirtualFile> processor) {
        switch(getScopeType()) {
            case PROJECT:
                //noinspection unchecked
                ProjectRootManager.getInstance(project).getFileIndex().iterateContent(new MyFileIterator(processor, Conditions.alwaysTrue()));
                break;
            case MODULE:
                final Module module = ModuleManager.getInstance(project).findModuleByName(getModuleName());
                assert module != null;
                ModuleRootManager.getInstance(module).getFileIndex().iterateContent(new MyFileIterator(processor, Conditions.alwaysTrue()));
                break;
            case DIRECTORY:
                final String dirName = getPath();
                assert dirName != null;
                final VirtualFile virtualFile = findFile(dirName);
                if(virtualFile != null) {
                    iterateRecursively(virtualFile, processor, isRecursive());
                }
                break;
            case CUSTOM:
                assert myCustomScope != null;
                final ContentIterator iterator;
                if(myCustomScope instanceof GlobalSearchScope) {
                    final GlobalSearchScope searchScope = (GlobalSearchScope) myCustomScope;
                    iterator = new MyFileIterator(processor, virtualFile13 -> searchScope.contains(virtualFile13));
                    if(searchScope.isSearchInLibraries()) {
                        // Library roots are not part of the project content index,
                        // so they are walked explicitly here.
                        final OrderEnumerator enumerator = OrderEnumerator.orderEntries(project).withoutModuleSourceEntries().withoutDepModules();
                        final Collection<VirtualFile> libraryFiles = new HashSet<>();
                        Collections.addAll(libraryFiles, enumerator.getClassesRoots());
                        Collections.addAll(libraryFiles, enumerator.getSourceRoots());
                        final Processor<VirtualFile> adapter = virtualFile1 -> iterator.processFile(virtualFile1);
                        for(final VirtualFile file : libraryFiles) {
                            iterateRecursively(file, adapter, true);
                        }
                    }
                } else {
                    // Non-global (e.g. local) scope: membership has to be checked
                    // on the PSI level, so each file is resolved to a PsiFile first.
                    final PsiManager manager = PsiManager.getInstance(project);
                    iterator = new MyFileIterator(processor, virtualFile12 -> {
                        final PsiFile element = manager.findFile(virtualFile12);
                        return element != null && PsiSearchScopeUtil.isInScope(myCustomScope, element);
                    });
                }
                ProjectRootManager.getInstance(project).getFileIndex().iterateContent(iterator);
        }
    }

    @Override
    public boolean equals(Object o) {
        if(this == o) {
            return true;
        }
        if(o == null || getClass() != o.getClass()) {
            return false;
        }
        SearchScope scope = (SearchScope) o;
        return myRecursive == scope.myRecursive
                && Comparing.equal(myCustomScope, scope.myCustomScope)
                && Comparing.equal(myModuleName, scope.myModuleName)
                && Comparing.equal(myPath, scope.myPath)
                && Comparing.equal(myScopeName, scope.myScopeName)
                && myScopeType == scope.myScopeType;
    }

    @Override
    public int hashCode() {
        // Standard 31-based combination over the same fields compared in equals().
        int result = myScopeType != null ? myScopeType.hashCode() : 0;
        result = 31 * result + (myModuleName != null ? myModuleName.hashCode() : 0);
        result = 31 * result + (myPath != null ? myPath.hashCode() : 0);
        result = 31 * result + (myRecursive ? 1 : 0);
        result = 31 * result + (myScopeName != null ? myScopeName.hashCode() : 0);
        result = 31 * result + (myCustomScope != null ? myCustomScope.hashCode() : 0);
        return result;
    }

    /** Resolves a directory path to a VirtualFile; backslashes are normalized so Windows paths work too. */
    @Nullable
    private static VirtualFile findFile(String dirName) {
        return LocalFileSystem.getInstance().findFileByPath(dirName.replace('\\', '/'));
    }

    /** Visits all files under {@code virtualFile} (one level only when not recursive) and hands non-directories to {@code processor}. */
    private static void iterateRecursively(VirtualFile virtualFile, final Processor<? super VirtualFile> processor, boolean recursive) {
        VfsUtilCore.visitChildrenRecursively(virtualFile, new VirtualFileVisitor(recursive ? null : VirtualFileVisitor.ONE_LEVEL_DEEP) {
            @Override
            public boolean visitFile(@NotNull VirtualFile file) {
                if(!file.isDirectory()) {
                    processor.process(file);
                }
                return true;
            }
        });
    }

    /** ContentIterator that forwards non-directory files matching a condition to a processor; never aborts the iteration. */
    private static class MyFileIterator implements ContentIterator {
        private final Processor<? super VirtualFile> myProcessor;
        private final Condition<? super VirtualFile> myCondition;

        MyFileIterator(Processor<? super VirtualFile> processor, Condition<? super VirtualFile> condition) {
            myCondition = condition;
            myProcessor = processor;
        }

        @Override
        public boolean processFile(@NotNull VirtualFile fileOrDir) {
            if(!fileOrDir.isDirectory() && myCondition.value(fileOrDir)) {
                myProcessor.process(fileOrDir);
            }
            // Always continue iterating, regardless of the processor's result.
            return true;
        }
    }
}
package com.brainbeanapps.android_boilerplate.data.remote.model;

import android.os.Parcel;
import android.os.Parcelable;

import com.brainbeanapps.core.net.model.BaseResponse;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Gson-mapped response model for a remote user record.
 * NOTE(review): the snake_case field names (login, avatar_url, public_repos, ...)
 * match the GitHub REST API user schema — confirm against the actual endpoint.
 * Parcelable so instances can be passed between Android components.
 * <p>
 * Created by Rosty on 10/18/2016.
 */
public class UserResponse implements BaseResponse, Parcelable {

    @SerializedName("login") @Expose private String login;
    @SerializedName("id") @Expose private Integer id;
    @SerializedName("avatar_url") @Expose private String avatarUrl;
    @SerializedName("url") @Expose private String url;
    @SerializedName("html_url") @Expose private String htmlUrl;
    @SerializedName("name") @Expose private String name;
    @SerializedName("company") @Expose private String company;
    @SerializedName("blog") @Expose private String blog;
    @SerializedName("location") @Expose private String location;
    @SerializedName("email") @Expose private String email;
    @SerializedName("bio") @Expose private String bio;
    @SerializedName("public_repos") @Expose private Integer publicRepos;
    @SerializedName("public_gists") @Expose private Integer publicGists;
    @SerializedName("followers") @Expose private Integer followers;
    @SerializedName("following") @Expose private Integer following;
    @SerializedName("created_at") @Expose private String createdAt;
    // Local "persisted" flag — the only mutable field (see setSaved()).
    // NOTE(review): it is still annotated for (de)serialization as "is_saved";
    // confirm the backend actually sends/accepts this field.
    @SerializedName("is_saved") @Expose private boolean isSaved;

    /** No-arg constructor required by Gson. */
    public UserResponse() {
    }

    /** Convenience constructor for locally created user entries. */
    public UserResponse(final String userLogin, final String name, final String avatarUrl, final String bio) {
        this.login = userLogin;
        this.name = name;
        this.avatarUrl = avatarUrl;
        this.bio = bio;
    }

    /** Convenience constructor carrying only the login. */
    public UserResponse(final String userLogin) {
        this.login = userLogin;
    }

    // ---- Read-only accessors (all fields except isSaved are effectively immutable) ----

    public String getLogin() {
        return login;
    }

    public Integer getId() {
        return id;
    }

    public String getAvatarUrl() {
        return avatarUrl;
    }

    public String getUrl() {
        return url;
    }

    public String getHtmlUrl() {
        return htmlUrl;
    }

    public String getName() {
        return name;
    }

    public String getCompany() {
        return company;
    }

    public String getBlog() {
        return blog;
    }

    public String getLocation() {
        return location;
    }

    public String getEmail() {
        return email;
    }

    public String getBio() {
        return bio;
    }

    public Integer getPublicRepos() {
        return publicRepos;
    }

    public Integer getPublicGists() {
        return publicGists;
    }

    public Integer getFollowers() {
        return followers;
    }

    public Integer getFollowing() {
        return following;
    }

    public String getCreatedAt() {
        return createdAt;
    }

    public boolean isSaved() {
        return isSaved;
    }

    public void setSaved(boolean saved) {
        isSaved = saved;
    }

    // PARCABLE

    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors).
        return 0;
    }

    /**
     * Serializes all fields to the Parcel. The write order here MUST exactly
     * match the read order in {@link #UserResponse(Parcel)}. Nullable Integers
     * go through writeValue so nulls round-trip correctly.
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(this.login);
        dest.writeValue(this.id);
        dest.writeString(this.avatarUrl);
        dest.writeString(this.url);
        dest.writeString(this.htmlUrl);
        dest.writeString(this.name);
        dest.writeString(this.company);
        dest.writeString(this.blog);
        dest.writeString(this.location);
        dest.writeString(this.email);
        dest.writeString(this.bio);
        dest.writeValue(this.publicRepos);
        dest.writeValue(this.publicGists);
        dest.writeValue(this.followers);
        dest.writeValue(this.following);
        dest.writeString(this.createdAt);
        // booleans are not directly supported by Parcel; encoded as a byte.
        dest.writeByte(this.isSaved ? (byte) 1 : (byte) 0);
    }

    /** Deserializing constructor; field order mirrors writeToParcel exactly. */
    protected UserResponse(Parcel in) {
        this.login = in.readString();
        this.id = (Integer) in.readValue(Integer.class.getClassLoader());
        this.avatarUrl = in.readString();
        this.url = in.readString();
        this.htmlUrl = in.readString();
        this.name = in.readString();
        this.company = in.readString();
        this.blog = in.readString();
        this.location = in.readString();
        this.email = in.readString();
        this.bio = in.readString();
        this.publicRepos = (Integer) in.readValue(Integer.class.getClassLoader());
        this.publicGists = (Integer) in.readValue(Integer.class.getClassLoader());
        this.followers = (Integer) in.readValue(Integer.class.getClassLoader());
        this.following = (Integer) in.readValue(Integer.class.getClassLoader());
        this.createdAt = in.readString();
        this.isSaved = in.readByte() != 0;
    }

    /** Required by the Parcelable contract; used by the framework to recreate instances. */
    public static final Parcelable.Creator<UserResponse> CREATOR = new Parcelable.Creator<UserResponse>() {
        @Override
        public UserResponse createFromParcel(Parcel source) {
            return new UserResponse(source);
        }

        @Override
        public UserResponse[] newArray(int size) {
            return new UserResponse[size];
        }
    };
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p1beta1/image_annotator.proto package com.google.cloud.vision.v1p1beta1; /** * * * <pre> * Users describe the type of Google Cloud Vision API tasks to perform over * images by using *Feature*s. Each Feature indicates a type of image * detection task to perform. Features encode the Cloud Vision API * vertical to operate on and the number of top-scoring results to return. * </pre> * * Protobuf type {@code google.cloud.vision.v1p1beta1.Feature} */ public final class Feature extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p1beta1.Feature) FeatureOrBuilder { private static final long serialVersionUID = 0L; // Use Feature.newBuilder() to construct. 
private Feature(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Feature() { type_ = 0; model_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Feature(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Feature( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); type_ = rawValue; break; } case 16: { maxResults_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); model_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p1beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p1beta1_Feature_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p1beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p1beta1_Feature_fieldAccessorTable 
.ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p1beta1.Feature.class, com.google.cloud.vision.v1p1beta1.Feature.Builder.class); } /** * * * <pre> * Type of image feature. * </pre> * * Protobuf enum {@code google.cloud.vision.v1p1beta1.Feature.Type} */ public enum Type implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unspecified feature type. * </pre> * * <code>TYPE_UNSPECIFIED = 0;</code> */ TYPE_UNSPECIFIED(0), /** * * * <pre> * Run face detection. * </pre> * * <code>FACE_DETECTION = 1;</code> */ FACE_DETECTION(1), /** * * * <pre> * Run landmark detection. * </pre> * * <code>LANDMARK_DETECTION = 2;</code> */ LANDMARK_DETECTION(2), /** * * * <pre> * Run logo detection. * </pre> * * <code>LOGO_DETECTION = 3;</code> */ LOGO_DETECTION(3), /** * * * <pre> * Run label detection. * </pre> * * <code>LABEL_DETECTION = 4;</code> */ LABEL_DETECTION(4), /** * * * <pre> * Run OCR. * </pre> * * <code>TEXT_DETECTION = 5;</code> */ TEXT_DETECTION(5), /** * * * <pre> * Run dense text document OCR. Takes precedence when both * DOCUMENT_TEXT_DETECTION and TEXT_DETECTION are present. * </pre> * * <code>DOCUMENT_TEXT_DETECTION = 11;</code> */ DOCUMENT_TEXT_DETECTION(11), /** * * * <pre> * Run computer vision models to compute image safe-search properties. * </pre> * * <code>SAFE_SEARCH_DETECTION = 6;</code> */ SAFE_SEARCH_DETECTION(6), /** * * * <pre> * Compute a set of image properties, such as the image's dominant colors. * </pre> * * <code>IMAGE_PROPERTIES = 7;</code> */ IMAGE_PROPERTIES(7), /** * * * <pre> * Run crop hints. * </pre> * * <code>CROP_HINTS = 9;</code> */ CROP_HINTS(9), /** * * * <pre> * Run web detection. * </pre> * * <code>WEB_DETECTION = 10;</code> */ WEB_DETECTION(10), UNRECOGNIZED(-1), ; /** * * * <pre> * Unspecified feature type. * </pre> * * <code>TYPE_UNSPECIFIED = 0;</code> */ public static final int TYPE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Run face detection. 
* </pre> * * <code>FACE_DETECTION = 1;</code> */ public static final int FACE_DETECTION_VALUE = 1; /** * * * <pre> * Run landmark detection. * </pre> * * <code>LANDMARK_DETECTION = 2;</code> */ public static final int LANDMARK_DETECTION_VALUE = 2; /** * * * <pre> * Run logo detection. * </pre> * * <code>LOGO_DETECTION = 3;</code> */ public static final int LOGO_DETECTION_VALUE = 3; /** * * * <pre> * Run label detection. * </pre> * * <code>LABEL_DETECTION = 4;</code> */ public static final int LABEL_DETECTION_VALUE = 4; /** * * * <pre> * Run OCR. * </pre> * * <code>TEXT_DETECTION = 5;</code> */ public static final int TEXT_DETECTION_VALUE = 5; /** * * * <pre> * Run dense text document OCR. Takes precedence when both * DOCUMENT_TEXT_DETECTION and TEXT_DETECTION are present. * </pre> * * <code>DOCUMENT_TEXT_DETECTION = 11;</code> */ public static final int DOCUMENT_TEXT_DETECTION_VALUE = 11; /** * * * <pre> * Run computer vision models to compute image safe-search properties. * </pre> * * <code>SAFE_SEARCH_DETECTION = 6;</code> */ public static final int SAFE_SEARCH_DETECTION_VALUE = 6; /** * * * <pre> * Compute a set of image properties, such as the image's dominant colors. * </pre> * * <code>IMAGE_PROPERTIES = 7;</code> */ public static final int IMAGE_PROPERTIES_VALUE = 7; /** * * * <pre> * Run crop hints. * </pre> * * <code>CROP_HINTS = 9;</code> */ public static final int CROP_HINTS_VALUE = 9; /** * * * <pre> * Run web detection. * </pre> * * <code>WEB_DETECTION = 10;</code> */ public static final int WEB_DETECTION_VALUE = 10; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static Type valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Type forNumber(int value) { switch (value) { case 0: return TYPE_UNSPECIFIED; case 1: return FACE_DETECTION; case 2: return LANDMARK_DETECTION; case 3: return LOGO_DETECTION; case 4: return LABEL_DETECTION; case 5: return TEXT_DETECTION; case 11: return DOCUMENT_TEXT_DETECTION; case 6: return SAFE_SEARCH_DETECTION; case 7: return IMAGE_PROPERTIES; case 9: return CROP_HINTS; case 10: return WEB_DETECTION; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Type>() { public Type findValueByNumber(int number) { return Type.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.vision.v1p1beta1.Feature.getDescriptor().getEnumTypes().get(0); } private static final Type[] VALUES = values(); public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Type(int value) { this.value = 
value; } // @@protoc_insertion_point(enum_scope:google.cloud.vision.v1p1beta1.Feature.Type) } public static final int TYPE_FIELD_NUMBER = 1; private int type_; /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @return The type. */ @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature.Type getType() { @SuppressWarnings("deprecation") com.google.cloud.vision.v1p1beta1.Feature.Type result = com.google.cloud.vision.v1p1beta1.Feature.Type.valueOf(type_); return result == null ? com.google.cloud.vision.v1p1beta1.Feature.Type.UNRECOGNIZED : result; } public static final int MAX_RESULTS_FIELD_NUMBER = 2; private int maxResults_; /** * * * <pre> * Maximum number of results of this type. * </pre> * * <code>int32 max_results = 2;</code> * * @return The maxResults. */ @java.lang.Override public int getMaxResults() { return maxResults_; } public static final int MODEL_FIELD_NUMBER = 3; private volatile java.lang.Object model_; /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @return The model. */ @java.lang.Override public java.lang.String getModel() { java.lang.Object ref = model_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); model_ = s; return s; } } /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @return The bytes for model. 
*/ @java.lang.Override public com.google.protobuf.ByteString getModelBytes() { java.lang.Object ref = model_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); model_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (type_ != com.google.cloud.vision.v1p1beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) { output.writeEnum(1, type_); } if (maxResults_ != 0) { output.writeInt32(2, maxResults_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (type_ != com.google.cloud.vision.v1p1beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_); } if (maxResults_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxResults_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p1beta1.Feature)) { return super.equals(obj); } com.google.cloud.vision.v1p1beta1.Feature other = (com.google.cloud.vision.v1p1beta1.Feature) obj; if (type_ != 
other.type_) return false; if (getMaxResults() != other.getMaxResults()) return false; if (!getModel().equals(other.getModel())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (37 * hash) + MAX_RESULTS_FIELD_NUMBER; hash = (53 * hash) + getMaxResults(); hash = (37 * hash) + MODEL_FIELD_NUMBER; hash = (53 * hash) + getModel().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p1beta1.Feature parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p1beta1.Feature parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p1beta1.Feature parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.vision.v1p1beta1.Feature prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Users describe the type of Google Cloud Vision API tasks to perform over * images by using *Feature*s. Each Feature indicates a type of image * detection task to perform. Features encode the Cloud Vision API * vertical to operate on and the number of top-scoring results to return. * </pre> * * Protobuf type {@code google.cloud.vision.v1p1beta1.Feature} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p1beta1.Feature) com.google.cloud.vision.v1p1beta1.FeatureOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p1beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p1beta1_Feature_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p1beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p1beta1_Feature_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p1beta1.Feature.class, com.google.cloud.vision.v1p1beta1.Feature.Builder.class); } // Construct using com.google.cloud.vision.v1p1beta1.Feature.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); type_ = 0; maxResults_ = 0; model_ = ""; 
return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p1beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p1beta1_Feature_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature getDefaultInstanceForType() { return com.google.cloud.vision.v1p1beta1.Feature.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature build() { com.google.cloud.vision.v1p1beta1.Feature result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature buildPartial() { com.google.cloud.vision.v1p1beta1.Feature result = new com.google.cloud.vision.v1p1beta1.Feature(this); result.type_ = type_; result.maxResults_ = maxResults_; result.model_ = model_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p1beta1.Feature) { return 
mergeFrom((com.google.cloud.vision.v1p1beta1.Feature) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p1beta1.Feature other) { if (other == com.google.cloud.vision.v1p1beta1.Feature.getDefaultInstance()) return this; if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } if (other.getMaxResults() != 0) { setMaxResults(other.getMaxResults()); } if (!other.getModel().isEmpty()) { model_ = other.model_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.vision.v1p1beta1.Feature parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.vision.v1p1beta1.Feature) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int type_ = 0; /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; onChanged(); return this; } /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @return The type. 
*/ @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature.Type getType() { @SuppressWarnings("deprecation") com.google.cloud.vision.v1p1beta1.Feature.Type result = com.google.cloud.vision.v1p1beta1.Feature.Type.valueOf(type_); return result == null ? com.google.cloud.vision.v1p1beta1.Feature.Type.UNRECOGNIZED : result; } /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @param value The type to set. * @return This builder for chaining. */ public Builder setType(com.google.cloud.vision.v1p1beta1.Feature.Type value) { if (value == null) { throw new NullPointerException(); } type_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The feature type. * </pre> * * <code>.google.cloud.vision.v1p1beta1.Feature.Type type = 1;</code> * * @return This builder for chaining. */ public Builder clearType() { type_ = 0; onChanged(); return this; } private int maxResults_; /** * * * <pre> * Maximum number of results of this type. * </pre> * * <code>int32 max_results = 2;</code> * * @return The maxResults. */ @java.lang.Override public int getMaxResults() { return maxResults_; } /** * * * <pre> * Maximum number of results of this type. * </pre> * * <code>int32 max_results = 2;</code> * * @param value The maxResults to set. * @return This builder for chaining. */ public Builder setMaxResults(int value) { maxResults_ = value; onChanged(); return this; } /** * * * <pre> * Maximum number of results of this type. * </pre> * * <code>int32 max_results = 2;</code> * * @return This builder for chaining. */ public Builder clearMaxResults() { maxResults_ = 0; onChanged(); return this; } private java.lang.Object model_ = ""; /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @return The model. 
*/ public java.lang.String getModel() { java.lang.Object ref = model_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); model_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @return The bytes for model. */ public com.google.protobuf.ByteString getModelBytes() { java.lang.Object ref = model_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); model_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @param value The model to set. * @return This builder for chaining. */ public Builder setModel(java.lang.String value) { if (value == null) { throw new NullPointerException(); } model_ = value; onChanged(); return this; } /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @return This builder for chaining. */ public Builder clearModel() { model_ = getDefaultInstance().getModel(); onChanged(); return this; } /** * * * <pre> * Model to use for the feature. * Supported values: "builtin/stable" (the default if unset) and * "builtin/latest". * </pre> * * <code>string model = 3;</code> * * @param value The bytes for model to set. * @return This builder for chaining. 
*/ public Builder setModelBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); model_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p1beta1.Feature) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p1beta1.Feature) private static final com.google.cloud.vision.v1p1beta1.Feature DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p1beta1.Feature(); } public static com.google.cloud.vision.v1p1beta1.Feature getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Feature> PARSER = new com.google.protobuf.AbstractParser<Feature>() { @java.lang.Override public Feature parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Feature(input, extensionRegistry); } }; public static com.google.protobuf.Parser<Feature> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Feature> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p1beta1.Feature getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.thinkaurelius.titan.diskstorage.locking; import com.codahale.metrics.MetricRegistry; import com.google.common.base.Preconditions; import com.thinkaurelius.titan.diskstorage.StaticBuffer; import com.thinkaurelius.titan.diskstorage.TemporaryStorageException; import com.thinkaurelius.titan.diskstorage.common.DistributedStoreManager; import com.thinkaurelius.titan.diskstorage.keycolumnvalue.StoreTransaction; import com.thinkaurelius.titan.diskstorage.locking.consistentkey.ConsistentKeyLockStatus; import com.thinkaurelius.titan.diskstorage.locking.consistentkey.ConsistentKeyLocker; import com.thinkaurelius.titan.diskstorage.locking.consistentkey.ConsistentKeyLockerSerializer; import com.thinkaurelius.titan.diskstorage.util.KeyColumn; import com.thinkaurelius.titan.diskstorage.util.StaticByteBuffer; import com.thinkaurelius.titan.diskstorage.util.TimeUtility; import com.thinkaurelius.titan.diskstorage.util.TimestampProvider; import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration; import com.thinkaurelius.titan.util.stats.MetricManager; import org.apache.commons.configuration.BaseConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; import java.util.Map; import java.util.concurrent.TimeUnit; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; /** * Abstract base class for building lockers. Implements locking between threads * using {@link LocalLockMediator} but delegates inter-process lock resolution * to its subclasses. 
* * @param <S> An implementation-specific type holding information about a single * lock; see {@link ConsistentKeyLockStatus} for an example * @see ConsistentKeyLocker */ public abstract class AbstractLocker<S extends LockStatus> implements Locker { /** * Uniquely identifies a process within a domain (or across all domains, * though only intra-domain uniqueness is required) */ protected final StaticBuffer rid; /** * Sole source of time. Don't call {@link System#currentTimeMillis()} or * {@link System#nanoTime()} directly. Use only this object. This object is * replaced with a mock during testing to give tests exact control over the * flow of time. */ protected final TimestampProvider times; /** * This is sort-of Cassandra/HBase specific. It concatenates * {@link KeyColumn} arguments into a single StaticBuffer containing the key * followed by the column and vice-versa. */ protected final ConsistentKeyLockerSerializer serializer; /** * Resolves lock contention by multiple threads. */ protected final LocalLockMediator<StoreTransaction> llm; /** * Stores all information about all locks this implementation has taken on * behalf of any {@link StoreTransaction}. It is parameterized in a type * specific to the concrete subclass, so that concrete implementations can * store information specific to their locking primitives. */ protected final LockerState<S> lockState; /** * The amount of time, in nanoseconds, that may pass after writing a lock * before it is considered to be invalid and automatically unlocked. */ protected final long lockExpireNS; protected final Logger log; private static final String M_LOCKS = "locks"; private static final String M_WRITE = "write"; private static final String M_CHECK = "check"; private static final String M_DELETE = "delete"; private static final String M_CALLS = "calls"; private static final String M_EXCEPTIONS = "exceptions"; /** * Abstract builder for this Locker implementation. 
See * {@link ConsistentKeyLocker} for an example of how to subclass this * abstract builder into a concrete builder. * <p/> * If you're wondering why the bounds for the type parameter {@code B} looks so hideous, see: * <p/> * <a href="https://weblogs.java.net/blog/emcmanus/archive/2010/10/25/using-builder-pattern-subclasses">Using the builder pattern with subclasses by Eamonn McManus</a> * * @param <S> The concrete type of {@link LockStatus} * @param <B> The concrete type of the subclass extending this builder */ public static abstract class Builder<S, B extends Builder<S, B>> { protected StaticBuffer rid; protected TimestampProvider times; protected ConsistentKeyLockerSerializer serializer; protected LocalLockMediator<StoreTransaction> llm; protected LockerState<S> lockState; protected long lockExpireNS; protected Logger log; public Builder() { this.rid = new StaticByteBuffer(DistributedStoreManager.getRid(new BaseConfiguration())); this.times = TimeUtility.INSTANCE; this.serializer = new ConsistentKeyLockerSerializer(); this.llm = null; // redundant, but it preserves this constructor's overall pattern this.lockState = new LockerState<S>(); this.lockExpireNS = NANOSECONDS.convert(GraphDatabaseConfiguration.LOCK_EXPIRE_MS_DEFAULT, MILLISECONDS); this.log = LoggerFactory.getLogger(AbstractLocker.class); } /** * Concrete subclasses should just "{@code return this;}". * * @return concrete subclass instance */ protected abstract B self(); public B rid(StaticBuffer rid) { this.rid = rid; return self(); } public B times(TimestampProvider times) { this.times = times; return self(); } public B serializer(ConsistentKeyLockerSerializer serializer) { this.serializer = serializer; return self(); } public B mediator(LocalLockMediator<StoreTransaction> mediator) { this.llm = mediator; return self(); } /** * Retrieve the mediator associated with {@code name} via {@link LocalLockMediators#get(String)}. 
* * @param name the mediator name * @return this builder */ public B mediatorName(String name) { Preconditions.checkNotNull(name); mediator(LocalLockMediators.INSTANCE.<StoreTransaction>get(name)); return self(); } public B logger(Logger log) { this.log = log; return self(); } public B lockExpireNS(long exp, TimeUnit unit) { this.lockExpireNS = NANOSECONDS.convert(exp, unit); return self(); } /** * This method is only intended for testing. Calling this in production * could cause lock failures. * * @param state the initial lock state for this instance * @return this builder */ public B internalState(LockerState<S> state) { this.lockState = state; return self(); } /** * Inspect and modify this builder's state after the client has called * {@code build()}, but before a return object has been instantiated. * This is useful for catching illegal values or translating placeholder * configuration values into the objects they represent. This is * intended to be called from subclasses' build() methods. */ protected void preBuild() { if (null == llm) { llm = getDefaultMediator(); } } /** * Get the default {@link LocalLockMediator} for Locker being built. * This is called when the client doesn't specify a locker. * * @return a lock mediator */ protected abstract LocalLockMediator<StoreTransaction> getDefaultMediator(); } public AbstractLocker(StaticBuffer rid, TimestampProvider times, ConsistentKeyLockerSerializer serializer, LocalLockMediator<StoreTransaction> llm, LockerState<S> lockState, long lockExpireNS, Logger log) { this.rid = rid; this.times = times; this.serializer = serializer; this.llm = llm; this.lockState = lockState; this.lockExpireNS = lockExpireNS; this.log = log; } /** * Try to take/acquire/write/claim a lock uniquely identified within this * {@code Locker} by the {@code lockID} argument on behalf of {@code tx}. 
* * @param lockID identifies the lock * @param tx identifies the process claiming this lock * @return a {@code LockStatus} implementation on successful lock aquisition * @throws Throwable if the lock could not be taken/acquired/written/claimed or * the attempted write encountered an error */ protected abstract S writeSingleLock(KeyColumn lockID, StoreTransaction tx) throws Throwable; /** * Try to verify that the lock identified by {@code lockID} is already held * by {@code tx}. The {@code lockStatus} argument refers to the object * returned by a previous call to * {@link #writeSingleLock(KeyColumn, StoreTransaction)}. This should be a * read-only operation: return if the lock is already held, but this method * finds that it is not held, then throw an exception instead of trying to * acquire it. * <p/> * This method is only useful with nonblocking locking implementations try * to lock and then check the outcome of the attempt in two separate stages. * For implementations that build {@code writeSingleLock(...)} on a * synchronous locking primitive, such as a blocking {@code lock()} method * or a blocking semaphore {@code p()}, this method is redundant with * {@code writeSingleLock(...)} and may unconditionally return true. * * @param lockID identifies the lock to check * @param lockStatus the result of a prior successful {@code writeSingleLock(...)} * call on this {@code lockID} and {@code tx} * @param tx identifies the process claiming this lock * @throws Throwable if the lock fails the check or if the attempted check * encountered an error */ protected abstract void checkSingleLock(KeyColumn lockID, S lockStatus, StoreTransaction tx) throws Throwable; /** * Try to unlock/release/delete the lock identified by {@code lockID} and * both held by and verified for {@code tx}. 
This method is only called with * arguments for which {@link #writeSingleLock(KeyColumn, StoreTransaction)} * and {@link #checkSingleLock(KeyColumn, LockStatus, StoreTransaction)} * both returned successfully (i.e. without exceptions). * * @param lockID identifies the lock to release * @param lockStatus the result of a prior successful {@code writeSingleLock(...)} * followed by a successful {@code checkSingleLock(...)} * @param tx identifies the process that wrote and checked this lock * @throws Throwable if the lock could not be released/deleted or if the attempted * delete encountered an error */ protected abstract void deleteSingleLock(KeyColumn lockID, S lockStatus, StoreTransaction tx) throws Throwable; @Override public void writeLock(KeyColumn lockID, StoreTransaction tx) throws TemporaryLockingException, PermanentLockingException { if (null != tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_WRITE, M_CALLS).inc(); } if (lockState.has(tx, lockID)) { log.debug("Transaction {} already wrote lock on {}", tx, lockID); return; } if (lockLocally(lockID, tx)) { boolean ok = false; try { S stat = writeSingleLock(lockID, tx); lockLocally(lockID, stat.getExpirationTimestamp(TimeUnit.NANOSECONDS), tx); // update local lock expiration time lockState.take(tx, lockID, stat); ok = true; } catch (TemporaryStorageException tse) { throw new TemporaryLockingException(tse); } catch (AssertionError ae) { // Concession to ease testing with mocks & behavior verification ok = true; throw ae; } catch (Throwable t) { throw new PermanentLockingException(t); } finally { if (!ok) { // lockState.release(tx, lockID); // has no effect unlockLocally(lockID, tx); if (null != tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_WRITE, M_EXCEPTIONS).inc(); } } } } else { // Fail immediately with no retries on local contention throw new 
PermanentLockingException("Local lock contention"); } } @Override public void checkLocks(StoreTransaction tx) throws TemporaryLockingException, PermanentLockingException { if (null != tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_CHECK, M_CALLS).inc(); } Map<KeyColumn, S> m = lockState.getLocksForTx(tx); if (m.isEmpty()) { return; // no locks for this tx } // We never receive interrupts in normal operation; one can only appear // during Thread.sleep(), and in that case it probably means the entire // Titan process is shutting down; for this reason, we return ASAP on an // interrupt boolean ok = false; try { for (KeyColumn kc : m.keySet()) { checkSingleLock(kc, m.get(kc), tx); } ok = true; } catch (InterruptedException e) { throw new TemporaryLockingException(e); } catch (TemporaryStorageException tse) { throw new TemporaryLockingException(tse); } catch (AssertionError ae) { throw ae; // Concession to ease testing with mocks & behavior verification } catch (Throwable t) { throw new PermanentLockingException(t); } finally { if (!ok && null != tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_CHECK, M_CALLS).inc(); } } } @Override public void deleteLocks(StoreTransaction tx) throws TemporaryLockingException, PermanentLockingException { if (null != tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_DELETE, M_CALLS).inc(); } Map<KeyColumn, S> m = lockState.getLocksForTx(tx); Iterator<KeyColumn> iter = m.keySet().iterator(); while (iter.hasNext()) { KeyColumn kc = iter.next(); S ls = m.get(kc); try { deleteSingleLock(kc, ls, tx); } catch (AssertionError ae) { throw ae; // Concession to ease testing with mocks & behavior verification } catch (Throwable t) { log.error("Exception while deleting lock on " + kc, t); if (null != 
tx.getConfiguration().getMetricsPrefix()) { MetricManager.INSTANCE.getCounter(tx.getConfiguration().getMetricsPrefix(), M_LOCKS, M_DELETE, M_CALLS).inc(); } } // Regardless of whether we successfully deleted the lock from storage, take it out of the local mediator llm.unlock(kc, tx); iter.remove(); } } private boolean lockLocally(KeyColumn lockID, StoreTransaction tx) { return lockLocally(lockID, times.getApproxNSSinceEpoch() + lockExpireNS, tx); } private boolean lockLocally(KeyColumn lockID, long expireNS, StoreTransaction tx) { return llm.lock(lockID, tx, expireNS, TimeUnit.NANOSECONDS); } private void unlockLocally(KeyColumn lockID, StoreTransaction txh) { llm.unlock(lockID, txh); } }
package com.angkorteam.fintech.dto.builder; import java.io.Serializable; import java.util.Date; import org.apache.commons.lang3.time.DateFormatUtils; import com.angkorteam.fintech.dto.enums.ChargeCalculation; import com.angkorteam.fintech.dto.enums.ChargeFrequency; import com.angkorteam.fintech.dto.enums.ChargePayment; import com.angkorteam.fintech.dto.enums.ChargeTime; import com.angkorteam.fintech.dto.enums.ChargeType; import io.github.openunirest.http.JsonNode; public class ChargeBuilder implements Serializable { private ChargeType chargeAppliesTo; private boolean hasChargeAppliesTo; private ChargeTime chargeTimeType; private boolean hasChargeTimeType; private String name; private boolean hasName; private String currencyCode; private boolean hasCurrencyCode; private ChargeCalculation chargeCalculationType; private boolean hasChargeCalculationType; private ChargePayment chargePaymentMode; private boolean hasChargePaymentMode; private String taxGroupId; private boolean hasTaxGroupId; private boolean penalty; private boolean hasPenalty; private boolean active; private boolean hasActive; private Double amount; private boolean hasAmount; private Long feeInterval; private boolean hasFeeInterval; private ChargeFrequency feeFrequency; private boolean hasFeeFrequency; private String locale = "en"; private boolean hasLocale = true; private String incomeAccountId; private boolean hasIncomeAccountId; private String monthDayFormat = "dd MMMM"; private boolean hasMonthDayFormat = true; private Date feeOnMonthDay; private boolean hasFeeOnMonthDay; private String id; private boolean hasId; public JsonNode build() { JsonNode object = new com.angkorteam.fintech.dto.JsonNode(); if (this.hasId) { object.getObject().put("id", this.id); } if (this.hasIncomeAccountId) { object.getObject().put("incomeAccountId", this.incomeAccountId); } if (this.hasChargeAppliesTo) { if (this.chargeAppliesTo != null) { object.getObject().put("chargeAppliesTo", this.chargeAppliesTo.getLiteral()); } else 
{ object.getObject().put("chargeAppliesTo", (String) null); } } if (this.hasChargeTimeType) { if (this.chargeTimeType != null) { object.getObject().put("chargeTimeType", this.chargeTimeType.getLiteral()); } else { object.getObject().put("chargeTimeType", (String) null); } } if (this.hasName) { object.getObject().put("name", this.name); } if (this.hasCurrencyCode) { object.getObject().put("currencyCode", this.currencyCode); } if (this.hasChargeCalculationType) { if (this.chargeCalculationType != null) { object.getObject().put("chargeCalculationType", this.chargeCalculationType.getLiteral()); } else { object.getObject().put("chargeCalculationType", (String) null); } } if (this.hasChargePaymentMode) { if (this.chargePaymentMode != null) { object.getObject().put("chargePaymentMode", this.chargePaymentMode.getLiteral()); } else { object.getObject().put("chargePaymentMode", (String) null); } } if (this.hasTaxGroupId) { object.getObject().put("taxGroupId", this.taxGroupId); } if (this.hasPenalty) { object.getObject().put("penalty", this.penalty); } if (this.hasActive) { object.getObject().put("active", this.active); } if (this.hasAmount) { object.getObject().put("amount", this.amount); } if (this.hasFeeInterval) { object.getObject().put("feeInterval", this.feeInterval); } if (this.hasFeeFrequency) { if (this.feeFrequency != null) { object.getObject().put("feeFrequency", this.feeFrequency.getLiteral()); } else { object.getObject().put("feeFrequency", (String) null); } } if (this.hasLocale) { object.getObject().put("locale", this.locale); } if (this.hasMonthDayFormat) { object.getObject().put("monthDayFormat", this.monthDayFormat); } if (this.hasFeeOnMonthDay) { if (this.feeOnMonthDay != null) { object.getObject().put("feeOnMonthDay", DateFormatUtils.format(this.feeOnMonthDay, this.monthDayFormat)); } else { object.getObject().put("feeOnMonthDay", (String) null); } } return object; } public ChargeBuilder withId(String id) { this.id = id; this.hasId = true; return this; } 
public ChargeBuilder withIncomeAccountId(String incomeAccountId) { this.incomeAccountId = incomeAccountId; this.hasIncomeAccountId = true; return this; } public ChargeBuilder withChargeAppliesTo(ChargeType chargeAppliesTo) { this.chargeAppliesTo = chargeAppliesTo; this.hasChargeAppliesTo = true; return this; } public ChargeBuilder withChargeTimeType(ChargeTime chargeTimeType) { this.chargeTimeType = chargeTimeType; this.hasChargeTimeType = true; return this; } public ChargeBuilder withName(String name) { this.name = name; this.hasName = true; return this; } public ChargeBuilder withCurrencyCode(String currencyCode) { this.currencyCode = currencyCode; this.hasCurrencyCode = true; return this; } public ChargeBuilder withChargeCalculationType(ChargeCalculation chargeCalculationType) { this.chargeCalculationType = chargeCalculationType; this.hasChargeCalculationType = true; return this; } public ChargeBuilder withChargePaymentMode(ChargePayment chargePaymentMode) { this.chargePaymentMode = chargePaymentMode; this.hasChargePaymentMode = true; return this; } public ChargeBuilder withTaxGroupId(String taxGroupId) { this.taxGroupId = taxGroupId; this.hasTaxGroupId = true; return this; } public ChargeBuilder withPenalty(boolean penalty) { this.penalty = penalty; this.hasPenalty = true; return this; } public ChargeBuilder withActive(boolean active) { this.active = active; this.hasActive = true; return this; } public ChargeBuilder withAmount(Double amount) { this.amount = amount; this.hasAmount = true; return this; } public ChargeBuilder withFeeInterval(Long feeInterval) { this.feeInterval = feeInterval; this.hasFeeInterval = true; return this; } public ChargeBuilder withFeeFrequency(ChargeFrequency feeFrequency) { this.feeFrequency = feeFrequency; this.hasFeeFrequency = true; return this; } public ChargeBuilder withLocale(String locale) { this.locale = locale; this.hasLocale = true; return this; } public ChargeBuilder withMonthDayFormat(String monthDayFormat) { 
this.monthDayFormat = monthDayFormat; this.hasMonthDayFormat = true; return this; } public ChargeBuilder withFeeOnMonthDay(Date feeOnMonthDay) { this.feeOnMonthDay = feeOnMonthDay; this.hasFeeOnMonthDay = true; return this; } }
package org.broadinstitute.sting.utils.codecs.vcf;

import org.broad.tribble.TribbleException;
import org.broad.tribble.readers.LineReader;
import org.broad.tribble.util.ParsingUtils;
import org.broadinstitute.sting.utils.variantcontext.*;

import java.io.IOException;
import java.util.*;

/**
 * A feature codec for the VCF 4 specification
 *
 * <p>
 * VCF is a text file format (most likely stored in a compressed manner). It contains meta-information lines, a
 * header line, and then data lines each containing information about a position in the genome.
 * </p>
 * <p>One of the main uses of next-generation sequencing is to discover variation amongst large populations
 * of related samples. Recently the format for storing next-generation read alignments has been
 * standardised by the SAM/BAM file format specification. This has significantly improved the
 * interoperability of next-generation tools for alignment, visualisation, and variant calling.
 * We propose the Variant Call Format (VCF) as a standardised format for storing the most prevalent
 * types of sequence variation, including SNPs, indels and larger structural variants, together
 * with rich annotations. VCF is usually stored in a compressed manner and can be indexed for
 * fast data retrieval of variants from a range of positions on the reference genome.
 * The format was developed for the 1000 Genomes Project, and has also been adopted by other projects
 * such as UK10K, dbSNP, or the NHLBI Exome Project. VCFtools is a software suite that implements
 * various utilities for processing VCF files, including validation, merging and comparing,
 * and also provides a general Perl and Python API.
 * The VCF specification and VCFtools are available from http://vcftools.sourceforge.net.</p>
 *
 * <p>
 * See also: @see <a href="http://vcftools.sourceforge.net/specs.html">VCF specification</a><br>
 * See also: @see <a href="http://www.ncbi.nlm.nih.gov/pubmed/21653522">VCF spec. publication</a>
 * </p>
 *
 * <h2>File format example</h2>
 * <pre>
 *     ##fileformat=VCFv4.0
 *     #CHROM  POS     ID      REF     ALT     QUAL    FILTER  INFO    FORMAT  NA12878
 *     chr1    109     .       A       T       0       PASS    AC=1    GT:AD:DP:GL:GQ  0/1:610,327:308:-316.30,-95.47,-803.03:99
 *     chr1    147     .       C       A       0       PASS    AC=1    GT:AD:DP:GL:GQ  0/1:294,49:118:-57.87,-34.96,-338.46:99
 * </pre>
 *
 * @author Mark DePristo
 * @since 2010
 */
public class VCFCodec extends AbstractVCFCodec {
    // Our aim is to read in the records and convert to VariantContext as quickly as possible,
    // relying on VariantContext to do the validation of any contradictory (or malformed) record parameters.

    /** Magic prefix of the ##fileformat meta line that identifies a VCF4 stream. */
    public final static String VCF4_MAGIC_HEADER = "##fileformat=VCFv4";

    /**
     * Reads the meta-information lines ("##...") and the single "#CHROM..." header line from the
     * reader, validating that the stream declares a supported VCF4 version along the way.
     *
     * @param reader the line reader to take header lines from
     * @return the parsed header, as produced by {@link #createHeader} (declared {@code Object}
     *         to satisfy the codec interface)
     * @throws TribbleException.InvalidHeader if no version line is seen before the CHROM line,
     *         if the declared version is not VCF 4.0/4.1, or if the CHROM line is missing entirely
     */
    public Object readHeader(LineReader reader) {
        List<String> headerStrings = new ArrayList<String>();

        String line;
        try {
            boolean foundHeaderVersion = false;
            while ((line = reader.readLine()) != null) {
                lineNo++;
                if (line.startsWith(VCFHeader.METADATA_INDICATOR)) {
                    // A "##key=value" meta line; check whether it is the fileformat declaration.
                    String[] lineFields = line.substring(2).split("=");
                    if (lineFields.length == 2 && VCFHeaderVersion.isFormatString(lineFields[0]) ) {
                        if ( !VCFHeaderVersion.isVersionString(lineFields[1]) )
                            throw new TribbleException.InvalidHeader(lineFields[1] + " is not a supported version");
                        foundHeaderVersion = true;
                        version = VCFHeaderVersion.toHeaderVersion(lineFields[1]);
                        if ( version == VCFHeaderVersion.VCF3_3 || version == VCFHeaderVersion.VCF3_2 )
                            throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4; please use the VCF3 codec for " + lineFields[1]);
                        if ( version != VCFHeaderVersion.VCF4_0 && version != VCFHeaderVersion.VCF4_1 )
                            throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4 and does not support " + lineFields[1]);
                    }
                    headerStrings.add(line);
                }
                else if (line.startsWith(VCFHeader.HEADER_INDICATOR)) {
                    // The "#CHROM ..." column-header line terminates the header section.
                    if (!foundHeaderVersion) {
                        throw new TribbleException.InvalidHeader("We never saw a header line specifying VCF version");
                    }
                    return createHeader(headerStrings, line);
                }
                else {
                    // A data line before the CHROM header line means the header is malformed.
                    throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("IO Exception ", e);
        }
        // EOF reached without ever seeing the CHROM header line.
        throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
    }

    /**
     * parse the filter string, first checking to see if we already have parsed it in a previous attempt
     *
     * @param filterString the string to parse
     * @return a set of the filters applied or null if filters were not applied to the record (e.g. as per the missing value in a VCF)
     */
    protected Set<String> parseFilters(String filterString) {
        return parseFilters(filterHash, lineNo, filterString);
    }

    /**
     * Parses a VCF4 FILTER field value into an unmodifiable set of filter names.
     *
     * @param cache        optional memoisation map from filter string to its parsed set (may be null)
     * @param lineNo       current line number, used only for error reporting
     * @param filterString the raw FILTER field value
     * @return null for the unfiltered ("." ) value, an empty set for PASS, otherwise the set of
     *         filter names; always unmodifiable
     */
    public static Set<String> parseFilters(final Map<String, LinkedHashSet<String>> cache, final int lineNo, final String filterString) {
        // null for unfiltered
        if ( filterString.equals(VCFConstants.UNFILTERED) )
            return null;

        // empty set for passes filters
        if ( filterString.equals(VCFConstants.PASSES_FILTERS_v4) )
            return Collections.emptySet();
        if ( filterString.equals(VCFConstants.PASSES_FILTERS_v3) )
            generateException(VCFConstants.PASSES_FILTERS_v3 + " is an invalid filter name in vcf4", lineNo);
        if ( filterString.length() == 0 )
            generateException("The VCF specification requires a valid filter status: filter was " + filterString, lineNo);

        // do we have the filter string cached?
        if ( cache != null && cache.containsKey(filterString) )
            return Collections.unmodifiableSet(cache.get(filterString));

        // otherwise we have to parse and cache the value.
        // (A useless self-assignment "fFields = fFields;" that used to sit here was removed.)
        LinkedHashSet<String> fFields = new LinkedHashSet<String>();
        if ( filterString.indexOf(VCFConstants.FILTER_CODE_SEPARATOR) == -1 )
            fFields.add(filterString);
        else
            fFields.addAll(Arrays.asList(filterString.split(VCFConstants.FILTER_CODE_SEPARATOR)));

        if ( cache != null )
            cache.put(filterString, fFields);

        return Collections.unmodifiableSet(fFields);
    }

    /**
     * create a genotype map
     *
     * @param str     the genotype portion of the VCF line (FORMAT column followed by one column per sample)
     * @param alleles the list of alleles declared for this site
     * @param chr     contig name, used only for error reporting
     * @param pos     position, used only for error reporting
     * @return a lazily-materialisable container of one Genotype per sample, in header order
     */
    public LazyGenotypesContext.LazyData createGenotypeMap(String str, List<Allele> alleles, String chr, int pos) {
        // Lazily size the scratch array from the header: one slot for FORMAT plus one per sample.
        if (genotypeParts == null)
            genotypeParts = new String[header.getColumnCount() - NUM_STANDARD_FIELDS];

        int nParts = ParsingUtils.split(str, genotypeParts, VCFConstants.FIELD_SEPARATOR_CHAR);
        if ( nParts != genotypeParts.length )
            generateException("there are " + (nParts-1) + " genotypes while the header requires that " + (genotypeParts.length-1) + " genotypes be present for all records", lineNo);

        ArrayList<Genotype> genotypes = new ArrayList<Genotype>(nParts);

        // get the format keys
        int nGTKeys = ParsingUtils.split(genotypeParts[0], genotypeKeyArray, VCFConstants.GENOTYPE_FIELD_SEPARATOR_CHAR);

        // cycle through the sample names
        Iterator<String> sampleNameIterator = header.getGenotypeSamples().iterator();

        // clear out our allele mapping
        alleleMap.clear();

        // cycle through the genotype strings (offset 0 is the FORMAT column itself)
        for (int genotypeOffset = 1; genotypeOffset < nParts; genotypeOffset++) {
            int GTValueSplitSize = ParsingUtils.split(genotypeParts[genotypeOffset], GTValueArray, VCFConstants.GENOTYPE_FIELD_SEPARATOR_CHAR);

            double GTQual = VariantContext.NO_LOG10_PERROR;
            Set<String> genotypeFilters = null;
            Map<String, Object> gtAttributes = null;
            String sampleName = sampleNameIterator.next();

            // check to see if the value list is longer than the key list, which is a problem
            // NOTE(review): "parts" is a field presumably populated by the superclass for the
            // current record — confirm it is valid at this point.
            if (nGTKeys < GTValueSplitSize)
                generateException("There are too many keys for the sample " + sampleName + ", keys = " + parts[8] + ", values = " + parts[genotypeOffset]);

            int genotypeAlleleLocation = -1;
            if (nGTKeys >= 1) {
                gtAttributes = new HashMap<String, Object>(nGTKeys - 1);

                for (int i = 0; i < nGTKeys; i++) {
                    // new String(...) forces a copy, detaching the key from any larger backing
                    // buffer produced by the splitter (memory-retention concern on old JDKs).
                    final String gtKey = new String(genotypeKeyArray[i]);
                    boolean missing = i >= GTValueSplitSize;

                    // todo -- all of these on the fly parsing of the missing value should be static constants
                    if (gtKey.equals(VCFConstants.GENOTYPE_KEY)) {
                        genotypeAlleleLocation = i;
                    } else if (gtKey.equals(VCFConstants.GENOTYPE_QUALITY_KEY)) {
                        GTQual = missing ? parseQual(VCFConstants.MISSING_VALUE_v4) : parseQual(GTValueArray[i]);
                    } else if (gtKey.equals(VCFConstants.GENOTYPE_FILTER_KEY)) {
                        genotypeFilters = missing ? parseFilters(VCFConstants.MISSING_VALUE_v4) : parseFilters(getCachedString(GTValueArray[i]));
                    } else if ( missing ) {
                        // short-cut the attributes for missing values: store the missing token directly
                        gtAttributes.put(gtKey, VCFConstants.MISSING_VALUE_v4);
                    } else {
                        gtAttributes.put(gtKey, new String(GTValueArray[i]));
                    }
                }
            }

            // check to make sure we found a genotype field if we are a VCF4.0 file
            if ( version == VCFHeaderVersion.VCF4_0 && genotypeAlleleLocation == -1 )
                generateException("Unable to find the GT field for the record; the GT field is required in VCF4.0");
            if ( genotypeAlleleLocation > 0 )
                generateException("Saw GT field at position " + genotypeAlleleLocation + ", but it must be at the first position for genotypes when present");

            List<Allele> GTalleles = (genotypeAlleleLocation == -1 ? new ArrayList<Allele>(0) : parseGenotypeAlleles(GTValueArray[genotypeAlleleLocation], alleles, alleleMap));
            // The phased separator ("|") appearing anywhere in the GT value marks the genotype as phased.
            boolean phased = genotypeAlleleLocation != -1 && GTValueArray[genotypeAlleleLocation].indexOf(VCFConstants.PHASED) != -1;

            // add it to the list
            try {
                genotypes.add(new Genotype(sampleName, GTalleles, GTQual, genotypeFilters, gtAttributes, phased));
            } catch (TribbleException e) {
                // Re-wrap with the genomic coordinate so the failure can be located in the input.
                throw new TribbleException.InternalCodecException(e.getMessage() + ", at position " + chr+":"+pos);
            }
        }

        return new LazyGenotypesContext.LazyData(genotypes, header.sampleNamesInOrder, header.sampleNameToOffset);
    }

    /**
     * Returns true when the candidate file starts with the VCF4 magic header.
     */
    @Override
    public boolean canDecode(final String potentialInput) {
        return canDecodeFile(potentialInput, VCF4_MAGIC_HEADER);
    }
}
package com.AirMaps;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.drawable.Animatable;
import android.net.Uri;
import android.view.View;
import android.widget.LinearLayout;

import com.facebook.common.references.CloseableReference;
import com.facebook.datasource.DataSource;
import com.facebook.drawee.backends.pipeline.Fresco;
import com.facebook.drawee.controller.BaseControllerListener;
import com.facebook.drawee.controller.ControllerListener;
import com.facebook.drawee.drawable.ScalingUtils;
import com.facebook.drawee.generic.GenericDraweeHierarchy;
import com.facebook.drawee.generic.GenericDraweeHierarchyBuilder;
import com.facebook.drawee.interfaces.DraweeController;
import com.facebook.drawee.view.DraweeHolder;
import com.facebook.imagepipeline.core.ImagePipeline;
import com.facebook.imagepipeline.image.CloseableImage;
import com.facebook.imagepipeline.image.CloseableStaticBitmap;
import com.facebook.imagepipeline.image.ImageInfo;
import com.facebook.imagepipeline.request.ImageRequest;
import com.facebook.imagepipeline.request.ImageRequestBuilder;
import com.facebook.react.bridge.ReadableMap;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;

import javax.annotation.Nullable;

/**
 * Map-annotation feature wrapping a Google Maps {@link Marker}.
 *
 * <p>Properties arriving over the React Native bridge (position, title, icon, anchors, ...) are
 * stored in fields; when a live {@code Marker} exists the change is also pushed to it, and
 * {@link #update()} re-renders the marker icon. Remote icon images are fetched through the
 * Fresco image pipeline; arbitrary child views turn the marker into a custom-rendered bitmap.</p>
 */
public class AirMapMarker extends AirMapFeature {

    private MarkerOptions markerOptions;   // lazily built options snapshot (see getMarkerOptions)
    private Marker marker;                 // the live map marker; null until addToMap / after removeFromMap
    private int width;                     // measured size used when rasterising a custom marker view
    private int height;
    private LatLng position;
    private String title;
    private String snippet;

    private boolean anchorIsSet;           // distinguishes "explicitly set to (0,0)" from "never set"
    private float anchorX;
    private float anchorY;

    private AirMapCallout calloutView;     // the React-provided info-window content, if any
    private View wrappedCalloutView;       // calloutView wrapped in centering LinearLayouts (lazy)
    private Context context;

    private float markerHue = 0.0f; // should be between 0 and 360
    private BitmapDescriptor iconBitmapDescriptor;  // non-null when a local/remote image icon is in use
    private float rotation = 0.0f;
    private boolean flat = false;
    private boolean draggable = false;

    private float calloutAnchorX;
    private float calloutAnchorY;
    private boolean calloutAnchorIsSet;

    private boolean hasCustomMarkerView = false;  // true once a non-callout child view is added

    private final DraweeHolder mLogoHolder;  // hosts the Fresco controller lifecycle for icon fetches
    private DataSource<CloseableReference<CloseableImage>> dataSource;  // pending remote image fetch

    // Fired by Fresco when a remote icon finishes decoding: copies the bitmap out of the
    // pipeline's pooled memory, converts it into a BitmapDescriptor, releases the pipeline
    // references, then refreshes the marker.
    private final ControllerListener<ImageInfo> mLogoControllerListener =
            new BaseControllerListener<ImageInfo>() {
                @Override
                public void onFinalImageSet(
                        String id,
                        @Nullable final ImageInfo imageInfo,
                        @Nullable Animatable animatable) {
                    CloseableReference<CloseableImage> imageReference = null;
                    try {
                        imageReference = dataSource.getResult();
                        if (imageReference != null) {
                            CloseableImage image = imageReference.get();
                            // NOTE(review): the "image != null &&" check is redundant — instanceof
                            // is false for null — but is kept byte-identical here.
                            if (image != null && image instanceof CloseableStaticBitmap) {
                                CloseableStaticBitmap closeableStaticBitmap = (CloseableStaticBitmap) image;
                                Bitmap bitmap = closeableStaticBitmap.getUnderlyingBitmap();
                                if (bitmap != null) {
                                    // Copy before the pooled bitmap is recycled by Fresco.
                                    bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
                                    iconBitmapDescriptor = BitmapDescriptorFactory.fromBitmap(bitmap);
                                }
                            }
                        }
                    } finally {
                        // Always release pipeline resources, even if decoding failed above.
                        dataSource.close();
                        if (imageReference != null) {
                            CloseableReference.closeSafely(imageReference);
                        }
                    }
                    update();
                }
            };

    public AirMapMarker(Context context) {
        super(context);
        this.context = context;
        mLogoHolder = DraweeHolder.create(createDraweeHierarchy(), context);
        // Attach immediately: this holder is never shown on screen, it only drives image loads.
        mLogoHolder.onAttach();
    }

    // Minimal Drawee hierarchy for the off-screen icon fetch (no fade, fit-center scaling).
    private GenericDraweeHierarchy createDraweeHierarchy() {
        return new GenericDraweeHierarchyBuilder(getResources())
                .setActualImageScaleType(ScalingUtils.ScaleType.FIT_CENTER)
                .setFadeDuration(0)
                .build();
    }

    /** Sets the marker position from a {latitude, longitude} bridge map. */
    public void setCoordinate(ReadableMap coordinate) {
        position = new LatLng(coordinate.getDouble("latitude"), coordinate.getDouble("longitude"));
        if (marker != null) {
            marker.setPosition(position);
        }
        update();
    }

    public void setTitle(String title) {
        this.title = title;
        if (marker != null) {
            marker.setTitle(title);
        }
        update();
    }

    public void setSnippet(String snippet) {
        this.snippet = snippet;
        if (marker != null) {
            marker.setSnippet(snippet);
        }
        update();
    }

    public void setRotation(float rotation) {
        this.rotation = rotation;
        if (marker != null) {
            marker.setRotation(rotation);
        }
        update();
    }

    public void setFlat(boolean flat) {
        this.flat = flat;
        if (marker != null) {
            marker.setFlat(flat);
        }
        update();
    }

    public void setDraggable(boolean draggable) {
        this.draggable = draggable;
        if (marker != null) {
            marker.setDraggable(draggable);
        }
        update();
    }

    /** Hue for the default pin icon; only used when no image/custom view icon is set. */
    public void setMarkerHue(float markerHue) {
        this.markerHue = markerHue;
        update();
    }

    public void setAnchor(double x, double y) {
        anchorIsSet = true;
        anchorX = (float)x;
        anchorY = (float)y;
        if (marker != null) {
            marker.setAnchor(anchorX, anchorY);
        }
        update();
    }

    public void setCalloutAnchor(double x, double y) {
        calloutAnchorIsSet = true;
        calloutAnchorX = (float)x;
        calloutAnchorY = (float)y;
        if (marker != null) {
            marker.setInfoWindowAnchor(calloutAnchorX, calloutAnchorY);
        }
        update();
    }

    /**
     * Sets the marker icon image.
     *
     * <p>null clears the icon; an http(s) URI is fetched asynchronously through Fresco (the
     * marker updates when the listener fires); anything else is treated as a local drawable
     * resource name and resolved synchronously.</p>
     */
    public void setImage(String uri) {
        if (uri == null) {
            iconBitmapDescriptor = null;
            update();
        } else if (uri.startsWith("http://") || uri.startsWith("https://")) {
            ImageRequest imageRequest = ImageRequestBuilder
                    .newBuilderWithSource(Uri.parse(uri))
                    .build();

            ImagePipeline imagePipeline = Fresco.getImagePipeline();
            dataSource = imagePipeline.fetchDecodedImage(imageRequest, this);
            DraweeController controller = Fresco.newDraweeControllerBuilder()
                    .setImageRequest(imageRequest)
                    .setControllerListener(mLogoControllerListener)
                    .setOldController(mLogoHolder.getController())
                    .build();
            mLogoHolder.setController(controller);
        } else {
            iconBitmapDescriptor = getBitmapDescriptorByName(uri);
            update();
        }
    }

    /** Returns (building once) the MarkerOptions snapshot used when adding this marker to a map. */
    public MarkerOptions getMarkerOptions() {
        if (markerOptions == null) {
            markerOptions = createMarkerOptions();
        }
        return markerOptions;
    }

    @Override
    public void addView(View child, int index) {
        super.addView(child, index);
        // if children are added, it means we are rendering a custom marker
        if (!(child instanceof AirMapCallout)) {
            hasCustomMarkerView = true;
        }
        update();
    }

    @Override
    public Object getFeature() {
        return marker;
    }

    @Override
    public void addToMap(GoogleMap map) {
        marker = map.addMarker(getMarkerOptions());
    }

    @Override
    public void removeFromMap(GoogleMap map) {
        marker.remove();
        marker = null;
    }

    // Icon precedence: custom child view > image icon > default hue-tinted pin.
    private BitmapDescriptor getIcon() {
        if (hasCustomMarkerView) {
            // creating a bitmap from an arbitrary view
            return BitmapDescriptorFactory.fromBitmap(createDrawable());
        } else if (iconBitmapDescriptor != null) {
            // use local image as a marker
            return iconBitmapDescriptor;
        } else {
            // render the default marker pin
            return BitmapDescriptorFactory.defaultMarker(this.markerHue);
        }
    }

    private MarkerOptions createMarkerOptions() {
        MarkerOptions options = new MarkerOptions().position(position);
        if (anchorIsSet) options.anchor(anchorX, anchorY);
        if (calloutAnchorIsSet) options.infoWindowAnchor(calloutAnchorX, calloutAnchorY);
        options.title(title);
        options.snippet(snippet);
        options.rotation(rotation);
        options.flat(flat);
        options.draggable(draggable);
        options.icon(getIcon());
        return options;
    }

    /** Re-renders the live marker's icon; no-op while the marker is not on a map. */
    public void update() {
        if (marker == null) {
            return;
        }
        marker.setIcon(getIcon());
    }

    /** Records the measured view size, then refreshes the icon (used for custom marker views). */
    public void update(int width, int height) {
        this.width = width;
        this.height = height;
        update();
    }

    // Rasterises this view (the custom marker content) into a bitmap; falls back to 100x100
    // when the view has not been measured yet.
    private Bitmap createDrawable() {
        int width = this.width <= 0 ? 100 : this.width;
        int height = this.height <= 0 ? 100 : this.height;
        this.buildDrawingCache();
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        this.draw(canvas);
        return bitmap;
    }

    public void setCalloutView(AirMapCallout view) {
        this.calloutView = view;
    }

    public AirMapCallout getCalloutView() {
        return this.calloutView;
    }

    /** Returns the callout view for tooltip-style callouts, otherwise null. */
    public View getCallout() {
        if (this.calloutView == null) return null;

        if (this.wrappedCalloutView == null) {
            this.wrapCalloutView();
        }

        if (this.calloutView.getTooltip()) {
            return this.wrappedCalloutView;
        } else {
            return null;
        }
    }

    /** Returns the callout view for non-tooltip (default info-window) callouts, otherwise null. */
    public View getInfoContents() {
        if (this.calloutView == null) return null;

        if (this.wrappedCalloutView == null) {
            this.wrapCalloutView();
        }

        if (this.calloutView.getTooltip()) {
            return null;
        } else {
            return this.wrappedCalloutView;
        }
    }

    private void wrapCalloutView() {
        // some hackery is needed to get the arbitrary infowindow view to render centered, and
        // with only the width/height that it needs.
        if (this.calloutView == null || this.calloutView.getChildCount() == 0) {
            return;
        }

        LinearLayout LL = new LinearLayout(context);
        LL.setOrientation(LinearLayout.VERTICAL);
        LL.setLayoutParams(new LinearLayout.LayoutParams(
                this.calloutView.width,
                this.calloutView.height,
                0f
        ));

        LinearLayout LL2 = new LinearLayout(context);
        LL2.setOrientation(LinearLayout.HORIZONTAL);
        LL2.setLayoutParams(new LinearLayout.LayoutParams(
                this.calloutView.width,
                this.calloutView.height,
                0f
        ));

        LL.addView(LL2);
        LL2.addView(this.calloutView);

        this.wrappedCalloutView = LL;
    }

    // Resolves a drawable resource id by name within this app's package.
    private int getDrawableResourceByName(String name) {
        return getResources().getIdentifier(
                name,
                "drawable",
                getContext().getPackageName());
    }

    private BitmapDescriptor getBitmapDescriptorByName(String name) {
        return BitmapDescriptorFactory.fromResource(getDrawableResourceByName(name));
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.devopsguru.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Details about a proactive insight. This object is returned by <code>DescribeInsight</code>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devops-guru-2020-12-01/ProactiveOrganizationInsightSummary" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ProactiveOrganizationInsightSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * The ID of the insight summary. * </p> */ private String id; /** * <p> * The ID of the Amazon Web Services account. * </p> */ private String accountId; /** * <p> * The ID of the organizational unit. * </p> */ private String organizationalUnitId; /** * <p> * The name of the insight summary. * </p> */ private String name; /** * <p> * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * </p> */ private String severity; /** * <p> * An array of status values used to search for insights. 
* </p> */ private String status; private InsightTimeRange insightTimeRange; private PredictionTimeRange predictionTimeRange; private ResourceCollection resourceCollection; private ServiceCollection serviceCollection; /** * <p> * The ID of the insight summary. * </p> * * @param id * The ID of the insight summary. */ public void setId(String id) { this.id = id; } /** * <p> * The ID of the insight summary. * </p> * * @return The ID of the insight summary. */ public String getId() { return this.id; } /** * <p> * The ID of the insight summary. * </p> * * @param id * The ID of the insight summary. * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withId(String id) { setId(id); return this; } /** * <p> * The ID of the Amazon Web Services account. * </p> * * @param accountId * The ID of the Amazon Web Services account. */ public void setAccountId(String accountId) { this.accountId = accountId; } /** * <p> * The ID of the Amazon Web Services account. * </p> * * @return The ID of the Amazon Web Services account. */ public String getAccountId() { return this.accountId; } /** * <p> * The ID of the Amazon Web Services account. * </p> * * @param accountId * The ID of the Amazon Web Services account. * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withAccountId(String accountId) { setAccountId(accountId); return this; } /** * <p> * The ID of the organizational unit. * </p> * * @param organizationalUnitId * The ID of the organizational unit. */ public void setOrganizationalUnitId(String organizationalUnitId) { this.organizationalUnitId = organizationalUnitId; } /** * <p> * The ID of the organizational unit. * </p> * * @return The ID of the organizational unit. */ public String getOrganizationalUnitId() { return this.organizationalUnitId; } /** * <p> * The ID of the organizational unit. 
* </p> * * @param organizationalUnitId * The ID of the organizational unit. * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withOrganizationalUnitId(String organizationalUnitId) { setOrganizationalUnitId(organizationalUnitId); return this; } /** * <p> * The name of the insight summary. * </p> * * @param name * The name of the insight summary. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the insight summary. * </p> * * @return The name of the insight summary. */ public String getName() { return this.name; } /** * <p> * The name of the insight summary. * </p> * * @param name * The name of the insight summary. * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withName(String name) { setName(name); return this; } /** * <p> * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * </p> * * @param severity * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * @see InsightSeverity */ public void setSeverity(String severity) { this.severity = severity; } /** * <p> * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. 
* </p> * * @return An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * @see InsightSeverity */ public String getSeverity() { return this.severity; } /** * <p> * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * </p> * * @param severity * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. * @see InsightSeverity */ public ProactiveOrganizationInsightSummary withSeverity(String severity) { setSeverity(severity); return this; } /** * <p> * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * </p> * * @param severity * An array of severity values used to search for insights. For more information, see <a href= * "https://docs.aws.amazon.com/devops-guru/latest/userguide/working-with-insights.html#understanding-insights-severities" * >Understanding insight severities</a> in the <i>Amazon DevOps Guru User Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. 
* @see InsightSeverity */ public ProactiveOrganizationInsightSummary withSeverity(InsightSeverity severity) { this.severity = severity.toString(); return this; } /** * <p> * An array of status values used to search for insights. * </p> * * @param status * An array of status values used to search for insights. * @see InsightStatus */ public void setStatus(String status) { this.status = status; } /** * <p> * An array of status values used to search for insights. * </p> * * @return An array of status values used to search for insights. * @see InsightStatus */ public String getStatus() { return this.status; } /** * <p> * An array of status values used to search for insights. * </p> * * @param status * An array of status values used to search for insights. * @return Returns a reference to this object so that method calls can be chained together. * @see InsightStatus */ public ProactiveOrganizationInsightSummary withStatus(String status) { setStatus(status); return this; } /** * <p> * An array of status values used to search for insights. * </p> * * @param status * An array of status values used to search for insights. * @return Returns a reference to this object so that method calls can be chained together. * @see InsightStatus */ public ProactiveOrganizationInsightSummary withStatus(InsightStatus status) { this.status = status.toString(); return this; } /** * @param insightTimeRange */ public void setInsightTimeRange(InsightTimeRange insightTimeRange) { this.insightTimeRange = insightTimeRange; } /** * @return */ public InsightTimeRange getInsightTimeRange() { return this.insightTimeRange; } /** * @param insightTimeRange * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ProactiveOrganizationInsightSummary withInsightTimeRange(InsightTimeRange insightTimeRange) { setInsightTimeRange(insightTimeRange); return this; } /** * @param predictionTimeRange */ public void setPredictionTimeRange(PredictionTimeRange predictionTimeRange) { this.predictionTimeRange = predictionTimeRange; } /** * @return */ public PredictionTimeRange getPredictionTimeRange() { return this.predictionTimeRange; } /** * @param predictionTimeRange * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withPredictionTimeRange(PredictionTimeRange predictionTimeRange) { setPredictionTimeRange(predictionTimeRange); return this; } /** * @param resourceCollection */ public void setResourceCollection(ResourceCollection resourceCollection) { this.resourceCollection = resourceCollection; } /** * @return */ public ResourceCollection getResourceCollection() { return this.resourceCollection; } /** * @param resourceCollection * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withResourceCollection(ResourceCollection resourceCollection) { setResourceCollection(resourceCollection); return this; } /** * @param serviceCollection */ public void setServiceCollection(ServiceCollection serviceCollection) { this.serviceCollection = serviceCollection; } /** * @return */ public ServiceCollection getServiceCollection() { return this.serviceCollection; } /** * @param serviceCollection * @return Returns a reference to this object so that method calls can be chained together. */ public ProactiveOrganizationInsightSummary withServiceCollection(ServiceCollection serviceCollection) { setServiceCollection(serviceCollection); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. 
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    // Only non-null members are rendered; each rendered entry is followed by a
    // comma except the last one, matching the generated-SDK model convention.
    if (getId() != null)
        sb.append("Id: ").append(getId()).append(",");
    if (getAccountId() != null)
        sb.append("AccountId: ").append(getAccountId()).append(",");
    if (getOrganizationalUnitId() != null)
        sb.append("OrganizationalUnitId: ").append(getOrganizationalUnitId()).append(",");
    if (getName() != null)
        sb.append("Name: ").append(getName()).append(",");
    if (getSeverity() != null)
        sb.append("Severity: ").append(getSeverity()).append(",");
    if (getStatus() != null)
        sb.append("Status: ").append(getStatus()).append(",");
    if (getInsightTimeRange() != null)
        sb.append("InsightTimeRange: ").append(getInsightTimeRange()).append(",");
    if (getPredictionTimeRange() != null)
        sb.append("PredictionTimeRange: ").append(getPredictionTimeRange()).append(",");
    if (getResourceCollection() != null)
        sb.append("ResourceCollection: ").append(getResourceCollection()).append(",");
    if (getServiceCollection() != null)
        sb.append("ServiceCollection: ").append(getServiceCollection());
    sb.append("}");
    return sb.toString();
}

@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;

    if (obj instanceof ProactiveOrganizationInsightSummary == false)
        return false;
    ProactiveOrganizationInsightSummary other = (ProactiveOrganizationInsightSummary) obj;
    // For each member: the XOR test rejects pairs where exactly one side is
    // null; the second test compares values when both are non-null.
    if (other.getId() == null ^ this.getId() == null)
        return false;
    if (other.getId() != null && other.getId().equals(this.getId()) == false)
        return false;
    if (other.getAccountId() == null ^ this.getAccountId() == null)
        return false;
    if (other.getAccountId() != null && other.getAccountId().equals(this.getAccountId()) == false)
        return false;
    if (other.getOrganizationalUnitId() == null ^ this.getOrganizationalUnitId() == null)
        return false;
    if (other.getOrganizationalUnitId() != null && other.getOrganizationalUnitId().equals(this.getOrganizationalUnitId()) == false)
        return false;
    if (other.getName() == null ^ this.getName() == null)
        return false;
    if (other.getName() != null && other.getName().equals(this.getName()) == false)
        return false;
    if (other.getSeverity() == null ^ this.getSeverity() == null)
        return false;
    if (other.getSeverity() != null && other.getSeverity().equals(this.getSeverity()) == false)
        return false;
    if (other.getStatus() == null ^ this.getStatus() == null)
        return false;
    if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
        return false;
    if (other.getInsightTimeRange() == null ^ this.getInsightTimeRange() == null)
        return false;
    if (other.getInsightTimeRange() != null && other.getInsightTimeRange().equals(this.getInsightTimeRange()) == false)
        return false;
    if (other.getPredictionTimeRange() == null ^ this.getPredictionTimeRange() == null)
        return false;
    if (other.getPredictionTimeRange() != null && other.getPredictionTimeRange().equals(this.getPredictionTimeRange()) == false)
        return false;
    if (other.getResourceCollection() == null ^ this.getResourceCollection() == null)
        return false;
    if (other.getResourceCollection() != null && other.getResourceCollection().equals(this.getResourceCollection()) == false)
        return false;
    if (other.getServiceCollection() == null ^ this.getServiceCollection() == null)
        return false;
    if (other.getServiceCollection() != null && other.getServiceCollection().equals(this.getServiceCollection()) == false)
        return false;
    return true;
}

@Override
public int hashCode() {
    // Standard 31-multiplier accumulation over the same members that
    // participate in equals(), keeping the equals/hashCode contract.
    final int prime = 31;
    int hashCode = 1;

    hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
    hashCode = prime * hashCode + ((getAccountId() == null) ? 0 : getAccountId().hashCode());
    hashCode = prime * hashCode + ((getOrganizationalUnitId() == null) ? 0 : getOrganizationalUnitId().hashCode());
    hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
    hashCode = prime * hashCode + ((getSeverity() == null) ? 0 : getSeverity().hashCode());
    hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
    hashCode = prime * hashCode + ((getInsightTimeRange() == null) ? 0 : getInsightTimeRange().hashCode());
    hashCode = prime * hashCode + ((getPredictionTimeRange() == null) ? 0 : getPredictionTimeRange().hashCode());
    hashCode = prime * hashCode + ((getResourceCollection() == null) ? 0 : getResourceCollection().hashCode());
    hashCode = prime * hashCode + ((getServiceCollection() == null) ? 0 : getServiceCollection().hashCode());
    return hashCode;
}

@Override
public ProactiveOrganizationInsightSummary clone() {
    try {
        // Shallow copy; this class is Cloneable, so the exception is unreachable
        // in practice and is rethrown as an IllegalStateException if it occurs.
        return (ProactiveOrganizationInsightSummary) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}

@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    // Delegates wire serialization to the generated marshaller for this type.
    com.amazonaws.services.devopsguru.model.transform.ProactiveOrganizationInsightSummaryMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.xmlrpc.server;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.XmlRpcHandler;
import org.apache.xmlrpc.XmlRpcRequest;
import org.apache.xmlrpc.common.TypeConverterFactory;
import org.apache.xmlrpc.common.TypeConverterFactoryImpl;
import org.apache.xmlrpc.metadata.ReflectiveXmlRpcMetaDataHandler;
import org.apache.xmlrpc.metadata.Util;
import org.apache.xmlrpc.metadata.XmlRpcListableHandlerMapping;
import org.apache.xmlrpc.metadata.XmlRpcMetaDataHandler;
import org.apache.xmlrpc.server.RequestProcessorFactoryFactory.RequestProcessorFactory;

/** Abstract base class of handler mappings, which are
 * using reflection.
 */
public abstract class AbstractReflectiveHandlerMapping
        implements XmlRpcListableHandlerMapping {
    /** An object implementing this interface may be used
     * to validate user names and passwords.
     */
    public interface AuthenticationHandler {
        /** Returns, whether the user is authenticated and
         * authorized to perform the request.
         */
        boolean isAuthorized(XmlRpcRequest pRequest)
            throws XmlRpcException;
    }

    // Converts incoming XML-RPC values to the parameter types of the
    // reflectively-invoked handler methods.
    private TypeConverterFactory typeConverterFactory = new TypeConverterFactoryImpl();
    // Maps handler name ("key.methodName") -> XmlRpcHandler. Raw type kept for
    // source compatibility with subclasses (pre-generics codebase).
    protected Map handlerMap = new HashMap();
    private AuthenticationHandler authenticationHandler;
    private RequestProcessorFactoryFactory requestProcessorFactoryFactory = new RequestProcessorFactoryFactory.RequestSpecificProcessorFactoryFactory();
    private boolean voidMethodEnabled;

    /**
     * Sets the mappings {@link TypeConverterFactory}.
     */
    public void setTypeConverterFactory(TypeConverterFactory pFactory) {
        typeConverterFactory = pFactory;
    }

    /**
     * Returns the mappings {@link TypeConverterFactory}.
     */
    public TypeConverterFactory getTypeConverterFactory() {
        return typeConverterFactory;
    }

    /** Sets the mappings {@link RequestProcessorFactoryFactory}. Note, that this doesn't
     * affect already registered handlers.
     */
    public void setRequestProcessorFactoryFactory(RequestProcessorFactoryFactory pFactory) {
        requestProcessorFactoryFactory = pFactory;
    }

    /** Returns the mappings {@link RequestProcessorFactoryFactory}.
     */
    public RequestProcessorFactoryFactory getRequestProcessorFactoryFactory() {
        return requestProcessorFactoryFactory;
    }

    /** Returns the authentication handler, if any, or null.
     */
    public AuthenticationHandler getAuthenticationHandler() {
        return authenticationHandler;
    }

    /** Sets the authentication handler, if any, or null.
     */
    public void setAuthenticationHandler(AuthenticationHandler pAuthenticationHandler) {
        authenticationHandler = pAuthenticationHandler;
    }

    /** Decides whether the given method may be exposed as an XML-RPC handler
     * method: it must be public, non-static, not declared on Object, and (unless
     * void methods are explicitly enabled) must not return void.
     */
    protected boolean isHandlerMethod(Method pMethod) {
        if (!Modifier.isPublic(pMethod.getModifiers())) {
            return false;  // Ignore methods, which aren't public
        }
        if (Modifier.isStatic(pMethod.getModifiers())) {
            return false;  // Ignore methods, which are static
        }
        if (!isVoidMethodEnabled()  &&  pMethod.getReturnType() == void.class) {
            return false;  // Ignore void methods.
        }
        if (pMethod.getDeclaringClass() == Object.class) {
            return false;  // Ignore methods from Object.class
        }
        return true;
    }

    /** Searches for methods in the given class. For any valid
     * method, it creates an instance of {@link XmlRpcHandler}.
     * Valid methods are defined as follows:
     * <ul>
     *   <li>They must be public.</li>
     *   <li>They must not be static.</li>
     *   <li>The return type must not be void.</li>
     *   <li>The declaring class must not be
     *     {@link java.lang.Object}.</li>
     *   <li>If multiple methods with the same name exist,
     *     which meet the above conditins, then an attempt is
     *     made to identify a method with a matching signature.
     *     If such a method is found, then this method is
     *     invoked. If multiple such methods are found, then
     *     the first one is choosen. (This may be the case,
     *     for example, if there are methods with a similar
     *     signature, but varying subclasses.) Note, that
     *     there is no concept of the "most matching" method.
     *     If no matching method is found at all, then an
     *     exception is thrown.</li>
     * </ul>
     * @param pKey Suffix for building handler names. A dot and
     * the method name are being added.
     * @param pType The class being inspected.
     */
    protected void registerPublicMethods(String pKey,
                    Class pType) throws XmlRpcException {
        Map map = new HashMap();
        Method[] methods = pType.getMethods();
        for (int i = 0;  i < methods.length;  i++) {
            final Method method = methods[i];
            if (!isHandlerMethod(method)) {
                continue;
            }
            String name = pKey + "." + method.getName();
            Method[] mArray;
            Method[] oldMArray = (Method[]) map.get(name);
            // Overloads accumulate into a growing array under the same name.
            if (oldMArray == null) {
                mArray = new Method[]{method};
            } else {
                mArray = new Method[oldMArray.length+1];
                System.arraycopy(oldMArray, 0, mArray, 0, oldMArray.length);
                mArray[oldMArray.length] = method;
            }
            map.put(name, mArray);
        }
        // One handler per distinct method name, wrapping all of its overloads.
        for (Iterator iter = map.entrySet().iterator();  iter.hasNext();  ) {
            Map.Entry entry = (Map.Entry) iter.next();
            String name = (String) entry.getKey();
            Method[] mArray = (Method[]) entry.getValue();
            handlerMap.put(name, newXmlRpcHandler(pType, mArray));
        }
    }

    /** Creates a new instance of {@link XmlRpcHandler}.
     * @param pClass The class, which was inspected for handler
     * methods. This is used for error messages only. Typically,
     * it is the same than <pre>pInstance.getClass()</pre>.
     * @param pMethods The method being invoked.
     */
    protected XmlRpcHandler newXmlRpcHandler(final Class pClass,
                final Method[] pMethods) throws XmlRpcException {
        String[][] sig = getSignature(pMethods);
        String help = getMethodHelp(pClass, pMethods);
        RequestProcessorFactory factory = requestProcessorFactoryFactory.getRequestProcessorFactory(pClass);
        // Without both signature and help text, introspection metadata cannot be
        // served, so fall back to the plain (non-metadata) handler.
        if (sig == null  ||  help == null) {
            return new ReflectiveXmlRpcHandler(this, typeConverterFactory,
                    pClass, factory, pMethods);
        }
        return new ReflectiveXmlRpcMetaDataHandler(this, typeConverterFactory,
                pClass, factory, pMethods, sig, help);
    }

    /** Creates a signature for the given method.
     */
    protected String[][] getSignature(Method[] pMethods) {
        return Util.getSignature(pMethods);
    }

    /** Creates a help string for the given method, when applied
     * to the given class.
     */
    protected String getMethodHelp(Class pClass, Method[] pMethods) {
        return Util.getMethodHelp(pClass, pMethods);
    }

    /** Returns the {@link XmlRpcHandler} with the given name.
     * @param pHandlerName The handlers name
     * @throws XmlRpcNoSuchHandlerException A handler with the given
     * name is unknown.
     */
    public XmlRpcHandler getHandler(String pHandlerName)
            throws XmlRpcNoSuchHandlerException, XmlRpcException {
        XmlRpcHandler result = (XmlRpcHandler) handlerMap.get(pHandlerName);
        if (result == null) {
            throw new XmlRpcNoSuchHandlerException("No such handler: " + pHandlerName);
        }
        return result;
    }

    /** Returns the names of all registered handlers that expose introspection
     * metadata (implements the XML-RPC <code>system.listMethods</code> call).
     */
    public String[] getListMethods() throws XmlRpcException {
        List list = new ArrayList();
        for (Iterator iter = handlerMap.entrySet().iterator();
             iter.hasNext();  ) {
            Map.Entry entry = (Map.Entry) iter.next();
            // Only handlers carrying metadata are listable.
            if (entry.getValue() instanceof XmlRpcMetaDataHandler) {
                list.add(entry.getKey());
            }
        }
        return (String[]) list.toArray(new String[list.size()]);
    }

    /** Returns the help string of the named handler, if it carries metadata;
     * throws otherwise (implements <code>system.methodHelp</code>).
     */
    public String getMethodHelp(String pHandlerName) throws XmlRpcException {
        XmlRpcHandler h = getHandler(pHandlerName);
        if (h instanceof XmlRpcMetaDataHandler)
            return ((XmlRpcMetaDataHandler)h).getMethodHelp();
        throw new XmlRpcNoSuchHandlerException("No help available for method: "
                + pHandlerName);
    }

    /** Returns the signatures of the named handler, if it carries metadata;
     * throws otherwise (implements <code>system.methodSignature</code>).
     */
    public String[][] getMethodSignature(String pHandlerName) throws XmlRpcException {
        XmlRpcHandler h = getHandler(pHandlerName);
        if (h instanceof XmlRpcMetaDataHandler)
            return ((XmlRpcMetaDataHandler)h).getSignatures();
        throw new XmlRpcNoSuchHandlerException("No metadata available for method: "
                + pHandlerName);
    }

    /**
     * Returns, whether void methods are enabled. By default, null values
     * aren't supported by XML-RPC and void methods are in fact returning
     * null (at least from the perspective of reflection).
     */
    public boolean isVoidMethodEnabled() {
        return voidMethodEnabled;
    }

    /**
     * Sets, whether void methods are enabled. By default, null values
     * aren't supported by XML-RPC and void methods are in fact returning
     * null (at least from the perspective of reflection).
     */
    public void setVoidMethodEnabled(boolean pVoidMethodEnabled) {
        voidMethodEnabled = pVoidMethodEnabled;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.parse;

import java.io.IOException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

import com.google.common.annotations.VisibleForTesting;

/**
 * BaseSemanticAnalyzer.
 *
 * Common state and utility methods shared by all Hive semantic analyzers:
 * metastore access, root-task bookkeeping, read/write entity tracking for
 * hooks, and static helpers for unescaping identifiers and SQL strings.
 */
public abstract class BaseSemanticAnalyzer {
  protected static final Log STATIC_LOG = LogFactory.getLog(BaseSemanticAnalyzer.class.getName());
  // Metastore facade used to resolve tables/partitions during analysis.
  protected final Hive db;
  protected final HiveConf conf;
  // Root tasks of the plan produced by analyzeInternal().
  protected List<Task<? extends Serializable>> rootTasks;
  protected FetchTask fetchTask;
  protected final Log LOG;
  protected final LogHelper console;

  protected Context ctx;
  protected HashMap<String, String> idToTableNameMap;
  protected QueryProperties queryProperties;

  /**
   * A set of FileSinkOperators being written to in an ACID compliant way.  We need to remember
   * them here because when we build them we don't yet know the transaction id.  We need to go
   * back and set it once we actually start running the query.
   */
  protected Set<FileSinkDesc> acidFileSinks = new HashSet<FileSinkDesc>();

  // Sort-order markers used by getColumnNamesOrder().
  public static int HIVE_COLUMN_ORDER_ASC = 1;
  public static int HIVE_COLUMN_ORDER_DESC = 0;

  /**
   * ReadEntitites that are passed to the hooks.
   */
  protected HashSet<ReadEntity> inputs;
  /**
   * List of WriteEntities that are passed to the hooks.
   */
  protected HashSet<WriteEntity> outputs;
  /**
   * Lineage information for the query.
   */
  protected LineageInfo linfo;
  protected TableAccessInfo tableAccessInfo;
  protected ColumnAccessInfo columnAccessInfo;
  /**
   * Columns accessed by updates
   */
  protected ColumnAccessInfo updateColumnAccessInfo;

  // Subclasses may override to bypass authorization checks.
  public boolean skipAuthorization() {
    return false;
  }

  /**
   * Holder for the delimiters parsed out of a ROW FORMAT DELIMITED clause.
   */
  class RowFormatParams {
    String fieldDelim = null;
    String fieldEscape = null;
    String collItemDelim = null;
    String mapKeyDelim = null;
    String lineDelim = null;
    String nullFormat = null;

    // Walks the TOK_SERDEPROPS children and fills in the delimiter fields.
    protected void analyzeRowFormat(ASTNode child) throws SemanticException {
      child = (ASTNode) child.getChild(0);
      int numChildRowFormat = child.getChildCount();
      for (int numC = 0; numC < numChildRowFormat; numC++) {
        ASTNode rowChild = (ASTNode) child.getChild(numC);
        switch (rowChild.getToken().getType()) {
        case HiveParser.TOK_TABLEROWFORMATFIELD:
          fieldDelim = unescapeSQLString(rowChild.getChild(0)
              .getText());
          if (rowChild.getChildCount() >= 2) {
            fieldEscape = unescapeSQLString(rowChild
                .getChild(1).getText());
          }
          break;
        case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
          collItemDelim = unescapeSQLString(rowChild
              .getChild(0).getText());
          break;
        case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
          mapKeyDelim = unescapeSQLString(rowChild.getChild(0)
              .getText());
          break;
        case HiveParser.TOK_TABLEROWFORMATLINES:
          lineDelim = unescapeSQLString(rowChild.getChild(0)
              .getText());
          // Only newline line terminators are supported (literal "\n" or
          // its ASCII code "10").
          if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
            throw new SemanticException(SemanticAnalyzer.generateErrorMessage(rowChild,
                ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
          }
          break;
        case HiveParser.TOK_TABLEROWFORMATNULL:
          nullFormat = unescapeSQLString(rowChild.getChild(0)
              .getText());
          break;
        default:
          throw new AssertionError("Unkown Token: " + rowChild);
        }
      }
    }
  }

  public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException {
    this(conf, createHiveDB(conf));
  }

  public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException {
    try {
      this.conf = conf;
      this.db = db;
      rootTasks = new ArrayList<Task<? extends Serializable>>();
      LOG = LogFactory.getLog(this.getClass().getName());
      console = new LogHelper(LOG);
      idToTableNameMap = new HashMap<String, String>();
      inputs = new LinkedHashSet<ReadEntity>();
      outputs = new LinkedHashSet<WriteEntity>();
    } catch (Exception e) {
      throw new SemanticException(e);
    }
  }

  // Obtains the thread-local Hive metastore client, wrapping failures.
  protected static Hive createHiveDB(HiveConf conf) throws SemanticException {
    try {
      return Hive.get(conf);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
  }

  public HashMap<String, String> getIdToTableNameMap() {
    return idToTableNameMap;
  }

  // Subclasses implement the actual semantic analysis of the parsed AST.
  public abstract void analyzeInternal(ASTNode ast) throws SemanticException;

  public void init(boolean clearPartsCache) {
    //no-op
  }

  public void initCtx(Context ctx) {
    this.ctx = ctx;
  }

  // Template method: set up context, initialize, then delegate to the subclass.
  public void analyze(ASTNode ast, Context ctx) throws SemanticException {
    initCtx(ctx);
    init(true);
    analyzeInternal(ast);
  }

  public void validate() throws SemanticException {
    // Implementations may choose to override this
  }

  public List<Task<? extends Serializable>> getRootTasks() {
    return rootTasks;
  }

  /**
   * @return the fetchTask
   */
  public FetchTask getFetchTask() {
    return fetchTask;
  }

  /**
   * @param fetchTask
   *          the fetchTask to set
   */
  public void setFetchTask(FetchTask fetchTask) {
    this.fetchTask = fetchTask;
  }

  protected void reset(boolean clearPartsCache) {
    rootTasks = new ArrayList<Task<? extends Serializable>>();
  }

  // Strips a single pair of surrounding backquotes, if present.
  public static String stripIdentifierQuotes(String val) {
    if ((val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`')) {
      val = val.substring(1, val.length() - 1);
    }
    return val;
  }

  public static String stripQuotes(String val) {
    return PlanUtils.stripQuotes(val);
  }

  // Decodes a _charset-prefixed literal; accepts either a quoted string or a
  // 0x-prefixed hex byte sequence, interpreted in the named character set.
  public static String charSetString(String charSetName, String charSetString)
      throws SemanticException {
    try {
      // The character set name starts with a _, so strip that
      charSetName = charSetName.substring(1);
      if (charSetString.charAt(0) == '\'') {
        return new String(unescapeSQLString(charSetString).getBytes(),
            charSetName);
      } else // hex input is also supported
      {
        assert charSetString.charAt(0) == '0';
        assert charSetString.charAt(1) == 'x';
        charSetString = charSetString.substring(2);

        byte[] bArray = new byte[charSetString.length() / 2];
        int j = 0;
        for (int i = 0; i < charSetString.length(); i += 2) {
          // Each pair of hex digits becomes one (signed) byte.
          int val = Character.digit(charSetString.charAt(i), 16) * 16
              + Character.digit(charSetString.charAt(i + 1), 16);
          if (val > 127) {
            val = val - 256;
          }
          bArray[j++] = (byte)val;
        }

        String res = new String(bArray, charSetName);
        return res;
      }
    } catch (UnsupportedEncodingException e) {
      throw new SemanticException(e);
    }
  }

  /**
   * Get dequoted name from a table/column node.
   * @param tableOrColumnNode the table or column node
   * @return for table node, db.tab or tab. for column node column.
   */
  public static String getUnescapedName(ASTNode tableOrColumnNode) {
    return getUnescapedName(tableOrColumnNode, null);
  }

  public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) {
    int tokenType = tableOrColumnNode.getToken().getType();
    if (tokenType == HiveParser.TOK_TABNAME) {
      // table node
      if (tableOrColumnNode.getChildCount() == 2) {
        // Two children: explicit "db.table" qualification.
        String dbName = unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
        String tableName = unescapeIdentifier(tableOrColumnNode.getChild(1).getText());
        return dbName + "." + tableName;
      }
      String tableName = unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
      if (currentDatabase != null) {
        return currentDatabase + "." + tableName;
      }
      return tableName;
    } else if (tokenType == HiveParser.StringLiteral) {
      return unescapeSQLString(tableOrColumnNode.getText());
    }
    // column node
    return unescapeIdentifier(tableOrColumnNode.getText());
  }

  // Splits a TOK_TABNAME node into {dbName, tableName}, resolving the database
  // via Utilities when the name is unqualified.
  public static String[] getQualifiedTableName(ASTNode tabNameNode) throws SemanticException {
    if (tabNameNode.getType() != HiveParser.TOK_TABNAME ||
        (tabNameNode.getChildCount() != 1 && tabNameNode.getChildCount() != 2)) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tabNameNode));
    }
    if (tabNameNode.getChildCount() == 2) {
      String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
      String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
      return new String[] {dbName, tableName};
    }
    String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
    return Utilities.getDbTableName(tableName);
  }

  // Joins a {db, table} pair into "db.table", rejecting any other arity.
  public static String getDotName(String[] qname) throws SemanticException {
    String genericName = StringUtils.join(qname, ".");
    if (qname.length != 2) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, genericName);
    }
    return genericName;
  }

  /**
   * Get the unqualified name from a table node.
   *
   * This method works for table names qualified with their schema (e.g., "db.table")
   * and table names without schema qualification. In both cases, it returns
   * the table name without the schema.
   *
   * @param node the table node
   * @return the table name without schema qualification
   *         (i.e., if name is "db.table" or "table", returns "table")
   */
  public static String getUnescapedUnqualifiedTableName(ASTNode node) {
    assert node.getChildCount() <= 2;

    if (node.getChildCount() == 2) {
      node = (ASTNode) node.getChild(1);
    }

    return getUnescapedName(node);
  }

  /**
   * Remove the encapsulating "`" pair from the identifier.
 We allow users to
   * use "`" to escape identifier for table names, column names and aliases, in
   * case that coincide with Hive language keywords.
   */
  public static String unescapeIdentifier(String val) {
    if (val == null) {
      return null;
    }
    if (val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') {
      val = val.substring(1, val.length() - 1);
    }
    return val;
  }

  /**
   * Converts parsed key/value properties pairs into a map.
   *
   * @param prop ASTNode parent of the key/value pairs
   *
   * @param mapProp property map which receives the mappings
   */
  public static void readProps(
    ASTNode prop, Map<String, String> mapProp) {
    for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
      String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
          .getText());
      String value = null;
      // The value child is optional; a key with no value maps to null.
      if (prop.getChild(propChild).getChild(1) != null) {
        value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
      }
      mapProp.put(key, value);
    }
  }

  // Place values of the four hex digits of a \uXXXX escape, used by
  // unescapeSQLString() below.
  private static final int[] multiplier = new int[] {1000, 100, 10, 1};

  /**
   * Decodes a quoted SQL string literal: strips the enclosing quotes and
   * processes backslash escapes (\uXXXX unicode, three-digit octal, and the
   * usual single-character escapes). Characters outside the quote enclosure
   * are dropped.
   */
  @SuppressWarnings("nls")
  public static String unescapeSQLString(String b) {
    Character enclosure = null;

    // Some of the strings can be passed in as unicode. For example, the
    // delimiter can be passed in as \002 - So, we first check if the
    // string is a unicode number, else go back to the old behavior
    StringBuilder sb = new StringBuilder(b.length());
    for (int i = 0; i < b.length(); i++) {

      char currentChar = b.charAt(i);
      if (enclosure == null) {
        if (currentChar == '\'' || b.charAt(i) == '\"') {
          enclosure = currentChar;
        }
        // ignore all other chars outside the enclosure
        continue;
      }

      if (enclosure.equals(currentChar)) {
        enclosure = null;
        continue;
      }

      if (currentChar == '\\' && (i + 6 < b.length()) && b.charAt(i + 1) == 'u') {
        // \uXXXX: four hex digits combined via the decimal place-value table.
        // NOTE(review): Character.digit() returns -1 for a non-hex digit and
        // that value is folded into the sum unchecked — matches upstream Hive.
        int code = 0;
        int base = i + 2;
        for (int j = 0; j < 4; j++) {
          int digit = Character.digit(b.charAt(j + base), 16);
          code += digit * multiplier[j];
        }
        sb.append((char)code);
        i += 5;
        continue;
      }

      if (currentChar == '\\' && (i + 4 < b.length())) {
        // Three-digit octal escape, first digit restricted to 0-1 so the
        // result stays within the 7-bit range.
        char i1 = b.charAt(i + 1);
        char i2 = b.charAt(i + 2);
        char i3 = b.charAt(i + 3);
        if ((i1 >= '0' && i1 <= '1') && (i2 >= '0' && i2 <= '7')
            && (i3 >= '0' && i3 <= '7')) {
          byte bVal = (byte) ((i3 - '0') + ((i2 - '0') * 8) + ((i1 - '0') * 8 * 8));
          byte[] bValArr = new byte[1];
          bValArr[0] = bVal;
          String tmp = new String(bValArr);
          sb.append(tmp);
          i += 3;
          continue;
        }
      }

      if (currentChar == '\\' && (i + 2 < b.length())) {
        char n = b.charAt(i + 1);
        switch (n) {
        case '0':
          sb.append("\0");
          break;
        case '\'':
          sb.append("'");
          break;
        case '"':
          sb.append("\"");
          break;
        case 'b':
          sb.append("\b");
          break;
        case 'n':
          sb.append("\n");
          break;
        case 'r':
          sb.append("\r");
          break;
        case 't':
          sb.append("\t");
          break;
        case 'Z':
          sb.append("\u001A");
          break;
        case '\\':
          sb.append("\\");
          break;
        // The following 2 lines are exactly what MySQL does TODO: why do we do this?
        case '%':
          sb.append("\\%");
          break;
        case '_':
          sb.append("\\_");
          break;
        default:
          sb.append(n);
        }
        i++;
      } else {
        sb.append(currentChar);
      }
    }
    return sb.toString();
  }

  /**
   * Escapes the string for AST; doesn't enclose it in quotes, however.
   */
  public static String escapeSQLString(String b) {
    // There's usually nothing to escape so we will be optimistic.
    String result = b;
    for (int i = 0; i < result.length(); ++i) {
      char currentChar = result.charAt(i);
      if (currentChar == '\\' && ((i + 1) < result.length())) {
        // TODO: do we need to handle the "this is what MySQL does" here?
        char nextChar = result.charAt(i + 1);
        if (nextChar == '%' || nextChar == '_') {
          ++i;
          continue;
        }
      }
      switch (currentChar) {
      case '\0':
        result = spliceString(result, i, "\\0");
        ++i;
        break;
      case '\'':
        result = spliceString(result, i, "\\'");
        ++i;
        break;
      case '\"':
        result = spliceString(result, i, "\\\"");
        ++i;
        break;
      case '\b':
        result = spliceString(result, i, "\\b");
        ++i;
        break;
      case '\n':
        result = spliceString(result, i, "\\n");
        ++i;
        break;
      case '\r':
        result = spliceString(result, i, "\\r");
        ++i;
        break;
      case '\t':
        result = spliceString(result, i, "\\t");
        ++i;
        break;
      case '\\':
        result = spliceString(result, i, "\\\\");
        ++i;
        break;
      case '\u001A':
        result = spliceString(result, i, "\\Z");
        ++i;
        break;
      default: {
        // Any remaining control character is rendered as a zero-padded
        // \uXXXX escape; i is advanced past the inserted text.
        if (currentChar < ' ') {
          String hex = Integer.toHexString(currentChar);
          String unicode = "\\u";
          for (int j = 4; j > hex.length(); --j) {
            unicode += '0';
          }
          unicode += hex;
          result = spliceString(result, i, unicode);
          i += (unicode.length() - 1);
        }
        break; // if not a control character, do nothing
      }
      }
    }
    return result;
  }

  // Replaces one character at index i with the replacement text.
  private static String spliceString(String str, int i, String replacement) {
    return spliceString(str, i, 1, replacement);
  }

  // Replaces length characters starting at index i with the replacement text.
  private static String spliceString(String str, int i, int length, String replacement) {
    return str.substring(0, i) + replacement + str.substring(i + length);
  }

  public HashSet<ReadEntity> getInputs() {
    return inputs;
  }

  public HashSet<WriteEntity> getOutputs() {
    return outputs;
  }

  /**
   * @return the schema for the fields which will be produced
   *         when the statement is executed, or null if not known
   */
  public List<FieldSchema> getResultSchema() {
    return null;
  }

  protected List<FieldSchema> getColumns(ASTNode ast) throws SemanticException {
    return getColumns(ast, true);
  }

  /**
   * Get the list of FieldSchema out of the ASTNode.
   */
  public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    int numCh = ast.getChildCount();
    for (int i = 0; i < numCh; i++) {
      FieldSchema col = new FieldSchema();
      ASTNode child = (ASTNode) ast.getChild(i);
      Tree grandChild = child.getChild(0);
      if(grandChild != null) {
        String name = grandChild.getText();
        if(lowerCase) {
          name = name.toLowerCase();
        }
        // child 0 is the name of the column
        col.setName(unescapeIdentifier(name));
        // child 1 is the type of the column
        ASTNode typeChild = (ASTNode) (child.getChild(1));
        col.setType(getTypeStringFromAST(typeChild));

        // child 2 is the optional comment of the column
        if (child.getChildCount() == 3) {
          col.setComment(unescapeSQLString(child.getChild(2).getText()));
        }
      }
      colList.add(col);
    }
    return colList;
  }

  // Collects the lower-cased, unescaped column names from the node's children.
  public static List<String> getColumnNames(ASTNode ast) {
    List<String> colList = new ArrayList<String>();
    int numCh = ast.getChildCount();
    for (int i = 0; i < numCh; i++) {
      ASTNode child = (ASTNode) ast.getChild(i);
      colList.add(unescapeIdentifier(child.getText()).toLowerCase());
    }
    return colList;
  }

  // Collects (column name, sort order) pairs; ascending unless the child token
  // says otherwise.
  protected List<Order> getColumnNamesOrder(ASTNode ast) {
    List<Order> colList = new ArrayList<Order>();
    int numCh = ast.getChildCount();
    for (int i = 0; i < numCh; i++) {
      ASTNode child = (ASTNode) ast.getChild(i);
      if (child.getToken().getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(),
            HIVE_COLUMN_ORDER_ASC));
      } else {
        colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(),
            HIVE_COLUMN_ORDER_DESC));
      }
    }
    return colList;
  }

  // Renders a type AST node as its Hive type-name string, recursing into
  // list/map/struct/union type parameters.
  protected static String getTypeStringFromAST(ASTNode typeNode)
      throws SemanticException {
    switch (typeNode.getType()) {
    case HiveParser.TOK_LIST:
      return serdeConstants.LIST_TYPE_NAME + "<"
          + getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ">";
    case HiveParser.TOK_MAP:
      return serdeConstants.MAP_TYPE_NAME + "<"
          + getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ","
          + getTypeStringFromAST((ASTNode) typeNode.getChild(1)) + ">";
    case HiveParser.TOK_STRUCT:
      return getStructTypeStringFromAST(typeNode);
    case HiveParser.TOK_UNIONTYPE:
      return getUnionTypeStringFromAST(typeNode);
    default:
      return DDLSemanticAnalyzer.getTypeName(typeNode);
    }
  }

  // Builds "struct<name:type,...>" from a TOK_STRUCT node; empty structs are
  // rejected.
  private static String getStructTypeStringFromAST(ASTNode typeNode)
      throws SemanticException {
    String typeStr = serdeConstants.STRUCT_TYPE_NAME + "<";
    typeNode = (ASTNode) typeNode.getChild(0);
    int children = typeNode.getChildCount();
    if (children <= 0) {
      throw new SemanticException("empty struct not allowed.");
    }
    StringBuilder buffer = new StringBuilder(typeStr);
    for (int i = 0; i < children; i++) {
      ASTNode child = (ASTNode) typeNode.getChild(i);
      buffer.append(unescapeIdentifier(child.getChild(0).getText())).append(":");
      buffer.append(getTypeStringFromAST((ASTNode) child.getChild(1)));
      if (i < children - 1) {
        buffer.append(",");
      }
    }

    buffer.append(">");
    return buffer.toString();
  }

  // Builds "uniontype<type,...>" from a TOK_UNIONTYPE node; empty unions are
  // rejected.
  private static String getUnionTypeStringFromAST(ASTNode typeNode)
      throws SemanticException {
    String typeStr = serdeConstants.UNION_TYPE_NAME + "<";
    typeNode = (ASTNode) typeNode.getChild(0);
    int children = typeNode.getChildCount();
    if (children <= 0) {
      throw new SemanticException("empty union not allowed.");
    }
    StringBuilder buffer = new StringBuilder(typeStr);
    for (int i = 0; i < children; i++) {
      buffer.append(getTypeStringFromAST((ASTNode) typeNode.getChild(i)));
      if (i < children - 1) {
        buffer.append(",");
      }
    }
    buffer.append(">");
    typeStr = buffer.toString();
    return typeStr;
  }

  /**
   * tableSpec.
* A parsed table / partition reference from the query AST: resolves the table in the
* metastore, parses any PARTITION (...) clause, and classifies the spec as
* TABLE_ONLY, STATIC_PARTITION or DYNAMIC_PARTITION.
*/
public static class tableSpec {
  public String tableName;
  public Table tableHandle;
  public Map<String, String> partSpec; // has to use LinkedHashMap to enforce order
  public Partition partHandle;
  public int numDynParts; // number of dynamic partition columns
  public List<Partition> partitions; // involved partitions in TableScanOperator/FileSinkOperator
  public static enum SpecType {TABLE_ONLY, STATIC_PARTITION, DYNAMIC_PARTITION};
  public SpecType specType;

  // Convenience form: dynamic partitions allowed, partial specs not allowed.
  public tableSpec(Hive db, HiveConf conf, ASTNode ast) throws SemanticException {
    this(db, conf, ast, true, false);
  }

  // Builds a spec directly from a table name and an (optional) full partition spec,
  // bypassing the AST entirely.
  public tableSpec(Hive db, HiveConf conf, String tableName, Map<String, String> partSpec)
      throws HiveException {
    this.tableName = tableName;
    this.partSpec = partSpec;
    this.tableHandle = db.getTable(tableName);
    if (partSpec != null) {
      this.specType = SpecType.STATIC_PARTITION;
      this.partHandle = db.getPartition(tableHandle, partSpec, false);
      this.partitions = Arrays.asList(partHandle);
    } else {
      this.specType = SpecType.TABLE_ONLY;
    }
  }

  /**
   * Parses a TOK_TAB / TOK_TABLE_PARTITION / TOK_TABTYPE / TOK_CREATETABLE node.
   *
   * @param allowDynamicPartitionsSpec whether PARTITION (ds, hr) without values is legal
   * @param allowPartialPartitionsSpec whether a static spec may name only a prefix of
   *        the partition columns (then all matching partitions are collected)
   */
  public tableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartitionsSpec,
      boolean allowPartialPartitionsSpec) throws SemanticException {
    assert (ast.getToken().getType() == HiveParser.TOK_TAB
        || ast.getToken().getType() == HiveParser.TOK_TABLE_PARTITION
        || ast.getToken().getType() == HiveParser.TOK_TABTYPE
        || ast.getToken().getType() == HiveParser.TOK_CREATETABLE);
    int childIndex = 0;
    numDynParts = 0;

    try {
      // get table metadata
      tableName = getUnescapedName((ASTNode)ast.getChild(0));
      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
      if (testMode) {
        // test mode redirects the query to a prefixed shadow table
        tableName = conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX) + tableName;
      }
      if (ast.getToken().getType() != HiveParser.TOK_CREATETABLE) {
        // CREATE TABLE: the table doesn't exist yet, so no handle to fetch
        tableHandle = db.getTable(tableName);
      }
    } catch (InvalidTableException ite) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast.getChild(0)), ite);
    } catch (HiveException e) {
      throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(ast.getChild(childIndex),
          e.getMessage()), e);
    }

    // get partition metadata if partition specified
    if (ast.getChildCount() == 2 && ast.getToken().getType() != HiveParser.TOK_CREATETABLE) {
      childIndex = 1;
      ASTNode partspec = (ASTNode) ast.getChild(1);
      partitions = new ArrayList<Partition>();
      // partSpec is a mapping from partition column name to its value.
      Map<String, String> tmpPartSpec = new HashMap<String, String>(partspec.getChildCount());
      for (int i = 0; i < partspec.getChildCount(); ++i) {
        ASTNode partspec_val = (ASTNode) partspec.getChild(i);
        String val = null;
        String colName = unescapeIdentifier(partspec_val.getChild(0).getText().toLowerCase());
        if (partspec_val.getChildCount() < 2) { // DP in the form of T partition (ds, hr)
          if (allowDynamicPartitionsSpec) {
            ++numDynParts;
          } else {
            throw new SemanticException(ErrorMsg.INVALID_PARTITION
                .getMsg(" - Dynamic partitions not allowed"));
          }
        } else { // in the form of T partition (ds="2010-03-03")
          val = stripQuotes(partspec_val.getChild(1).getText());
        }
        // dynamic columns map to null values here
        tmpPartSpec.put(colName, val);
      }

      // check if the columns, as well as value types in the partition() clause are valid
      validatePartSpec(tableHandle, tmpPartSpec, ast, conf, false);

      List<FieldSchema> parts = tableHandle.getPartitionKeys();
      // rebuild in schema order so LinkedHashMap iteration matches the table's key order
      partSpec = new LinkedHashMap<String, String>(partspec.getChildCount());
      for (FieldSchema fs : parts) {
        String partKey = fs.getName();
        partSpec.put(partKey, tmpPartSpec.get(partKey));
      }

      // check if the partition spec is valid
      if (numDynParts > 0) {
        int numStaPart = parts.size() - numDynParts;
        if (numStaPart == 0 &&
            conf.getVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE).equalsIgnoreCase("strict")) {
          throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_STRICT_MODE.getMsg());
        }

        // check the partitions in partSpec be the same as defined in table schema
        if (partSpec.keySet().size() != parts.size()) {
          ErrorPartSpec(partSpec, parts);
        }
        Iterator<String> itrPsKeys = partSpec.keySet().iterator();
        for (FieldSchema fs: parts) {
          if (!itrPsKeys.next().toLowerCase().equals(fs.getName().toLowerCase())) {
            ErrorPartSpec(partSpec, parts);
          }
        }

        // check if static partition appear after dynamic partitions
        for (FieldSchema fs: parts) {
          if (partSpec.get(fs.getName().toLowerCase()) == null) {
            if (numStaPart > 0) { // found a DP, but there exists ST as subpartition
              throw new SemanticException(
                  ErrorMsg.PARTITION_DYN_STA_ORDER.getMsg(ast.getChild(childIndex)));
            }
            break;
          } else {
            --numStaPart;
          }
        }
        partHandle = null;
        specType = SpecType.DYNAMIC_PARTITION;
      } else {
        try {
          if (allowPartialPartitionsSpec) {
            partitions = db.getPartitions(tableHandle, partSpec);
          } else {
            // this doesn't create partition.
            partHandle = db.getPartition(tableHandle, partSpec, false);
            if (partHandle == null) {
              // if partSpec doesn't exists in DB, return a delegate one
              // and the actual partition is created in MoveTask
              partHandle = new Partition(tableHandle, partSpec, null);
            } else {
              partitions.add(partHandle);
            }
          }
        } catch (HiveException e) {
          throw new SemanticException(
              ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(childIndex)), e);
        }
        specType = SpecType.STATIC_PARTITION;
      }
    } else {
      specType = SpecType.TABLE_ONLY;
    }
  }

  public Map<String, String> getPartSpec() {
    return this.partSpec;
  }

  public void setPartSpec(Map<String, String> partSpec) {
    this.partSpec = partSpec;
  }

  @Override
  public String toString() {
    if (partHandle != null) {
      return partHandle.toString();
    } else {
      return tableHandle.toString();
    }
  }
}

/**
 * Gets the lineage information.
 *
 * @return LineageInfo associated with the query.
 */
public LineageInfo getLineageInfo() {
  return linfo;
}

/**
 * Sets the lineage information.
 *
 * @param linfo The LineageInfo structure that is set in the optimization phase.
 */
public void setLineageInfo(LineageInfo linfo) {
  this.linfo = linfo;
}

/**
 * Gets the table access information.
 *
 * @return TableAccessInfo associated with the query.
*/
public TableAccessInfo getTableAccessInfo() {
  return tableAccessInfo;
}

/**
 * Sets the table access information.
 *
 * @param tableAccessInfo The TableAccessInfo structure that is set in the optimization phase.
 */
public void setTableAccessInfo(TableAccessInfo tableAccessInfo) {
  this.tableAccessInfo = tableAccessInfo;
}

/**
 * Gets the column access information.
 *
 * @return ColumnAccessInfo associated with the query.
 */
public ColumnAccessInfo getColumnAccessInfo() {
  return columnAccessInfo;
}

/**
 * Sets the column access information.
 *
 * @param columnAccessInfo The ColumnAccessInfo structure that is set immediately after
 *                         the optimization phase.
 */
public void setColumnAccessInfo(ColumnAccessInfo columnAccessInfo) {
  this.columnAccessInfo = columnAccessInfo;
}

public ColumnAccessInfo getUpdateColumnAccessInfo() {
  return updateColumnAccessInfo;
}

public void setUpdateColumnAccessInfo(ColumnAccessInfo updateColumnAccessInfo) {
  this.updateColumnAccessInfo = updateColumnAccessInfo;
}

// Converts a partition-spec subtree into an ordered column -> unquoted-value map.
protected LinkedHashMap<String, String> extractPartitionSpecs(Tree partspec)
    throws SemanticException {
  LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
  for (int i = 0; i < partspec.getChildCount(); ++i) {
    CommonTree partspec_val = (CommonTree) partspec.getChild(i);
    String val = stripQuotes(partspec_val.getChild(1).getText());
    partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val);
  }
  return partSpec;
}

/**
 * Checks if given specification is proper specification for prefix of
 * partition cols, for table partitioned by ds, hr, min valid ones are
 * (ds='2008-04-08'), (ds='2008-04-08', hr='12'), (ds='2008-04-08', hr='12', min='30')
 * invalid one is for example (ds='2008-04-08', min='30')
 * @param spec specification key-value map
 * @return true if the specification is prefix; never returns false, but throws
 * @throws HiveException
 */
public final boolean isValidPrefixSpec(Table tTable, Map<String, String> spec)
    throws HiveException {

  // TODO - types need to be checked.
  List<FieldSchema> partCols = tTable.getPartitionKeys();
  if (partCols == null || (partCols.size() == 0)) {
    if (spec != null) {
      throw new HiveException(
          "table is not partitioned but partition spec exists: " + spec);
    } else {
      return true;
    }
  }

  if (spec == null) {
    throw new HiveException("partition spec is not specified");
  }

  // the spec's keys must match the partition columns in order, from the first one
  Iterator<String> itrPsKeys = spec.keySet().iterator();
  for (FieldSchema fs: partCols) {
    if(!itrPsKeys.hasNext()) {
      break;
    }
    if (!itrPsKeys.next().toLowerCase().equals(fs.getName().toLowerCase())) {
      ErrorPartSpec(spec, partCols);
    }
  }

  if(itrPsKeys.hasNext()) {
    // more spec keys than the table has partition columns
    ErrorPartSpec(spec, partCols);
  }

  return true;
}

// Builds a descriptive schema-vs-spec mismatch message and always throws.
// NOTE(review): sb.setLength(sb.length() - 2) assumes parts and partSpec are
// non-empty, which holds for all visible callers.
private static void ErrorPartSpec(Map<String, String> partSpec, List<FieldSchema> parts)
    throws SemanticException {
  StringBuilder sb = new StringBuilder(
      "Partition columns in the table schema are: (");
  for (FieldSchema fs : parts) {
    sb.append(fs.getName()).append(", ");
  }
  sb.setLength(sb.length() - 2); // remove the last ", "
  sb.append("), while the partitions specified in the query are: (");

  Iterator<String> itrPsKeys = partSpec.keySet().iterator();
  while (itrPsKeys.hasNext()) {
    sb.append(itrPsKeys.next()).append(", ");
  }
  sb.setLength(sb.length() - 2); // remove the last ", "
  sb.append(").");
  throw new SemanticException(ErrorMsg.PARTSPEC_DIFFER_FROM_SCHEMA.getMsg(sb.toString()));
}

public Hive getDb() {
  return db;
}

public QueryProperties getQueryProperties() {
  return queryProperties;
}

public Set<FileSinkDesc> getAcidFileSinks() {
  return acidFileSinks;
}

/**
 * Construct list bucketing context.
*
* @param skewedColNames
* @param skewedValues
* @param skewedColValueLocationMaps
* @param isStoredAsSubDirectories
* @return a fully-populated ListBucketingCtx with the default key/dir names filled in
*/
protected ListBucketingCtx constructListBucketingCtx(List<String> skewedColNames,
    List<List<String>> skewedValues, Map<List<String>, String> skewedColValueLocationMaps,
    boolean isStoredAsSubDirectories, HiveConf conf) {
  ListBucketingCtx lbCtx = new ListBucketingCtx();
  lbCtx.setSkewedColNames(skewedColNames);
  lbCtx.setSkewedColValues(skewedValues);
  lbCtx.setLbLocationMap(skewedColValueLocationMaps);
  lbCtx.setStoredAsSubDirectories(isStoredAsSubDirectories);
  lbCtx.setDefaultKey(ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_KEY);
  lbCtx.setDefaultDirName(ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_DIR_NAME);
  return lbCtx;
}

/**
 * Given a ASTNode, return list of values.
 *
 * use case:
 * create table xyz list bucketed (col1) with skew (1,2,5)
 * AST Node is for (1,2,5)
 * @param ast
 * @return unquoted, lower-cased value texts, one per child
 */
protected List<String> getSkewedValueFromASTNode(ASTNode ast) {
  List<String> colList = new ArrayList<String>();
  int numCh = ast.getChildCount();
  for (int i = 0; i < numCh; i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    colList.add(stripQuotes(child.getText()).toLowerCase());
  }
  return colList;
}

/**
 * Retrieve skewed values from ASTNode.
 *
 * @param node node whose first child must be a TOK_TABCOLVALUE list
 * @return
 * @throws SemanticException if the value list is missing or of the wrong token type
 */
protected List<String> getSkewedValuesFromASTNode(Node node) throws SemanticException {
  List<String> result = null;
  Tree leafVNode = ((ASTNode) node).getChild(0);
  if (leafVNode == null) {
    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
  } else {
    ASTNode lVAstNode = (ASTNode) leafVNode;
    if (lVAstNode.getToken().getType() != HiveParser.TOK_TABCOLVALUE) {
      throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
    } else {
      result = new ArrayList<String>(getSkewedValueFromASTNode(lVAstNode));
    }
  }
  return result;
}

/**
 * Analyze list bucket column names
 *
 * @param skewedColNames
 * @param child node whose first child must be a TOK_TABCOLNAME list
 * @return the column names parsed from the AST (the passed-in list is replaced)
 * @throws SemanticException
 */
protected List<String> analyzeSkewedTablDDLColNames(List<String> skewedColNames, ASTNode child)
    throws SemanticException {
  Tree nNode = child.getChild(0);
  if (nNode == null) {
    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
  } else {
    ASTNode nAstNode = (ASTNode) nNode;
    if (nAstNode.getToken().getType() != HiveParser.TOK_TABCOLNAME) {
      throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
    } else {
      skewedColNames = getColumnNames(nAstNode);
    }
  }
  return skewedColNames;
}

/**
 * Handle skewed values in DDL.
 *
 * It can be used by both skewed by ... on () and set skewed location ().
 *
 * @param skewedValues output list; one inner list per skewed value (or value tuple)
 * @param child node whose second child carries the value list
 * @throws SemanticException
 */
protected void analyzeDDLSkewedValues(List<List<String>> skewedValues, ASTNode child)
    throws SemanticException {
  Tree vNode = child.getChild(1);
  if (vNode == null) {
    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
  }
  ASTNode vAstNode = (ASTNode) vNode;
  switch (vAstNode.getToken().getType()) {
  case HiveParser.TOK_TABCOLVALUE:
    // single-column skew: each scalar value becomes a singleton list
    for (String str : getSkewedValueFromASTNode(vAstNode)) {
      List<String> sList = new ArrayList<String>(Arrays.asList(str));
      skewedValues.add(sList);
    }
    break;
  case HiveParser.TOK_TABCOLVALUE_PAIR:
    // multi-column skew: each child is a TOK_TABCOLVALUES tuple
    ArrayList<Node> vLNodes = vAstNode.getChildren();
    for (Node node : vLNodes) {
      if ( ((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) {
        throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
      } else {
        skewedValues.add(getSkewedValuesFromASTNode(node));
      }
    }
    break;
  default:
    break;
  }
}

/**
 * process stored as directories
 *
 * @param child
 * @return true iff the third child is the TOK_STOREDASDIRS token
 */
protected boolean analyzeStoredAdDirs(ASTNode child) {
  boolean storedAsDirs = false;
  if ((child.getChildCount() == 3)
      && (((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.TOK_STOREDASDIRS)) {
    storedAsDirs = true;
  }
  return storedAsDirs;
}

// Recursively collects (column-name AST -> value ExprNodeDesc) pairs for static
// partition values into astExprNodeMap. Returns false when any dynamic partition
// column (a TOK_PARTVAL with no value child) is encountered.
private static boolean getPartExprNodeDesc(ASTNode astNode,
    Map<ASTNode, ExprNodeDesc> astExprNodeMap) throws SemanticException {

  if (astNode == null) {
    return true;
  } else if ((astNode.getChildren() == null) || (astNode.getChildren().size() == 0)) {
    // leaf: only a valueless TOK_PARTVAL marks a dynamic partition
    return astNode.getType() != HiveParser.TOK_PARTVAL;
  }

  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  boolean result = true;
  for (Node childNode : astNode.getChildren()) {
    ASTNode childASTNode = (ASTNode)childNode;

    if (childASTNode.getType() != HiveParser.TOK_PARTVAL) {
      result = getPartExprNodeDesc(childASTNode, astExprNodeMap) && result;
    } else {
      boolean isDynamicPart = childASTNode.getChildren().size() <= 1;
      result = !isDynamicPart && result;
      if (!isDynamicPart) {
        ASTNode
        partVal = (ASTNode)childASTNode.getChildren().get(1);
        // map: column-name node -> type-checked expression for its value
        astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0),
            TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal));
      }
    }
  }
  return result;
}

// Validates partition column names and (when HIVE_TYPE_CHECK_ON_INSERT is on) the
// value types of a partition spec against the table's partition keys; date values
// may be normalized in partSpec in place.
public static void validatePartSpec(Table tbl, Map<String, String> partSpec,
    ASTNode astNode, HiveConf conf, boolean shouldBeFull) throws SemanticException {
  tbl.validatePartColumnNames(partSpec, shouldBeFull);

  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_TYPE_CHECK_ON_INSERT)) {
    return;
  }

  Map<ASTNode, ExprNodeDesc> astExprNodeMap = new HashMap<ASTNode, ExprNodeDesc>();
  if (!getPartExprNodeDesc(astNode, astExprNodeMap)) {
    // dynamic partition columns have no value expressions to type-check
    STATIC_LOG.warn("Dynamic partitioning is used; only validating "
        + astExprNodeMap.size() + " columns");
  }

  if (astExprNodeMap.isEmpty()) {
    return; // All columns are dynamic, nothing to do.
  }

  List<FieldSchema> parts = tbl.getPartitionKeys();
  Map<String, String> partCols = new HashMap<String, String>(parts.size());
  for (FieldSchema col : parts) {
    partCols.put(col.getName(), col.getType().toLowerCase());
  }
  for (Entry<ASTNode, ExprNodeDesc> astExprNodePair : astExprNodeMap.entrySet()) {
    String astKeyName = astExprNodePair.getKey().toString().toLowerCase();
    if (astExprNodePair.getKey().getType() == HiveParser.Identifier) {
      astKeyName = stripIdentifierQuotes(astKeyName);
    }
    String colType = partCols.get(astKeyName);
    ObjectInspector inputOI = astExprNodePair.getValue().getWritableObjectInspector();

    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(colType);
    ObjectInspector outputOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
    Object value = null;
    String colSpec = partSpec.get(astKeyName);
    try {
      value = ExprNodeEvaluatorFactory.get(astExprNodePair.getValue()).evaluate(colSpec);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    // a null conversion result means the literal cannot represent the column's type
    Object convertedValue = ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value);
    if (convertedValue == null) {
      throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH, astKeyName,
          inputOI.getTypeName(), outputOI.getTypeName());
    }
    normalizeColSpec(partSpec, astKeyName, colType, colSpec, convertedValue);
  }
}

// Rewrites the spec value to its canonical form (currently only for DATE columns)
// and warns when a rewrite happens.
@VisibleForTesting
static void normalizeColSpec(Map<String, String> partSpec, String colName,
    String colType, String originalColSpec, Object colValue) throws SemanticException {
  if (colValue == null) return; // nothing to do with nulls
  String normalizedColSpec = originalColSpec;
  if (colType.equals(serdeConstants.DATE_TYPE_NAME)) {
    normalizedColSpec = normalizeDateCol(colValue, originalColSpec);
  }
  if (!normalizedColSpec.equals(originalColSpec)) {
    STATIC_LOG.warn("Normalizing partition spec - " + colName + " from "
        + originalColSpec + " to " + normalizedColSpec);
    partSpec.put(colName, normalizedColSpec);
  }
}

// Formats a date partition value with the metastore's canonical date format.
private static String normalizeDateCol(Object colValue, String originalColSpec)
    throws SemanticException {
  Date value;
  if (colValue instanceof DateWritable) {
    value = ((DateWritable) colValue).get();
  } else if (colValue instanceof Date) {
    value = (Date) colValue;
  } else {
    throw new SemanticException("Unexpected date type " + colValue.getClass());
  }
  return HiveMetaStore.PARTITION_DATE_FORMAT.get().format(value);
}

protected WriteEntity toWriteEntity(String location) throws SemanticException {
  return toWriteEntity(new Path(location));
}

// Wraps a path as a write entity, qualifying it best-effort and flagging local files.
protected WriteEntity toWriteEntity(Path location) throws SemanticException {
  try {
    Path path = tryQualifyPath(location);
    return new WriteEntity(path, FileUtils.isLocalFile(conf, path.toUri()));
  } catch (Exception e) {
    throw new SemanticException(e);
  }
}

protected ReadEntity toReadEntity(String location) throws SemanticException {
  return toReadEntity(new Path(location));
}

protected ReadEntity toReadEntity(Path location)
throws SemanticException { try { Path path = tryQualifyPath(location); return new ReadEntity(path, FileUtils.isLocalFile(conf, path.toUri())); } catch (Exception e) { throw new SemanticException(e); } } private Path tryQualifyPath(Path path) throws IOException { try { return path.getFileSystem(conf).makeQualified(path); } catch (IOException e) { return path; // some tests expected to pass invalid schema } } protected Database getDatabase(String dbName) throws SemanticException { return getDatabase(dbName, true); } protected Database getDatabase(String dbName, boolean throwException) throws SemanticException { Database database; try { database = db.getDatabase(dbName); } catch (Exception e) { throw new SemanticException(e.getMessage(), e); } if (database == null && throwException) { throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName)); } return database; } protected Table getTable(String[] qualified) throws SemanticException { return getTable(qualified[0], qualified[1], true); } protected Table getTable(String[] qualified, boolean throwException) throws SemanticException { return getTable(qualified[0], qualified[1], throwException); } protected Table getTable(String tblName) throws SemanticException { return getTable(null, tblName, true); } protected Table getTable(String tblName, boolean throwException) throws SemanticException { return getTable(null, tblName, throwException); } protected Table getTable(String database, String tblName, boolean throwException) throws SemanticException { Table tab; try { tab = database == null ? 
db.getTable(tblName, false) : db.getTable(database, tblName, false); } catch (InvalidTableException e) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e); } catch (Exception e) { throw new SemanticException(e.getMessage(), e); } if (tab == null && throwException) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName)); } return tab; } protected Partition getPartition(Table table, Map<String, String> partSpec, boolean throwException) throws SemanticException { Partition partition; try { partition = db.getPartition(table, partSpec, false); } catch (Exception e) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e); } if (partition == null && throwException) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); } return partition; } protected List<Partition> getPartitions(Table table, Map<String, String> partSpec, boolean throwException) throws SemanticException { List<Partition> partitions; try { partitions = partSpec == null ? db.getPartitions(table) : db.getPartitions(table, partSpec); } catch (Exception e) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e); } if (partitions.isEmpty() && throwException) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); } return partitions; } protected String toMessage(ErrorMsg message, Object detail) { return detail == null ? message.getMsg() : message.getMsg(detail.toString()); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.yaml.schema; import com.intellij.codeInspection.InspectionProfileEntry; import com.intellij.openapi.application.ex.PathManagerEx; import com.intellij.openapi.fileTypes.LanguageFileType; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.Predicate; import com.jetbrains.jsonSchema.JsonSchemaHighlightingTestBase; import org.intellij.lang.annotations.Language; import org.jetbrains.annotations.NotNull; import org.jetbrains.yaml.YAMLLanguage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class YamlByJsonSchemaHighlightingTest extends JsonSchemaHighlightingTestBase { @NotNull @Override public String getTestDataPath() { return PathManagerEx.getCommunityHomePath() + "/plugins/yaml/testSrc/org/jetbrains/yaml/schema/data/highlighting"; } @Override protected String getTestFileName() { return "config.yml"; } @Override protected InspectionProfileEntry getInspectionProfile() { return new YamlJsonSchemaHighlightingInspection(); } @Override protected Predicate<VirtualFile> getAvailabilityPredicate() { return file -> file.getFileType() instanceof LanguageFileType && ((LanguageFileType)file.getFileType()).getLanguage().isKindOf( YAMLLanguage.INSTANCE); } public void testEnum1() { @Language("JSON") final String schema = "{\"properties\": {\"prop\": {\"enum\": [1,2,3,\"18\"]}}}"; doTest(schema, "prop: 1"); doTest(schema, "prop: <warning>foo</warning>"); } public void testMissingProp() { @Language("JSON") final String schema = "{\"properties\": {\"prop\": {}, \"flop\": {}}, \"required\": [\"flop\"]}"; doTest(schema, "<warning>prop: 2</warning>"); doTest(schema, "prop: 2\nflop: a"); doTest(schema, "flop: a"); } public void 
testNumberMultipleWrong() {
    doTest("{ \"properties\": { \"prop\": {\"type\": \"number\", \"multipleOf\": 2}}}",
           "prop: <warning descr=\"Schema validation: Is not multiple of 2\">3</warning>");
  }

  public void testNumberMultipleCorrect() {
    doTest("{ \"properties\": { \"prop\": {\"type\": \"number\", \"multipleOf\": 2}}}", "prop: 4");
  }

  public void testNumberMinMax() {
    doTest("{ \"properties\": { \"prop\": {\n" +
           " \"type\": \"number\",\n" +
           " \"minimum\": 0,\n" +
           " \"maximum\": 100,\n" +
           " \"exclusiveMaximum\": true\n" +
           "}}}", "prop: 14");
  }

  public void testEnum() {
    @Language("JSON") final String schema = "{\"properties\": {\"prop\": {\"enum\": [1,2,3,\"18\"]}}}";
    doTest(schema, "prop: 18");
    doTest(schema, "prop: 2");
    doTest(schema, "prop: <warning descr=\"Schema validation: Value should be one of: 1, 2, 3, \\\"18\\\"\">6</warning>");
  }

  public void testSimpleString() {
    @Language("JSON") final String schema = "{\"properties\": {\"prop\": {\"type\": \"string\", \"minLength\": 2, \"maxLength\": 3}}}";
    doTest(schema, "prop: <warning descr=\"Schema validation: String is shorter than 2\">s</warning>");
    doTest(schema, "prop: sh");
    doTest(schema, "prop: sho");
    doTest(schema, "prop: <warning descr=\"Schema validation: String is longer than 3\">shor</warning>");
  }

  // schema(...) wraps the given fragment as the schema of the "prop" property.
  public void testArray() {
    @Language("JSON") final String schema = schema("{\n" +
      " \"type\": \"array\",\n" +
      " \"items\": {\n" +
      " \"type\": \"number\", \"minimum\": 18" +
      " }\n" +
      "}");
    doTest(schema, "prop:\n - 101\n - 102");
    doTest(schema, "prop:\n - <warning descr=\"Schema validation: Less than the minimum 18\">16</warning>");
    doTest(schema, "prop:\n - <warning descr=\"Schema validation: Incompatible types.\n Required: number. Actual: string.\">test</warning>");
  }

  public void testTopLevelArray() {
    @Language("JSON") final String schema = "{\n" +
      " \"type\": \"array\",\n" +
      " \"items\": {\n" +
      " \"type\": \"number\", \"minimum\": 18" +
      " }\n" +
      "}";
    doTest(schema, "- 101\n- 102");
  }

  public void testTopLevelObjectArray() {
    @Language("JSON") final String schema = "{\n" +
      " \"type\": \"array\",\n" +
      " \"items\": {\n" +
      " \"type\": \"object\", \"properties\": {\"a\": {\"type\": \"number\"}}" +
      " }\n" +
      "}";
    doTest(schema, "- a: <warning descr=\"Schema validation: Incompatible types.\n Required: number. Actual: boolean.\">true</warning>");
    doTest(schema, "- a: 18");
  }

  public void testArrayTuples1() {
    @Language("JSON") final String schema = schema("{\n" +
      " \"type\": \"array\",\n" +
      " \"items\": [{\n" +
      " \"type\": \"number\", \"minimum\": 18" +
      " }, {\"type\" : \"string\"}]\n" +
      "}");
    doTest(schema, "prop:\n - 101\n - <warning descr=\"Schema validation: Incompatible types.\n Required: string. Actual: integer.\">102</warning>");
  }

  // Same tuple schema but with additionalItems disabled: surplus items are flagged.
  public void testArrayTuples2() {
    @Language("JSON") final String schema2 = schema("{\n" +
      " \"type\": \"array\",\n" +
      " \"items\": [{\n" +
      " \"type\": \"number\", \"minimum\": 18" +
      " }, {\"type\" : \"string\"}],\n" +
      "\"additionalItems\": false}");
    doTest(schema2, "prop:\n - 101\n - <warning descr=\"Schema validation: Incompatible types.\n Required: string. Actual: integer.\">102</warning>\n - <warning descr=\"Schema validation: Additional items are not allowed\">additional</warning>");
  }

  public void testArrayLength() {
    @Language("JSON") final String schema = schema("{\"type\": \"array\", \"minItems\": 2, \"maxItems\": 3}");
    doTest(schema, "prop:\n <warning descr=\"Schema validation: Array is shorter than 2\">- 1</warning>");
    doTest(schema, "prop:\n - 1\n - 2");
    doTest(schema, "prop:\n <warning descr=\"Schema validation: Array is longer than 3\">- 1\n - 2\n - 3\n - 4</warning>");
  }

  public void testArrayUnique() {
    @Language("JSON") final String schema = schema("{\"type\": \"array\", \"uniqueItems\": true}");
    doTest(schema, "prop:\n - 1\n - 2");
    doTest(schema, "prop:\n - <warning descr=\"Schema validation: Item is not unique\">1</warning>\n - 2\n - test\n - <warning descr=\"Schema validation: Item is not unique\">1</warning>");
  }

  // title/description/default are annotations only and must not constrain values.
  public void testMetadataIsOk() {
    @Language("JSON") final String schema = "{\n" +
      " \"title\" : \"Match anything\",\n" +
      " \"description\" : \"This is a schema that matches anything.\",\n" +
      " \"default\" : \"Default value\"\n" +
      "}";
    doTest(schema, "anything: 1");
  }

  public void testRequiredField() {
    @Language("JSON") final String schema = "{\"type\": \"object\", \"properties\": {\"a\": {}, \"b\": {}}, \"required\": [\"a\"]}";
    doTest(schema, "a: 11");
    doTest(schema, "a: 1\nb: true");
    doTest(schema, "<warning descr=\"Schema validation: Missing required property 'a'\">b: alarm</warning>");
  }

  public void testInnerRequired() {
    @Language("JSON") final String schema = schema("{\"type\": \"object\", \"properties\": {\"a\": {}, \"b\": {}}, \"required\": [\"a\"]}");
    doTest(schema, "prop:\n a: 11");
    doTest(schema, "prop:\n a: 1\n b: true");
    doTest(schema, "prop:\n <warning descr=\"Schema validation: Missing required property 'a'\">b: alarm</warning>");
  }

  public void testAdditionalPropertiesAllowed() {
    @Language("JSON") final String schema = schema("{}");
    doTest(schema, "prop:\n q: true\n someStuff: 20");
}

  public void testAdditionalPropertiesDisabled() {
    @Language("JSON") final String schema = "{\"type\": \"object\", \"properties\": {\"prop\": {}}, \"additionalProperties\": false}";
    // not sure abt inner object
    doTest(schema, "prop:\n q: true\n<warning descr=\"Schema validation: Property 'someStuff' is not allowed\">someStuff: 20</warning>");
  }

  // additionalProperties given as a schema: extra keys must match that schema.
  public void testAdditionalPropertiesSchema() {
    @Language("JSON") final String schema = "{\"type\": \"object\", \"properties\": {\"a\": {}}," +
      "\"additionalProperties\": {\"type\": \"number\"}}";
    doTest(schema, "a: moo\nb: 5\nc: <warning descr=\"Schema validation: Incompatible types.\n Required: number. Actual: string.\">foo</warning>");
  }

  public void testMinMaxProperties() {
    @Language("JSON") final String schema = "{\"type\": \"object\", \"minProperties\": 2, \"maxProperties\": 3}";
    doTest(schema, "<warning descr=\"Schema validation: Number of properties is less than 2\">a: 3</warning>");
    doTest(schema, "a: 1\nb: 5");
    doTest(schema, "<warning descr=\"Schema validation: Number of properties is greater than 3\">a: 1\nb: 22\nc: 333\nd: 4444</warning>");
  }

  public void testOneOf() {
    final List<String> subSchemas = new ArrayList<>();
    subSchemas.add("{\"type\": \"number\"}");
    subSchemas.add("{\"type\": \"boolean\"}");
    @Language("JSON") final String schema = schema("{\"oneOf\": [" + StringUtil.join(subSchemas, ", ") + "]}");
    doTest(schema, "prop: 5");
    doTest(schema, "prop: true");
    doTest(schema, "prop: <warning descr=\"Schema validation: Incompatible types.\n Required one of: boolean, number. Actual: string.\">aaa</warning>");
  }

  // oneOf must reject values that match more than one branch.
  public void testOneOfForTwoMatches() {
    final List<String> subSchemas = new ArrayList<>();
    subSchemas.add("{\"type\": \"string\", \"enum\": [\"a\", \"b\"]}");
    subSchemas.add("{\"type\": \"string\", \"enum\": [\"a\", \"c\"]}");
    @Language("JSON") final String schema = schema("{\"oneOf\": [" + StringUtil.join(subSchemas, ", ") + "]}");
    doTest(schema, "prop: b");
    doTest(schema, "prop: c");
    doTest(schema, "prop: <warning descr=\"Schema validation: Validates to more than one variant\">a</warning>");
  }

  public void testOneOfSelectError() {
    final List<String> subSchemas = new ArrayList<>();
    subSchemas.add("{\"type\": \"string\",\n" +
      " \"enum\": [\n" +
      " \"off\", \"warn\", \"error\"\n" +
      " ]}");
    subSchemas.add("{\"type\": \"integer\"}");
    @Language("JSON") final String schema = schema("{\"oneOf\": [" + StringUtil.join(subSchemas, ", ") + "]}");
    doTest(schema, "prop: off");
    doTest(schema, "prop: 12");
    doTest(schema, "prop: <warning descr=\"Schema validation: Value should be one of: \\\"off\\\", \\\"warn\\\", \\\"error\\\"\">wrong</warning>");
  }

  public void testAnyOf() {
    final List<String> subSchemas = new ArrayList<>();
    subSchemas.add("{\"type\": \"string\", \"enum\": [\"a\", \"b\"]}");
    subSchemas.add("{\"type\": \"string\", \"enum\": [\"a\", \"c\"]}");
    @Language("JSON") final String schema = schema("{\"anyOf\": [" + StringUtil.join(subSchemas, ", ") + "]}");
    doTest(schema, "prop: b");
    doTest(schema, "prop: c");
    doTest(schema, "prop: a");
    doTest(schema, "prop: <warning descr=\"Schema validation: Value should be one of: \\\"a\\\", \\\"b\\\", \\\"c\\\"\">d</warning>");
  }

  public void testAllOf() {
    final List<String> subSchemas = new ArrayList<>();
    subSchemas.add("{\"type\": \"integer\", \"multipleOf\": 2}");
    subSchemas.add("{\"enum\": [1,2,3]}");
    @Language("JSON") final String schema = schema("{\"allOf\": [" + StringUtil.join(subSchemas, ", ") + "]}");
    doTest(schema, "prop: <warning descr=\"Schema validation: Is not multiple of 2\">1</warning>");
    doTest(schema, "prop: <warning descr=\"Schema validation: Value should be one of: 1, 2, 3\">4</warning>");
    doTest(schema, "prop: 2");
  }

  // ----

  public void testObjectInArray() {
    @Language("JSON") final String schema = schema("{\"type\": \"array\", \"items\": {\"type\": \"object\"," +
      "\"properties\": {" +
      "\"innerType\":{}, \"innerValue\":{}" +
      "}, \"additionalProperties\": false" +
      "}}");
    doTest(schema, "prop:\n- innerType: aaa\n <warning descr=\"Schema validation: Property 'alien' is not allowed\">alien: bee</warning>");
  }

  public void testObjectDeeperInArray() {
    final String innerTypeSchema = "{\"properties\": {\"only\": {}}, \"additionalProperties\": false}";
    @Language("JSON") final String schema = schema("{\"type\": \"array\", \"items\": {\"type\": \"object\"," +
      "\"properties\": {" +
      "\"innerType\":" + innerTypeSchema +
      "}, \"additionalProperties\": false" +
      "}}");
    doTest(schema, "prop:\n- innerType:\n only: true\n <warning descr=\"Schema validation: Property 'hidden' is not allowed\">hidden: false</warning>");
  }

  public void testInnerObjectPropValueInArray() {
    @Language("JSON") final String schema = "{\"properties\": {\"prop\": {\"type\": \"array\", \"items\": {\"enum\": [1,2,3]}}}}";
    doTest(schema, "prop:\n - 1\n - 3");
    doTest(schema, "prop:\n - <warning descr=\"Schema validation: Value should be one of: 1, 2, 3\">out</warning>");
  }

  public void testAllOfProperties() {
    @Language("JSON") final String schema = "{\"allOf\": [{\"type\": \"object\", \"properties\": {\"first\": {}}}," +
      " {\"properties\": {\"second\": {\"enum\": [33,44]}}}], \"additionalProperties\": false}";
    // doTest(schema, "first: true\nsecond: <warning descr=\"Schema validation: Value should be one of: [33, 44]\">null</warning>");
    doTest(schema, "first: true\nsecond: 44\n<warning descr=\"Schema validation: Property 'other' is not allowed\">other: 15</warning>");
    doTest(schema, "first: true\nsecond: <warning descr=\"Schema validation: Value should be one of: 33, 44\">12</warning>");
  }

  public void testWithWaySelection() {
    final String subSchema1 = "{\"enum\": [1,2,3,4,5]}";
    final String subSchema2 = "{\"type\": \"array\", \"items\": {\"properties\": {\"kilo\": {}}, \"additionalProperties\": false}}";
    @Language("JSON") final String schema = "{\"properties\": {\"prop\": {\"oneOf\": [" + subSchema1 + ", " + subSchema2 + "]}}}";
    doTest(schema, "prop:\n - <warning descr=\"Schema validation: Property 'foxtrot' is not allowed\">foxtrot: 15</warning>\n kilo: 20");
  }

  public void testPatternPropertiesHighlighting() {
    @Language("JSON") final String schema = "{\n" +
      " \"patternProperties\": {\n" +
      " \"^A\" : {\n" +
      " \"type\": \"number\"\n" +
      " },\n" +
      " \"B\": {\n" +
      " \"type\": \"boolean\"\n" +
      " },\n" +
      " \"C\": {\n" +
      " \"enum\": [\"test\", \"em\"]\n" +
      " }\n" +
      " }\n" +
      "}";
    doTest(schema, "Abezjana: 2\n" +
      "Auto: <warning descr=\"Schema validation: Incompatible types.\n Required: number. Actual: string.\">no</warning>\n" +
      "BAe: <warning descr=\"Schema validation: Incompatible types.\n Required: boolean. Actual: integer.\">22</warning>\n" +
      "Boloto: <warning descr=\"Schema validation: Incompatible types.\n Required: boolean. Actual: integer.\">2</warning>\n" +
      "Cyan: <warning descr=\"Schema validation: Value should be one of: \\\"test\\\", \\\"em\\\"\">me</warning>\n");
  }

  public void testPatternPropertiesFromIssue() {
    @Language("JSON") final String schema = "{\n" +
      " \"type\": \"object\",\n" +
      " \"additionalProperties\": false,\n" +
      " \"patternProperties\": {\n" +
      " \"p[0-9]\": {\n" +
      " \"type\": \"string\"\n" +
      " },\n" +
      " \"a[0-9]\": {\n" +
      " \"enum\": [\"auto!\"]\n" +
      " }\n" +
      " }\n" +
      "}";
    doTest(schema, "p1: <warning descr=\"Schema validation: Incompatible types.\n Required: string. Actual: integer.\">1</warning>\n" +
      "p2: <warning descr=\"Schema validation: Incompatible types.\n Required: string. Actual: integer.\">3</warning>\n" +
      "a2: auto!\n" +
      "a1: <warning descr=\"Schema validation: Value should be one of: \\\"auto!\\\"\">moto!</warning>\n"
    );
  }

  public void testPatternForPropertyValue() {
    @Language("JSON") final String schema = "{\n" +
      " \"properties\": {\n" +
      " \"withPattern\": {\n" +
      " \"pattern\": \"p[0-9]\"\n" +
      " }\n" +
      " }\n" +
      "}";
    final String correctText = "withPattern: p1";
    final String wrongText = "withPattern: <warning descr=\"Schema validation: String violates the pattern: 'p[0-9]'\">wrong</warning>";
    doTest(schema, correctText);
    doTest(schema, wrongText);
  }

  public void testPatternWithSpecialEscapedSymbols() {
    @Language("JSON") final String schema = "{\n" +
      " \"properties\": {\n" +
      " \"withPattern\": {\n" +
      " \"pattern\": \"^\\\\d{4}\\\\-(0?[1-9]|1[012])\\\\-(0?[1-9]|[12][0-9]|3[01])$\"\n" +
      " }\n" +
      " }\n" +
      "}";
    @Language("yaml") final String correctText = "withPattern: 1234-11-11";
    final String wrongText = "withPattern: <warning descr=\"Schema validation: String violates the pattern: '^\\d{4}\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$'\">wrong</warning>\n";
    doTest(schema, correctText);
    doTest(schema, wrongText);
  }

  // ---

  public void testRootObjectRedefinedAdditionalPropertiesForbidden() {
    doTest(rootObjectRedefinedSchema(), "<warning descr=\"Schema validation: Property 'a' is not allowed\">a: true</warning>\n" +
      "r1: allowed!");
  }

  public void testNumberOfSameNamedPropertiesCorrectlyChecked() {
    @Language("JSON") final String schema = "{\n" +
      " \"properties\": {\n" +
      " \"size\": {\n" +
      " \"type\": \"object\",\n" +
      " \"minProperties\": 2,\n" +
      " \"maxProperties\": 3,\n" +
      " \"properties\": {\n" +
      " \"a\": {\n" +
      " \"type\": \"boolean\"\n" +
      " }\n" +
      " }\n" +
      " }\n" +
      " }\n" +
      "}";
    doTest(schema, "size: \n" +
      " a: <warning descr=\"Schema validation: Incompatible types.\n Required: boolean. Actual: integer.\">1</warning>\n" +
      " b: 3\n" +
      " c: 4\n" +
      " a: <warning descr=\"Schema validation: Incompatible types.\n Required: boolean. 
Actual: integer.\">5</warning>" + "\n"); } public void testManyDuplicatesInArray() { @Language("JSON") final String schema = "{\n" + " \"properties\": {\n" + " \"array\":{\n" + " \"type\": \"array\",\n" + " \"uniqueItems\": true\n" + " }\n" + " }\n" + "}"; doTest(schema, "array: \n" + " - <warning descr=\"Schema validation: Item is not unique\">1</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">1</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">1</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">2</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">2</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">2</warning>\n" + " - 5\n" + " - <warning descr=\"Schema validation: Item is not unique\">3</warning>\n" + " - <warning descr=\"Schema validation: Item is not unique\">3</warning>\n"); } // ---- public void testPropertyValueAlsoHighlightedIfPatternIsInvalid() { @Language("JSON") final String schema = "{\n" + " \"properties\": {\n" + " \"withPattern\": {\n" + " \"pattern\": \"^[]$\"\n" + " }\n" + " }\n" + "}"; final String text = "withPattern:" + " <warning descr=\"Schema validation: Cannot check the string by pattern because of an error: Unclosed character class near index 3\n^[]$\n ^\">(124)555-4216</warning>"; doTest(schema, text); } public void testNotSchema() { @Language("JSON") final String schema = "{\"properties\": {\n" + " \"not_type\": { \"not\": { \"type\": \"string\" } }\n" + " }}"; doTest(schema, "not_type: <warning descr=\"Schema validation: Validates against 'not' schema\">wrong</warning>"); } public void testNotSchemaCombinedWithNormal() { @Language("JSON") final String schema = "{\"properties\": {\n" + " \"not_type\": {\n" + " \"pattern\": \"^[a-z]*[0-5]*$\",\n" + " \"not\": { \"pattern\": \"^[a-z]{1}[0-5]$\" }\n" + " }\n" + " }}"; doTest(schema, "not_type: va4"); doTest(schema, "not_type: <warning descr=\"Schema validation: 
Validates against 'not' schema\">a4</warning>"); doTest(schema, "not_type: <warning descr=\"Schema validation: String violates the pattern: '^[a-z]*[0-5]*$'\">4a4</warning>"); } public void testDoNotMarkOneOfThatDiffersWithFormat() { @Language("JSON") final String schema = "{\n" + "\n" + " \"properties\": {\n" + " \"withFormat\": {\n" + " \"type\": \"string\"," + " \"oneOf\": [\n" + " {\n" + " \"format\":\"hostname\"\n" + " },\n" + " {\n" + " \"format\": \"ip4\"\n" + " }\n" + " ]\n" + " }\n" + " }\n" + "}"; doTest(schema, "withFormat: localhost"); } public void testAcceptSchemaWithoutType() { @Language("JSON") final String schema = "{\n" + "\n" + " \"properties\": {\n" + " \"withFormat\": {\n" + " \"oneOf\": [\n" + " {\n" + " \"format\":\"hostname\"\n" + " },\n" + " {\n" + " \"format\": \"ip4\"\n" + " }\n" + " ]\n" + " }\n" + " }\n" + "}"; doTest(schema, "withFormat: localhost"); } public void testArrayItemReference() { @Language("JSON") final String schema = "{\n" + " \"items\": [\n" + " {\n" + " \"type\": \"integer\"\n" + " },\n" + " {\n" + " \"$ref\": \"#/items/0\"\n" + " }\n" + " ]\n" + "}"; doTest(schema, "- 1\n- 2"); doTest(schema, "- 1\n- <warning>foo</warning>"); } public void testValidateAdditionalItems() { @Language("JSON") final String schema = "{\n" + " \"definitions\": {\n" + " \"options\": {\n" + " \"type\": \"array\",\n" + " \"items\": {\n" + " \"type\": \"number\"\n" + " }\n" + " }\n" + " },\n" + " \"items\": [\n" + " {\n" + " \"type\": \"boolean\"\n" + " },\n" + " {\n" + " \"type\": \"boolean\"\n" + " }\n" + " ],\n" + " \"additionalItems\": {\n" + " \"$ref\": \"#/definitions/options/items\"\n" + " }\n" + "}"; doTest(schema, "- true\n- true"); doTest(schema, "- true\n- true\n- 1\n- 2\n- 3"); doTest(schema, "- true\n- true\n- 1\n- <warning>qq</warning>"); } public void testExclusiveMinMaxV6_1() { @Language("JSON") String exclusiveMinSchema = "{\"properties\": {\"prop\": {\"exclusiveMinimum\": 3}}}"; doTest(exclusiveMinSchema, "prop: 
<warning>2</warning>"); doTest(exclusiveMinSchema, "prop: <warning>3</warning>"); doTest(exclusiveMinSchema, "prop: 4"); } public void testExclusiveMinMaxV6_2() { @Language("JSON") String exclusiveMaxSchema = "{\"properties\": {\"prop\": {\"exclusiveMaximum\": 3}}}"; doTest(exclusiveMaxSchema, "prop: 2"); doTest(exclusiveMaxSchema, "prop: <warning>3</warning>"); doTest(exclusiveMaxSchema, "prop: <warning>4</warning>"); } /*todo later public void testPropertyNamesV6() { doTest("{\"propertyNames\": {\"minLength\": 7}}", "{<warning>\"prop\"</warning>: 2}"); doTest("{\"properties\": {\"prop\": {\"propertyNames\": {\"minLength\": 7}}}}", "{\"prop\": {<warning>\"qq\"</warning>: 7}}"); }*/ public void testContainsV6() { @Language("JSON") String schema = "{\"properties\": {\"prop\": {\"type\": \"array\", \"contains\": {\"type\": \"number\"}}}}"; doTest(schema, "prop:\n <warning>- a\n - true</warning>"); doTest(schema, "prop:\n - a\n - true\n - 1"); } public void testConstV6() { @Language("JSON") String schema = "{\"properties\": {\"prop\": {\"type\": \"string\", \"const\": \"foo\"}}}"; doTest(schema, "prop: <warning>a</warning>"); doTest(schema, "prop: <warning>5</warning>"); doTest(schema, "prop: foo"); } public void testIfThenElseV7() { @Language("JSON") String schema = "{\n" + " \"if\": {\n" + " \"properties\": {\n" + " \"a\": {\n" + " \"type\": \"string\"\n" + " }\n" + " },\n" + " \"required\": [\"a\"]\n" + " },\n" + " \"then\": {\n" + " \"properties\": {\n" + " \"b\": {\n" + " \"type\": \"number\"\n" + " }\n" + " },\n" + " \"required\": [\"b\"]\n" + " },\n" + " \"else\": {\n" + " \"properties\": {\n" + " \"c\": {\n" + " \"type\": \"boolean\"\n" + " }\n" + " },\n" + " \"required\": [\"c\"]\n" + " }\n" + "}"; doTest(schema, "c: <warning>5</warning>"); doTest(schema, "c: true"); doTest(schema, "<warning>a: a\nc: true</warning>"); doTest(schema, "a: a\nb: <warning>true</warning>"); doTest(schema, "a: a\nb: 5"); } public void testNestedOneOf() { @Language("JSON") String 
schema = "{\"type\":\"object\",\n" + " \"oneOf\": [\n" + " {\n" + " \"properties\": {\n" + " \"type\": {\n" + " \"type\": \"string\",\n" + " \"oneOf\": [\n" + " {\n" + " \"pattern\": \"(good)\"\n" + " },\n" + " {\n" + " \"pattern\": \"(ok)\"\n" + " }\n" + " ]\n" + " }\n" + " }\n" + " },\n" + " {\n" + " \"properties\": {\n" + " \"type\": {\n" + " \"type\": \"string\",\n" + " \"pattern\": \"^(fine)\"\n" + " },\n" + " \"extra\": {\n" + " \"type\": \"string\"\n" + " }\n" + " },\n" + " \"required\": [\"type\", \"extra\"]\n" + " }\n" + " ]}"; doTest(schema, "type: good"); doTest(schema, "type: ok"); doTest(schema, "type: <warning>doog</warning>"); doTest(schema, "type: <warning>ko</warning>"); } public void testArrayRefs() { @Language("JSON") String schema = "{\n" + " \"myDefs\": {\n" + " \"myArray\": [\n" + " {\n" + " \"type\": \"number\"\n" + " },\n" + " {\n" + " \"type\": \"boolean\"\n" + " }\n" + " ]\n" + " },\n" + " \"type\": \"array\",\n" + " \"items\": [\n" + " {\n" + " \"$ref\": \"#/myDefs/myArray/0\"\n" + " },\n" + " {\n" + " \"$ref\": \"#/myDefs/myArray/1\"\n" + " }\n" + " ]\n" + "}"; doTest(schema, "- 1\n- <warning>2</warning>"); doTest(schema, "- <warning>a</warning>\n- <warning>2</warning>"); doTest(schema, "- <warning>a</warning>\n- true"); doTest(schema, "- 1\n- false"); } public void testWithTags() { @Language("JSON") String schema = "{\"properties\": { \"platform\": { \"enum\": [\"x86\", \"x64\"] } }}"; doTest(schema, "platform:\n !!str x64"); doTest(schema, "platform:\n <warning>a x64</warning>"); } public void testAmazonElasticSchema() throws Exception { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/cloudformation.schema.json")); doTest(schema, "Resources:\n" + " ElasticsearchCluster:\n" + " Type: \"AWS::Elasticsearch::Domain\"\n" + " Properties:\n" + " ElasticsearchVersion: !FindInMap [ElasticSearchConfig, !Ref AccountType, Version]\n" + "Conditions:\n" + " IsDev: !Equals [!Ref AccountType, dev]"); } public void 
testGitlabSchema() throws Exception { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/gitlab-ci.schema.json")); doTest(schema, "a:\n" + " extends: .b\n" + " script: echo"); } @Language("JSON") private static final String SCHEMA_FOR_REFS = "{\n" + " \"type\": \"object\",\n" + "\n" + " \"properties\": {\n" + " \"name\": { \"type\": \"string\", \"enum\": [\"aa\", \"bb\"] },\n" + " \"bar\": {\n" + " \"required\": [\n" + " \"a\"\n" + " ],\n" + " \"properties\": {\n" + " \"a\": {\n" + " \"type\": [\"array\"]\n" + " },\n" + " \"b\": {" + " \"type\": [\"number\"]" + " }\n" + " },\n" + " \"additionalProperties\": false\n" + " }\n" + " }\n" + "}\n"; public void testRefExtends() { // no warning about missing required property - it should be discovered in referenced object // no warning about extra 'property' with name '<<' with additionalProperties=false doTest(SCHEMA_FOR_REFS, "a: &a\n" + " a: <warning descr=\"Schema validation: Incompatible types.\n Required: array. Actual: integer.\">7</warning>\n" + "\n" + "bar:\n" + " <<: *a\n" + " b: 5\n"); } public void testRefRefValid() { // no warnings - &a references &b, which is an array - validation passes doTest(SCHEMA_FOR_REFS, "x: &b\n" + " - x\n" + " - y\n" + "\n" + "a: &a\n" + " a: *b\n" + "\n" + "bar:\n" + " <<: *a\n" + " b: 5"); } public void testRefRefInvalid() { doTest(SCHEMA_FOR_REFS, "x: &b <warning descr=\"Schema validation: Incompatible types.\n Required: array. Actual: number.\">7</warning>\n" + "\n" + "a: &a\n" + " a: *b\n" + "\n" + "bar:\n" + " <<: *a\n" + " b: 5"); } public void testRefRefScalarValid() { doTest(SCHEMA_FOR_REFS, "x: &b 7\n" + "\n" + "a: &a\n" + " b: *b\n" + "\n" + "bar:\n" + " <<: *a\n" + " a: <warning descr=\"Schema validation: Incompatible types.\n Required: array. Actual: integer.\">5</warning>"); } public void testInlineRef() { doTest(SCHEMA_FOR_REFS, "bar:\n" + " <<: &q\n" + " a: <warning descr=\"Schema validation: Incompatible types.\n Required: array. 
Actual: integer.\">5</warning>\n" + " b: 5"); } static String schema(final String s) { return "{\"type\": \"object\", \"properties\": {\"prop\": " + s + "}}"; } public static String rootObjectRedefinedSchema() { return "{\n" + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" + " \"type\": \"object\",\n" + " \"$ref\" : \"#/definitions/root\",\n" + " \"definitions\": {\n" + " \"root\" : {\n" + " \"type\": \"object\",\n" + " \"additionalProperties\": false,\n" + " \"properties\": {\n" + " \"r1\": {\n" + " \"type\": \"string\"\n" + " },\n" + " \"r2\": {\n" + " \"type\": \"string\"\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}\n"; } public void testTravisPythonVersion() throws Exception { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/travis.schema.json")); doTest(schema, "python: 3.5"); // validates as 'number' doTest(schema, "python: 3.50"); // validates as 'number' doTest(schema, "python: 3.50a"); // validates as 'string' doTest(schema, "python: <warning descr=\"Schema validation: Incompatible types.\n Required one of: array, number, string. Actual: null.\">null</warning>"); } public void testTravisNode() throws Exception { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/travis.schema.json")); doTest(schema, "node_js: \n" + " - <warning descr=\"Schema validation: Incompatible types.\n Required: string. 
Actual: number.\">2.10</warning>"); } public void testTravisMultiDocument() throws Exception { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/travis.schema.json")); doTest(schema, "after_script: true\n" + "sbt_args: <warning>1</warning>\n" + "---\n" + "after_script: true\n" + "sbt_args: <warning>1</warning>\n"); } public void testExpNumberNotation() { doTest("{\n" + " \"properties\": {\n" + " \"x\": {\n" + " \"type\": \"number\"\n" + " }\n" + " }\n" + "}", "x: 2.99792458e8"); } public void testTreatEmptyValueAsNull_1() { doTest("{\n" + " \"properties\": {\n" + " \"x\": {\n" + " \"type\": \"number\"\n" + " }\n" + " }\n" + "}", "x:<warning descr=\"Schema validation: Incompatible types.\n Required: number. Actual: null.\"> </warning>"); } public void testTreatEmptyValueAsNull_2() { doTest("{\n" + " \"properties\": {\n" + " \"x\": {\n" + " \"type\": \"null\"\n" + " }\n" + " }\n" + "}", "x: "); } public void testEmptyValueInArray() { doTest("{\n" + " \"type\": \"object\",\n" + "\n" + " \"properties\": {\n" + " \"versionAsStringArray\": {\n" + " \"type\": \"array\",\n" + " \"items\": {\n" + " \"type\": \"string\"\n" + " }\n" + " }\n" + " }\n" + "}", "versionAsStringArray:\n" + " -<warning descr=\"Schema validation: Incompatible types.\n Required: string. Actual: null.\"> </warning>\n" + " <warning descr=\"Schema validation: Incompatible types.\n Required: string. 
Actual: null.\">-</warning>\n" + " - a"); } public void testEmptyFile() { doTest("{\n" + " \"type\": \"object\",\n" + "\n" + " \"properties\": {\n" + " \"versionAsStringArray\": {\n" + " \"type\": \"array\"\n" + " }\n" + " },\n" + " \"required\": [\"versionAsStringArray\"]\n" + "}", "<warning descr=\"Schema validation: Missing required property 'versionAsStringArray'\"></warning>"); } public void testEmptyValueBetweenProps() { doTest("{\n" + " \"type\": \"object\",\n" + "\n" + " \"properties\": {\n" + " \"versionAsStringArray\": {\n" + " \"type\": \"object\",\n" + " \"properties\": {\n" + " \"xxx\": {\n" + " \"type\": \"number\"\n" + " },\n" + " \"yyy\": {\n" + " \"type\": \"string\"\n" + " },\n" + " \"zzz\": {\n" + " \"type\": \"number\"\n" + " }\n" + " },\n" + " \"required\": [\"xxx\", \"yyy\", \"zzz\"]\n" + " }\n" + " },\n" + " \"required\": [\"versionAsStringArray\"]\n" + "}", "versionAsStringArray:\n" + " zzz: 0\n" + " yyy:<warning descr=\"Schema validation: Incompatible types.\n Required: string. 
Actual: null.\"> </warning>\n" + " xxx: 0"); } public void testDeprecation() { doTest("{\"properties\": {\n" + " \"myPropertyXxx\": {\n" + " \"deprecationMessage\": \"Baz\",\n" + " \"description\": \"Foo bar\"\n" + " }\n" + " }}", "<weak_warning descr=\"Key 'myPropertyXxx' is deprecated: Baz\">myPropertyXxx</weak_warning>: a"); } public void testPropertyNameSchema() { doTest("{\n" + " \"type\": \"object\",\n" + " \"patternProperties\": {\n" + " \".*\": {\n" + " \"type\": \"boolean\"\n" + " }\n" + " },\n" + " \"propertyNames\": {\n" + " \"enum\": [\"a\", \"b\"]\n" + " }\n" + "}", "<warning>r</warning>: true"); } public void _testTypeVariants() throws IOException { @Language("JSON") String schema = FileUtil.loadFile(new File(getTestDataPath() + "/prometheus.schema.json")); doTest(schema, "alerting:\n" + " alertmanagers:\n" + " - static_configs:\n" + " - targets: <warning>1</warning> \n" + " # - alertmanager:9093 \n" + "\n" + "rule_files:\n" + " # - \"first_rules.yml\"\n" + " # - \"second_rules.yml\""); } }
/*
 * This file is part of QuickStart Module Loader, licensed under the MIT License (MIT). See the LICENSE.txt file
 * at the root of this project for more details.
 */
package uk.co.drnaylor.quickstart;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.ConfigurationOptions;
import ninja.leaping.configurate.loader.ConfigurationLoader;
import ninja.leaping.configurate.objectmapping.ObjectMappingException;
import ninja.leaping.configurate.objectmapping.serialize.ConfigSerializable;
import uk.co.drnaylor.quickstart.annotations.ModuleData;
import uk.co.drnaylor.quickstart.config.AbstractConfigAdapter;
import uk.co.drnaylor.quickstart.config.NoMergeIfPresent;
import uk.co.drnaylor.quickstart.config.TypedAbstractConfigAdapter;
import uk.co.drnaylor.quickstart.enums.ConstructionPhase;
import uk.co.drnaylor.quickstart.enums.LoadingStatus;
import uk.co.drnaylor.quickstart.enums.ModulePhase;
import uk.co.drnaylor.quickstart.exceptions.IncorrectAdapterTypeException;
import uk.co.drnaylor.quickstart.exceptions.MissingDependencyException;
import uk.co.drnaylor.quickstart.exceptions.NoModuleException;
import uk.co.drnaylor.quickstart.exceptions.QuickStartModuleDiscoveryException;
import uk.co.drnaylor.quickstart.exceptions.QuickStartModuleLoaderException;
import uk.co.drnaylor.quickstart.exceptions.UndisableableModuleException;
import uk.co.drnaylor.quickstart.loaders.PhasedModuleEnabler;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import javax.annotation.Nullable;

/**
 * The {@link ModuleHolder} contains all modules for a particular modular system.
 * It handles all the discovery, module config file generation, loading and enabling of modules.
 *
 * <p>
 * A system may have multiple module holders. Each module holders is completely separate from one another.
 * </p>
 */
public abstract class ModuleHolder<M extends Module, D extends M> {

    /**
     * The class type {@link M} of the basic class.
     */
    private final Class<M> baseClass;

    /**
     * The class type {@link D} of the module type.
     */
    private final Class<D> disableableClass;

    /**
     * Whether the module are disableable at runtime.
     */
    private final boolean allowDisabling;

    /**
     * The current phase of the container.
     */
    private ConstructionPhase currentPhase = ConstructionPhase.INITALISED;

    /**
     * The modules that have been discovered by the container.
     * Insertion-ordered: populated in dependency order by {@link #processDependencyStep}.
     */
    private final Map<String, ModuleMetadata<? extends M>> discoveredModules = Maps.newLinkedHashMap();

    /**
     * Loaded modules that can be disabled.
     */
    private final Map<String, ModuleMetadata<? extends D>> enabledDisableableModules = Maps.newHashMap();

    /**
     * The modules that are enabled.
     */
    private final Map<String, M> enabledModules = Maps.newHashMap();

    /**
     * The actual disableable module objects
     */
    private final Map<String, D> disableableModules = Maps.newHashMap();

    /**
     * Contains the main configuration file.
     */
    protected final SystemConfig<?, M> config;

    /**
     * The logger to use.
     */
    protected final LoggerProxy loggerProxy;

    /**
     * Provides a way to enable modules.
     */
    private final PhasedModuleEnabler<M, D> enabler;

    /**
     * Whether the {@link ModuleData} annotation must be present on modules.
     */
    private final boolean requireAnnotation;

    /**
     * Whether or not to take note of {@link NoMergeIfPresent} annotations on configs.
     */
    private final boolean processDoNotMerge;

    /**
     * The function that determines configuration headers for an entry.
     */
    private final Function<M, String> headerProcessor;

    /**
     * The function that determines the descriptions for a module's name.
     */
    private final Function<Class<? extends M>, String> descriptionProcessor;

    /**
     * The name of the configuration section that contains the module flags
     */
    private final String moduleSection;

    /**
     * The header of the configuration section that contains the module flags
     */
    @Nullable private final String moduleSectionHeader;

    /**
     * Copies all settings from the supplied {@link Builder}; any failure is wrapped
     * in a {@link QuickStartModuleDiscoveryException}.
     */
    protected <R extends ModuleHolder<M, D>, B extends Builder<M, D, R, B>> ModuleHolder(B builder) throws QuickStartModuleDiscoveryException {
        try {
            this.baseClass = builder.moduleType;
            this.disableableClass = builder.disableableClass;
            this.config = new SystemConfig<>(builder.configurationLoader, builder.loggerProxy, builder.configurationOptionsTransformer, ImmutableList.copyOf(builder.transformations));
            this.loggerProxy = builder.loggerProxy;
            this.enabler = builder.enabler;
            this.requireAnnotation = builder.requireAnnotation;
            this.processDoNotMerge = builder.doNotMerge;
            // Default description provider reads the ModuleData annotation, if present.
            this.descriptionProcessor = builder.moduleDescriptionHandler == null ? m -> {
                ModuleData md = m.getAnnotation(ModuleData.class);
                if (md != null) {
                    return md.description();
                }
                return "";
            } : builder.moduleDescriptionHandler;
            this.headerProcessor = builder.moduleConfigurationHeader == null ? m -> "" : builder.moduleConfigurationHeader;
            this.moduleSection = builder.moduleConfigSection;
            this.moduleSectionHeader = builder.moduleDescription;
            this.allowDisabling = builder.allowDisabling;
        } catch (Exception e) {
            throw new QuickStartModuleDiscoveryException("Unable to start QuickStart", e);
        }
    }

    /**
     * Discovers the modules (via {@link #discoverModules()}), resolves their dependency
     * order, attaches the module-flags config section, and applies each module's
     * configured {@link LoadingStatus}. Moves the holder from INITALISED to DISCOVERED.
     */
    public final void startDiscover() throws QuickStartModuleDiscoveryException {
        try {
            Preconditions.checkState(currentPhase == ConstructionPhase.INITALISED);
            currentPhase = ConstructionPhase.DISCOVERING;
            Set<Class<? extends M>> modules = discoverModules();
            HashMap<String, ModuleMetadata<? extends M>> discovered = Maps.newHashMap();
            for (Class<? extends M> s : modules) {
                // If we have a module annotation, we are golden.
                String id;
                ModuleMetadata<? extends M> ms;
                if (s.isAnnotationPresent(ModuleData.class)) {
                    ModuleData md = s.getAnnotation(ModuleData.class);
                    id = md.id().toLowerCase();
                    ms = new ModuleMetadata<>(s, this.disableableClass.isAssignableFrom(s), md);
                } else if (this.requireAnnotation) {
                    // Annotation required but missing: skip this module class entirely.
                    loggerProxy.warn(MessageFormat.format("The module class {0} does not have a ModuleData annotation associated with it. " + "It is not being loaded as the module container requires the annotation to be present.", s.getName()));
                    continue;
                } else {
                    // No annotation, but not required: fall back to the class name as the ID.
                    id = s.getName().toLowerCase();
                    loggerProxy.warn(MessageFormat.format("The module {0} does not have a ModuleData annotation associated with it. We're just assuming an ID of {0}.", id));
                    ms = new ModuleMetadata<>(s, this.disableableClass.isAssignableFrom(s), id, id, LoadingStatus.ENABLED, false);
                }
                if (discovered.containsKey(id)) {
                    throw new QuickStartModuleDiscoveryException("Duplicate module ID \"" + id + "\" was discovered - loading cannot continue.");
                }
                discovered.put(id, ms);
            }

            // Create the dependency map.
            resolveDependencyOrder(discovered);

            // Modules discovered. Create the Module Config adapter.
            List<ModuleMetadata<? extends M>> moduleMetadataList = this.discoveredModules.values().stream()
                    .filter(rModuleMetadata -> !rModuleMetadata.isMandatory())
                    .collect(Collectors.toList());

            // Attaches config adapter and loads in the defaults.
            config.attachModulesConfig(moduleMetadataList, this.descriptionProcessor, this.moduleSection, this.moduleSectionHeader);
            config.saveAdapterDefaults(false);

            // Load what we have in config into our discovered modules.
            try {
                config.getConfigAdapter().getNode().forEach((k, v) -> {
                    try {
                        ModuleMetadata<? extends M> ms = discoveredModules.get(k);
                        if (ms != null) {
                            ms.setStatus(v);
                        } else {
                            loggerProxy.warn(String.format("Ignoring module entry %s in the configuration file: module does not exist.", k));
                        }
                    } catch (IllegalStateException ex) {
                        loggerProxy.warn("A mandatory module can't have its status changed by config. Falling back to FORCELOAD for " + k);
                    }
                });
            } catch (ObjectMappingException e) {
                // Non-fatal: keep the defaults that were just saved.
                loggerProxy.warn("Could not load modules config, falling back to defaults.");
                e.printStackTrace();
            }

            // Modules have been discovered.
            currentPhase = ConstructionPhase.DISCOVERED;
        } catch (QuickStartModuleDiscoveryException ex) {
            throw ex;
        } catch (Exception e) {
            throw new QuickStartModuleDiscoveryException("Unable to discover QuickStart modules", e);
        }
    }

    /**
     * Moves entries from {@code modules} into {@link #discoveredModules} in dependency
     * order: first modules with no (soft) dependencies, then repeatedly any module whose
     * (soft) dependencies are all already placed.
     *
     * <p>NOTE(review): termination on a circular dependency relies on
     * {@link #processDependencyStep} throwing {@link IllegalStateException} when no
     * module can be placed in a step — confirm that is the intended failure mode.</p>
     */
    private void resolveDependencyOrder(Map<String, ModuleMetadata<? extends M>> modules) throws Exception {
        // First, get the modules that have no deps.
        processDependencyStep(modules, x -> x.getValue().getDependencies().isEmpty() && x.getValue().getSoftDependencies().isEmpty());
        while (!modules.isEmpty()) {
            // Live view of the IDs placed so far.
            Set<String> addedModules = discoveredModules.keySet();
            processDependencyStep(modules, x -> addedModules.containsAll(x.getValue().getDependencies()) && addedModules.containsAll(x.getValue().getSoftDependencies()));
        }
    }

    /**
     * Moves every entry of {@code modules} matching {@code predicate} into
     * {@link #discoveredModules}, mandatory modules first, then alphabetically by ID.
     *
     * @throws IllegalStateException if nothing matched — interpreted as a dependency cycle.
     */
    private void processDependencyStep(Map<String, ModuleMetadata<? extends M>> modules, Predicate<Map.Entry<String, ModuleMetadata<? extends M>>> predicate) {
        // Filter on the predicate
        List<Map.Entry<String, ModuleMetadata<? extends M>>> modulesToAdd = modules.entrySet().stream().filter(predicate)
                .sorted((x, y) -> x.getValue().isMandatory() == y.getValue().isMandatory() ? x.getKey().compareTo(y.getKey()) : Boolean.compare(x.getValue().isMandatory(), y.getValue().isMandatory()))
                .collect(Collectors.toList());
        if (modulesToAdd.isEmpty()) {
            throw new IllegalStateException("Some modules have circular dependencies: " + String.join(", ", modules.keySet()));
        }
        modulesToAdd.forEach(x -> {
            discoveredModules.put(x.getKey(), x.getValue());
            modules.remove(x.getKey());
        });
    }

    /**
     * Recursively checks that every (transitive) hard dependency of the given module
     * is contained in {@code enabledModules}.
     */
    private boolean dependenciesSatisfied(ModuleMetadata<? extends M> moduleMetadata, Set<String> enabledModules) {
        if (moduleMetadata.getDependencies().isEmpty()) {
            return true;
        }
        for (String m : moduleMetadata.getDependencies()) {
            if (!enabledModules.contains(m) || !dependenciesSatisfied(this.discoveredModules.get(m), enabledModules)) {
                return false;
            }
        }
        // We know the deps are satisfied.
        return true;
    }

    /**
     * Finds the module classes this holder should manage. Implemented by subclasses.
     */
    protected abstract Set<Class<? extends M>> discoverModules() throws Exception;

    /**
     * Gets the current phase of the module loader.
     *
     * @return The {@link ConstructionPhase}
     */
    public ConstructionPhase getCurrentPhase() {
        return currentPhase;
    }

    /**
     * Gets a set of IDs of modules that are going to be loaded.
     *
     * <p>NOTE(review): the method name says "disableable" but it simply returns the
     * IDs with status ENABLE — confirm whether the name or the behaviour is intended.</p>
     *
     * @return The modules that are going to be loaded.
     */
    public Set<String> getDisableableModules() {
        return getModules(ModuleStatusTristate.ENABLE);
    }

    /**
     * Gets a set of IDs of modules.
     *
     * @param enabledOnly If <code>true</code>, only return modules that are going to be loaded.
     * @return The modules.
     */
    public Set<String> getModules(final ModuleStatusTristate enabledOnly) {
        Preconditions.checkNotNull(enabledOnly);
        Preconditions.checkState(currentPhase != ConstructionPhase.INITALISED && currentPhase != ConstructionPhase.DISCOVERING);
        return discoveredModules.entrySet().stream().filter(enabledOnly.statusPredicate).map(Map.Entry::getKey).collect(Collectors.toSet());
    }

    /**
     * Gets an immutable {@link Map} of module IDs to their {@link LoadingStatus} (disabled, enabled, forceload).
     *
     * @return The modules with their loading states.
     */
    public Map<String, LoadingStatus> getModulesWithLoadingState() {
        Preconditions.checkState(currentPhase != ConstructionPhase.INITALISED && currentPhase != ConstructionPhase.DISCOVERING);
        return ImmutableMap.copyOf(discoveredModules.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue().getStatus())));
    }

    /**
     * Gets whether a module is enabled and loaded.
     *
     * @param moduleId The module ID to check for.
     * @return <code>true</code> if it is enabled.
     * @throws NoModuleException Thrown if the module does not exist and modules have been loaded.
     */
    public boolean isModuleLoaded(String moduleId) throws NoModuleException {
        if (currentPhase != ConstructionPhase.ENABLING && currentPhase != ConstructionPhase.ENABLED) {
            return false;
        }
        ModuleMetadata ms = discoveredModules.get(moduleId);
        if (ms == null) {
            // No module
            throw new NoModuleException(moduleId);
        }
        return ms.getPhase() == ModulePhase.ENABLED;
    }

    /**
     * Requests that a module be disabled. This can only be run during the {@link ConstructionPhase#DISCOVERED} phase, or for
     * {@link Module}s that are disableable (of type {@link D}, {@link ConstructionPhase#ENABLED}.
     *
     * @param moduleName The ID of the module.
     * @throws UndisableableModuleException if the module can't be disabled.
     * @throws NoModuleException if the module does not exist.
     * @throws QuickStartModuleLoaderException if there is a failure during disabling
     */
    public void disableModule(String moduleName) throws UndisableableModuleException, NoModuleException, QuickStartModuleLoaderException {
        if (currentPhase == ConstructionPhase.DISCOVERED) {
            // Pre-load: just flip the status flag so the module is never constructed.
            ModuleMetadata ms = discoveredModules.get(moduleName);
            if (ms == null) {
                // No module
                throw new NoModuleException(moduleName);
            }
            if (ms.isMandatory() || ms.getStatus() == LoadingStatus.FORCELOAD) {
                throw new UndisableableModuleException(moduleName);
            }
            ms.setStatus(LoadingStatus.DISABLED);
        } else {
            // Runtime disable: only allowed once fully ENABLED, and only for
            // runtime-alterable, successfully enabled, disableable modules.
            Preconditions.checkState(currentPhase == ConstructionPhase.ENABLED);
            if (!this.allowDisabling) {
                throw new UndisableableModuleException(moduleName.toLowerCase(), "Cannot disable modules in this holder.");
            }
            ModuleMetadata ms = this.enabledDisableableModules.get(moduleName);
            if (ms == null || !ms.isRuntimeAlterable()) {
                throw new UndisableableModuleException(moduleName.toLowerCase(), "Cannot disable this module at runtime!");
            }
            Preconditions.checkState(ms.getPhase() != ModulePhase.ERRORED, "Cannot disable this module as it errored!");
            Preconditions.checkState(ms.getPhase() == ModulePhase.ENABLED, "Cannot disable this module as it is not enabled!");
            // disableModule(moduleName.toLowerCase());
            D module = this.disableableModules.get(moduleName);
            for (String phase : this.enabler.getDisablePhases()) {
                try {
                    this.enabler.startDisablePhase(phase, this, module);
                } catch (Exception e) {
                    // A failed disable phase marks the module ERRORED and detaches its config.
                    detachConfig(ms.getName());
                    ms.setPhase(ModulePhase.ERRORED);
                    throw new QuickStartModuleLoaderException.Disabling(
                            module.getClass(), "Could not disable the module " + ms.getId(), e
                    );
                }
            }
            detachConfig(ms.getName());
            ms.setPhase(ModulePhase.DISABLED);
            this.enabledModules.remove(moduleName);
            this.enabledDisableableModules.remove(moduleName);
        }
    }

    protected final Class<M> getBaseClass() {
        return this.baseClass;
    }

    /**
     * Gets an enabled module given the ID.
     *
     * @param id The ID
     * @param <T> The type, for duck typing
     * @return The module, if it exists.
     */
    @SuppressWarnings("unchecked")
    public <T extends M> Optional<T> getModule(String id) {
        return Optional.ofNullable((T) this.enabledModules.get(id));
    }

    /**
     * Returns the already-enabled module for the given metadata, constructing it via
     * {@link #constructModule} if it has not been created yet.
     */
    protected M getModule(ModuleMetadata<? extends M> spec) throws Exception {
        M module = this.enabledModules.get(spec.getId());
        if (module == null) {
            return constructModule(spec);
        }
        return module;
    }

    /**
     * Instantiates the module described by the given metadata. Implemented by subclasses.
     */
    protected abstract M constructModule(ModuleMetadata<? extends M> spec) throws Exception;

    /**
     * Starts the module construction and enabling phase. This is the final phase for loading the modules.
     *
     * <p>
     * Once this method is called, modules can no longer be removed.
     * </p>
     *
     * @param failOnOneError If set to <code>true</code>, one module failure will mark the whole loading sequence as failed.
     *                       Otherwise, no modules being constructed will cause a failure.
     *
     * @throws QuickStartModuleLoaderException.Construction if the modules cannot be constructed.
     * @throws QuickStartModuleLoaderException.Enabling if the modules cannot be enabled.
     */
    public void loadModules(boolean failOnOneError) throws QuickStartModuleLoaderException.Construction, QuickStartModuleLoaderException.Enabling {
        Preconditions.checkArgument(currentPhase == ConstructionPhase.DISCOVERED);
        currentPhase = ConstructionPhase.ENABLING;

        // Get the modules that are being disabled and mark them as such.
        Set<String> disabledModules = getModules(ModuleStatusTristate.DISABLE);
        while (!disabledModules.isEmpty()) {
            // Find any modules that have dependencies on disabled modules, and disable them.
            // (Fixed point: loop until no enabled module still depends on a disabled one.)
            List<ModuleMetadata<? extends M>> toDisable = getModules(ModuleStatusTristate.ENABLE)
                    .stream()
                    .map(discoveredModules::get)
                    .filter(x -> !Collections.disjoint(disabledModules, x.getDependencies()))
                    .collect(Collectors.toList());
            if (toDisable.isEmpty()) {
                break;
            }
            if (toDisable.stream().anyMatch(ModuleMetadata::isMandatory)) {
                // A mandatory module transitively depends on a disabled module: abort.
                String s = toDisable.stream().filter(ModuleMetadata::isMandatory).map(ModuleMetadata::getId).collect(Collectors.joining(", "));
                Class<? extends M> m = toDisable.stream().filter(ModuleMetadata::isMandatory).findFirst().get().getModuleClass();
                throw new QuickStartModuleLoaderException.Construction(m, "Tried to disable mandatory module", new IllegalStateException("Dependency failure, tried to disable a mandatory module (" + s + ")"));
            }
            toDisable.forEach(k -> {
                k.setStatus(LoadingStatus.DISABLED);
                disabledModules.add(k.getId());
            });
        }

        // Make sure we get a clean slate here.
        getModules(ModuleStatusTristate.DISABLE).forEach(k -> discoveredModules.get(k).setPhase(ModulePhase.DISABLED));

        // Construct them
        for (String s : getModules(ModuleStatusTristate.ENABLE)) {
            ModuleMetadata<?
extends M> ms = discoveredModules.get(s); try { enabledModules.put(s, constructModule(ms)); ms.setPhase(ModulePhase.CONSTRUCTED); } catch (Exception construction) { construction.printStackTrace(); ms.setPhase(ModulePhase.ERRORED); loggerProxy.error("The module " + ms.getModuleClass().getName() + " failed to construct."); if (failOnOneError) { currentPhase = ConstructionPhase.ERRORED; throw new QuickStartModuleLoaderException.Construction(ms.getModuleClass(), "The module " + ms.getModuleClass().getName() + " failed to construct.", construction); } } } if (enabledModules.isEmpty()) { currentPhase = ConstructionPhase.ERRORED; throw new QuickStartModuleLoaderException.Construction(null, "No modules were constructed.", null); } enabledModules.forEach((k, v) -> { if (this.disableableClass.isAssignableFrom(v.getClass())) { this.disableableModules.put(k, this.disableableClass.cast(v)); } }); int size = enabledModules.size(); { Iterator<Map.Entry<String, M>> im = enabledModules.entrySet().iterator(); while (im.hasNext()) { Map.Entry<String, M> module = im.next(); try { module.getValue().checkExternalDependencies(); } catch (MissingDependencyException ex) { this.discoveredModules.get(module.getKey()).setStatus(LoadingStatus.DISABLED); this.discoveredModules.get(module.getKey()).setPhase(ModulePhase.DISABLED); this.loggerProxy.warn("Module " + module.getKey() + " can not be enabled because an external dependency could not be satisfied."); this.loggerProxy.warn("Message was: " + ex.getMessage()); im.remove(); } } } while (size != enabledModules.size()) { // We might need to disable modules. 
size = enabledModules.size(); Iterator<Map.Entry<String, M>> im = enabledModules.entrySet().iterator(); while (im.hasNext()) { Map.Entry<String, M> module = im.next(); if (!dependenciesSatisfied(this.discoveredModules.get(module.getKey()), getModules(ModuleStatusTristate.ENABLE))) { im.remove(); this.loggerProxy.warn("Module " + module.getKey() + " can not be enabled because an external dependency on a module it " + "depends on could not be satisfied."); this.discoveredModules.get(module.getKey()).setStatus(LoadingStatus.DISABLED); this.discoveredModules.get(module.getKey()).setPhase(ModulePhase.DISABLED); } } } // Enter Config Adapter phase - attaching before enabling so that enable methods can get any associated configurations. for (String s : enabledModules.keySet()) { M m = enabledModules.get(s); try { attachConfig(s, m); } catch (Exception e) { e.printStackTrace(); if (failOnOneError) { throw new QuickStartModuleLoaderException.Enabling(m.getClass(), "Failed to attach config.", e); } } } // Enter Enable phase. Set<String> phases = this.enabler.getEnablePhases(); for (String phase : phases) { loggerProxy.info(String.format("Starting phase: %s", phase)); try { this.enabler.startEnablePrePhase(phase, this); } catch (Exception ex) { this.currentPhase = ConstructionPhase.ERRORED; throw new RuntimeException("Could not load modules, phase " + phase + " failed to load.", ex); } Iterator<String> is = enabledModules.keySet().iterator(); while (is.hasNext()) { String i = is.next(); ModuleMetadata<? extends M> ms = discoveredModules.get(i); // If the module is errored, then we do not continue. 
if (ms.getPhase() == ModulePhase.ERRORED) { continue; } try { M m = enabledModules.get(i); this.enabler.startEnablePhase(phase, this, m); } catch (Exception construction) { construction.printStackTrace(); is.remove(); ms.setPhase(ModulePhase.ERRORED); loggerProxy.error("The module " + ms.getModuleClass().getName() + " failed to enable."); if (failOnOneError) { currentPhase = ConstructionPhase.ERRORED; throw new QuickStartModuleLoaderException.Enabling(ms.getModuleClass(), "The module " + ms.getModuleClass().getName() + " failed to enable.", construction); } } } } if (enabledModules.isEmpty()) { currentPhase = ConstructionPhase.ERRORED; throw new QuickStartModuleLoaderException.Enabling(null, "No modules were enabled.", null); } // Modules in this list did not fail. enabledModules.forEach((k, v) -> this.discoveredModules.get(k).setPhase(ModulePhase.ENABLED)); resetDisableableList(); try { config.saveAdapterDefaults(this.processDoNotMerge); } catch (IOException e) { e.printStackTrace(); } currentPhase = ConstructionPhase.ENABLED; } private void resetDisableableList() { this.enabledDisableableModules.clear(); this.discoveredModules.values().stream() .filter(x -> x.getPhase() == ModulePhase.ENABLED) .filter(ModuleMetadata::isRuntimeAlterable) .forEach(x -> this.enabledDisableableModules.put(x.getId(), (ModuleMetadata<? extends D>) x)); } /** * Enables a {@link D} after the construction has completed. * * @param name The name of the module to load. * @throws Exception thrown if the module is not loadable for any reason, including if it is already enabled. */ public void runtimeEnable(String name) throws Exception { runtimeEnable(ImmutableSet.of(name)); } public void runtimeEnable(Set<String> name) throws Exception { Preconditions.checkState(this.currentPhase == ConstructionPhase.ENABLED); Set<String> modulesToCheck = name.stream().map(String::toLowerCase).collect(Collectors.toSet()); Set<ModuleMetadata<? 
extends D>> containers = new HashSet<>(); for (String moduleName : modulesToCheck) { Preconditions.checkState(!isModuleLoaded(moduleName), "Module is already loaded!"); ModuleMetadata<? extends M> ms = discoveredModules.get(moduleName); Preconditions.checkState(this.disableableClass.isAssignableFrom(ms.getModuleClass()), "Module " + name + " cannot be enabled at runtime!"); //noinspection unchecked containers.add((ModuleMetadata<? extends D>) ms); } for (ModuleMetadata<? extends D> ms : containers) { try { // Construction D module = this.disableableModules.get(ms.getId()); if (module == null) { module = (D) constructModule(ms); this.disableableModules.put(ms.getId(), module); } ms.setPhase(ModulePhase.CONSTRUCTED); Set<String> phases = this.enabler.getEnablePhases(); module.checkExternalDependencies(); // Enabling for (String phase : phases) { this.enabler.startEnablePhase(phase, this, module); } ms.setPhase(ModulePhase.ENABLED); this.enabledModules.put(ms.getId(), module); } catch (Exception construction) { ms.setPhase(ModulePhase.ERRORED); throw construction; } } resetDisableableList(); } private void attachConfig(String name, M m) throws Exception { Optional<AbstractConfigAdapter<?>> a = m.getConfigAdapter(); if (a.isPresent()) { config.attachConfigAdapter(name, a.get(), this.headerProcessor.apply(m)); } } private void detachConfig(String name) { config.detachConfigAdapter(name); } @SuppressWarnings("unchecked") public final <C extends AbstractConfigAdapter<?>> C getConfigAdapterForModule(String module, Class<C> adapterClass) throws NoModuleException, IncorrectAdapterTypeException { return config.getConfigAdapterForModule(module, adapterClass); } /** * Saves the {@link SystemConfig}. * * @throws IOException If the config could not be saved. */ public final void saveSystemConfig() throws IOException { config.save(); } /** * Refreshes the backing {@link ConfigurationNode} and saves the {@link SystemConfig}. * * @throws IOException If the config could not be saved. 
*/ public final void refreshSystemConfig() throws IOException { config.save(true); } /** * Reloads the {@link SystemConfig}, but does not change any module status. * * @throws IOException If the config could not be reloaded. */ public final void reloadSystemConfig() throws IOException { config.load(); } /** * Gets the registered module ID, if it exists. * * @param module The module. * @return The module ID, or an empty {@link Optional#empty()} */ public final Optional<String> getIdForModule(Module module) { return discoveredModules.entrySet().stream().filter(x -> x.getValue().getModuleClass() == module.getClass()).map(Map.Entry::getKey).findFirst(); } /** * Builder class to create a {@link ModuleHolder} */ public static abstract class Builder<M extends Module, D extends M, R extends ModuleHolder<M, D>, T extends Builder<M, D, R, T>> { boolean allowDisabling = false; final Class<M> moduleType; final Class<D> disableableClass; PhasedModuleEnabler<M, D> enabler; ConfigurationLoader<? extends ConfigurationNode> configurationLoader; boolean requireAnnotation = false; LoggerProxy loggerProxy; final List<AbstractConfigAdapter.Transformation> transformations = new ArrayList<>(); Function<ConfigurationOptions, ConfigurationOptions> configurationOptionsTransformer = x -> x; boolean doNotMerge = false; @Nullable Function<Class<? extends M>, String> moduleDescriptionHandler = null; @Nullable Function<M, String> moduleConfigurationHeader = null; String moduleConfigSection = "modules"; @Nullable String moduleDescription = null; protected abstract T getThis(); /** * Creates a builder with the given type of {@link Module}. * * @param moduleType The type of module. * @param disableableClass The type of disableable module, which extends {@link M} */ public Builder(Class<M> moduleType, Class<D> disableableClass) { this.moduleType = moduleType; this.disableableClass = disableableClass; } /** * Sets the {@link ConfigurationLoader} that will handle the module loading. 
* * @param configurationLoader The loader to use. * @return This {@link Builder}, for chaining. */ public T setConfigurationLoader(ConfigurationLoader<? extends ConfigurationNode> configurationLoader) { this.configurationLoader = configurationLoader; return getThis(); } /** * Sets a {@link Function} that takes the loader's {@link ConfigurationOptions}, transforms it, and applies it * to nodes when they are loaded. * * <p> * By default, just uses the {@link ConfigurationOptions} of the loader. * </p> * * @param optionsTransformer The transformer * @return This {@link Builder} for chaining. */ public T setConfigurationOptionsTransformer(Function<ConfigurationOptions, ConfigurationOptions> optionsTransformer) { Preconditions.checkNotNull(optionsTransformer); this.configurationOptionsTransformer = optionsTransformer; return getThis(); } /** * Sets the {@link LoggerProxy} to use for log messages. * * @param loggerProxy The logger proxy to use. * @return This {@link Builder}, for chaining. */ public T setLoggerProxy(LoggerProxy loggerProxy) { this.loggerProxy = loggerProxy; return getThis(); } /** * Sets the {@link PhasedModuleEnabler} to run when enabling modules. * * @param enabler The {@link PhasedModuleEnabler} * @return This {@link Builder}, for chaining. */ public T setModuleEnabler(PhasedModuleEnabler<M, D> enabler) { this.enabler = enabler; return getThis(); } /** * Sets whether {@link Module}s must have a {@link ModuleData} annotation to be considered. * * @param requireAnnotation <code>true</code> to require, <code>false</code> otherwise. * @return The {@link Builder}, for chaining. */ public T setRequireModuleDataAnnotation(boolean requireAnnotation) { this.requireAnnotation = requireAnnotation; return getThis(); } /** * Sets whether {@link TypedAbstractConfigAdapter} {@link ConfigSerializable} fields that have the annotation {@link NoMergeIfPresent} * will <em>not</em> be merged into existing config values. 
* * @param noMergeIfPresent <code>true</code> if fields should be skipped if they are already populated. * @return This {@link Builder}, for chaining. */ public T setNoMergeIfPresent(boolean noMergeIfPresent) { this.doNotMerge = noMergeIfPresent; return getThis(); } /** * Sets the function that is used to set the description for each module in the configuration file. * * <p> * This is displayed above each of the module toggles in the configuration file. * </p> * * @param handler The {@link Function} to use, or {@code null} otherwise. * @return This {@link Builder}, for chaining. */ public T setModuleDescriptionHandler(@Nullable Function<Class<? extends M>, String> handler) { this.moduleDescriptionHandler = handler; return getThis(); } /** * Sets the function that is used to set the header for each module's configuration block in the configuration file. * * <p> * This is displayed above each of the configuration sections in the configuration file. * </p> * * @param header The {@link Function} to use, or {@code null} otherwise. * @return This {@link Builder}, for chaining. */ public T setModuleConfigurationHeader(@Nullable Function<M, String> header) { this.moduleConfigurationHeader = header; return getThis(); } /** * Sets the name of the section that contains the module enable/disable flags. * * @param name The name of the section. Defaults to "modules" * @return This {@link Builder}, for chaining. */ public T setModuleConfigSectionName(String name) { Preconditions.checkNotNull(name); this.moduleConfigSection = name; return getThis(); } /** * Sets the description for the module config section. * * @param description The description, or {@code null} to use the default. * @return This {@link Builder}, for chaining. */ public T setModuleConfigSectionDescription(@Nullable String description) { this.moduleDescription = description; return getThis(); } /** * Tells the system how to transform the entire configuration before it * is loaded. 
Multiple transformations can be performed by chaining this * method. * * @param transformation The transformation to apply. * @return This {@link Builder}, for chaining. */ public T transformConfig(AbstractConfigAdapter.Transformation transformation) { this.transformations.add(transformation); return getThis(); } /** * Sets whether modules in this module holder can be disabled at runtime. * * @param allowDisable true if so * @return This {@link Builder}, for chaining. */ public T setAllowDisable(boolean allowDisable) { this.allowDisabling = allowDisable; return getThis(); } protected void checkBuild() { Preconditions.checkNotNull(configurationLoader); Preconditions.checkNotNull(moduleConfigSection); Preconditions.checkNotNull(enabler); if (loggerProxy == null) { loggerProxy = DefaultLogger.INSTANCE; } Metadata.getStartupMessage().ifPresent(x -> loggerProxy.info(x)); } public abstract R build() throws Exception; /** * Builds the module container and immediately starts discovery. * * @param startDiscover <code>true</code> if so. * @return The built module container. * @throws Exception if there was a problem during building or discovery. */ public final R build(boolean startDiscover) throws Exception { R build = build(); if (startDiscover) { build.startDiscover(); } return build; } } public enum ModuleStatusTristate { ENABLE(k -> k.getValue().getStatus() != LoadingStatus.DISABLED && k.getValue().getPhase() != ModulePhase.ERRORED && k.getValue().getPhase() != ModulePhase.DISABLED), DISABLE(k -> !ENABLE.statusPredicate.test(k)), ALL(k -> true); private final Predicate<Map.Entry<String, ? extends ModuleMetadata<? extends Module>>> statusPredicate; ModuleStatusTristate(Predicate<Map.Entry<String, ? extends ModuleMetadata<? extends Module>>> p) { statusPredicate = p; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.graph.library.metric.directed;

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.flink.api.common.accumulators.LongCounter;
import org.apache.flink.api.common.accumulators.LongMaximum;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.graph.AbstractGraphAnalytic;
import org.apache.flink.graph.AnalyticHelper;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees;
import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
import org.apache.flink.graph.asm.result.PrintableResult;
import org.apache.flink.graph.library.metric.directed.VertexMetrics.Result;

import java.io.IOException;
import java.text.NumberFormat;

import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;

/**
 * Compute the following vertex metrics in a directed graph:
 *  - number of vertices
 *  - number of edges
 *  - number of unidirectional edges
 *  - number of bidirectional edges
 *  - average degree
 *  - number of triplets
 *  - maximum degree
 *  - maximum out degree
 *  - maximum in degree
 *  - maximum number of triplets
 *
 * @param <K> graph ID type
 * @param <VV> vertex value type
 * @param <EV> edge value type
 */
public class VertexMetrics<K extends Comparable<K>, VV, EV>
extends AbstractGraphAnalytic<K, VV, EV, Result> {

    // Accumulator names registered by VertexMetricsHelper and read back in getResult().
    private static final String VERTEX_COUNT = "vertexCount";

    private static final String UNIDIRECTIONAL_EDGE_COUNT = "unidirectionalEdgeCount";

    private static final String BIDIRECTIONAL_EDGE_COUNT = "bidirectionalEdgeCount";

    private static final String TRIPLET_COUNT = "tripletCount";

    private static final String MAXIMUM_DEGREE = "maximumDegree";

    private static final String MAXIMUM_OUT_DEGREE = "maximumOutDegree";

    private static final String MAXIMUM_IN_DEGREE = "maximumInDegree";

    private static final String MAXIMUM_TRIPLETS = "maximumTriplets";

    // Output sink created in run(); getResult() must not be called before run() executes.
    private VertexMetricsHelper<K> vertexMetricsHelper;

    // Optional configuration
    private boolean includeZeroDegreeVertices = false;

    private int parallelism = PARALLELISM_DEFAULT;

    /**
     * By default only the edge set is processed for the computation of degree.
     * When this flag is set an additional join is performed against the vertex
     * set in order to output vertices with a degree of zero.
     *
     * @param includeZeroDegreeVertices whether to output vertices with a
     *                                  degree of zero
     * @return this
     */
    public VertexMetrics<K, VV, EV> setIncludeZeroDegreeVertices(boolean includeZeroDegreeVertices) {
        this.includeZeroDegreeVertices = includeZeroDegreeVertices;

        return this;
    }

    /**
     * Override the operator parallelism.
     *
     * @param parallelism operator parallelism
     * @return this
     */
    public VertexMetrics<K, VV, EV> setParallelism(int parallelism) {
        this.parallelism = parallelism;

        return this;
    }

    @Override
    public VertexMetrics<K, VV, EV> run(Graph<K, VV, EV> input)
            throws Exception {
        super.run(input);

        // Annotate each vertex with its (degree, out-degree, in-degree) triple.
        DataSet<Vertex<K, Degrees>> vertexDegree = input
            .run(new VertexDegrees<K, VV, EV>()
                .setIncludeZeroDegreeVertices(includeZeroDegreeVertices)
                .setParallelism(parallelism));

        vertexMetricsHelper = new VertexMetricsHelper<>();

        vertexDegree
            .output(vertexMetricsHelper)
                .name("Vertex metrics");

        return this;
    }

    @Override
    public Result getResult() {
        long vertexCount = vertexMetricsHelper.getAccumulator(env, VERTEX_COUNT);
        long unidirectionalEdgeCount = vertexMetricsHelper.getAccumulator(env, UNIDIRECTIONAL_EDGE_COUNT);
        long bidirectionalEdgeCount = vertexMetricsHelper.getAccumulator(env, BIDIRECTIONAL_EDGE_COUNT);
        long tripletCount = vertexMetricsHelper.getAccumulator(env, TRIPLET_COUNT);
        long maximumDegree = vertexMetricsHelper.getAccumulator(env, MAXIMUM_DEGREE);
        long maximumOutDegree = vertexMetricsHelper.getAccumulator(env, MAXIMUM_OUT_DEGREE);
        long maximumInDegree = vertexMetricsHelper.getAccumulator(env, MAXIMUM_IN_DEGREE);
        long maximumTriplets = vertexMetricsHelper.getAccumulator(env, MAXIMUM_TRIPLETS);

        // each edge is counted twice, once from each vertex, so must be halved
        // (tripletCount is counted once per center vertex and is not halved)
        return new Result(vertexCount, unidirectionalEdgeCount / 2, bidirectionalEdgeCount / 2,
            tripletCount, maximumDegree, maximumOutDegree, maximumInDegree, maximumTriplets);
    }

    /**
     * Helper class to collect vertex metrics.
     *
     * @param <T> ID type
     */
    private static class VertexMetricsHelper<T>
    extends AnalyticHelper<Vertex<T, Degrees>> {
        // Per-subtask partial aggregates; merged via accumulators in close().
        private long vertexCount;
        private long unidirectionalEdgeCount;
        private long bidirectionalEdgeCount;
        private long tripletCount;
        private long maximumDegree;
        private long maximumOutDegree;
        private long maximumInDegree;
        private long maximumTriplets;

        @Override
        public void writeRecord(Vertex<T, Degrees> record) throws IOException {
            long degree = record.f1.getDegree().getValue();
            long outDegree = record.f1.getOutDegree().getValue();
            long inDegree = record.f1.getInDegree().getValue();

            // NOTE(review): assumes Degrees#getDegree counts each neighbor once even when linked in
            // both directions, so out + in - degree yields the bidirectional link count at this
            // vertex — confirm against VertexDegrees' definition of "degree".
            long bidirectionalEdges = outDegree + inDegree - degree;

            // n-choose-2 over the vertex degree: paths of length two centered here.
            long triplets = degree * (degree - 1) / 2;

            vertexCount++;
            unidirectionalEdgeCount += degree - bidirectionalEdges;
            bidirectionalEdgeCount += bidirectionalEdges;
            tripletCount += triplets;

            maximumDegree = Math.max(maximumDegree, degree);
            maximumOutDegree = Math.max(maximumOutDegree, outDegree);
            maximumInDegree = Math.max(maximumInDegree, inDegree);
            maximumTriplets = Math.max(maximumTriplets, triplets);
        }

        @Override
        public void close() throws IOException {
            // Publish per-subtask totals; counters sum and maxima take the max across subtasks.
            addAccumulator(VERTEX_COUNT, new LongCounter(vertexCount));
            addAccumulator(UNIDIRECTIONAL_EDGE_COUNT, new LongCounter(unidirectionalEdgeCount));
            addAccumulator(BIDIRECTIONAL_EDGE_COUNT, new LongCounter(bidirectionalEdgeCount));
            addAccumulator(TRIPLET_COUNT, new LongCounter(tripletCount));
            addAccumulator(MAXIMUM_DEGREE, new LongMaximum(maximumDegree));
            addAccumulator(MAXIMUM_OUT_DEGREE, new LongMaximum(maximumOutDegree));
            addAccumulator(MAXIMUM_IN_DEGREE, new LongMaximum(maximumInDegree));
            addAccumulator(MAXIMUM_TRIPLETS, new LongMaximum(maximumTriplets));
        }
    }

    /**
     * Wraps vertex metrics.
     */
    public static class Result
    implements PrintableResult {
        private long vertexCount;
        private long unidirectionalEdgeCount;
        private long bidirectionalEdgeCount;
        private long tripletCount;
        private long maximumDegree;
        private long maximumOutDegree;
        private long maximumInDegree;
        private long maximumTriplets;

        public Result(long vertexCount, long unidirectionalEdgeCount, long bidirectionalEdgeCount,
                long tripletCount, long maximumDegree, long maximumOutDegree, long maximumInDegree,
                long maximumTriplets) {
            this.vertexCount = vertexCount;
            this.unidirectionalEdgeCount = unidirectionalEdgeCount;
            this.bidirectionalEdgeCount = bidirectionalEdgeCount;
            this.tripletCount = tripletCount;
            this.maximumDegree = maximumDegree;
            this.maximumOutDegree = maximumOutDegree;
            this.maximumInDegree = maximumInDegree;
            this.maximumTriplets = maximumTriplets;
        }

        /**
         * Get the number of vertices.
         *
         * @return number of vertices
         */
        public long getNumberOfVertices() {
            return vertexCount;
        }

        /**
         * Get the number of edges.
         *
         * @return number of edges
         */
        public long getNumberOfEdges() {
            // A bidirectional link is two directed edges.
            return unidirectionalEdgeCount + 2 * bidirectionalEdgeCount;
        }

        /**
         * Get the number of unidirectional edges.
         *
         * @return number of unidirectional edges
         */
        public long getNumberOfDirectedEdges() {
            return unidirectionalEdgeCount;
        }

        /**
         * Get the number of bidirectional edges.
         *
         * @return number of bidirectional edges
         */
        public long getNumberOfUndirectedEdges() {
            return bidirectionalEdgeCount;
        }

        /**
         * Get the average degree, the average number of in- plus out-edges per vertex.
         *
         * A result of {@code Float.NaN} is returned for an empty graph for
         * which both the number of edges and number of vertices is zero.
         *
         * @return average degree
         */
        public double getAverageDegree() {
            return vertexCount == 0 ? Double.NaN : getNumberOfEdges() / (double)vertexCount;
        }

        /**
         * Get the density, the ratio of actual to potential edges between vertices.
         *
         * A result of {@code Float.NaN} is returned for a graph with fewer than
         * two vertices for which the number of edges is zero.
         *
         * @return density
         */
        public double getDensity() {
            return vertexCount <= 1 ? Double.NaN : getNumberOfEdges() / (double)(vertexCount*(vertexCount-1));
        }

        /**
         * Get the number of triplets.
         *
         * @return number of triplets
         */
        public long getNumberOfTriplets() {
            return tripletCount;
        }

        /**
         * Get the maximum degree.
         *
         * @return maximum degree
         */
        public long getMaximumDegree() {
            return maximumDegree;
        }

        /**
         * Get the maximum out degree.
         *
         * @return maximum out degree
         */
        public long getMaximumOutDegree() {
            return maximumOutDegree;
        }

        /**
         * Get the maximum in degree.
         *
         * @return maximum in degree
         */
        public long getMaximumInDegree() {
            return maximumInDegree;
        }

        /**
         * Get the maximum triplets.
         *
         * @return maximum triplets
         */
        public long getMaximumTriplets() {
            return maximumTriplets;
        }

        @Override
        public String toPrintableString() {
            NumberFormat nf = NumberFormat.getInstance();

            // format for very small fractional numbers
            NumberFormat ff = NumberFormat.getInstance();
            ff.setMaximumFractionDigits(8);

            return "vertex count: " + nf.format(vertexCount)
                + "; edge count: " + nf.format(getNumberOfEdges())
                + "; unidirectional edge count: " + nf.format(unidirectionalEdgeCount)
                + "; bidirectional edge count: " + nf.format(bidirectionalEdgeCount)
                + "; average degree: " + nf.format(getAverageDegree())
                + "; density: " + ff.format(getDensity())
                + "; triplet count: " + nf.format(tripletCount)
                + "; maximum degree: " + nf.format(maximumDegree)
                + "; maximum out degree: " + nf.format(maximumOutDegree)
                + "; maximum in degree: " + nf.format(maximumInDegree)
                + "; maximum triplets: " + nf.format(maximumTriplets);
        }

        @Override
        public int hashCode() {
            // Since commons-lang3 3.0, HashCodeBuilder#hashCode() delegates to toHashCode().
            return new HashCodeBuilder()
                .append(vertexCount)
                .append(unidirectionalEdgeCount)
                .append(bidirectionalEdgeCount)
                .append(tripletCount)
                .append(maximumDegree)
                .append(maximumOutDegree)
                .append(maximumInDegree)
                .append(maximumTriplets)
                .hashCode();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) { return false; }
            if (obj == this) { return true; }
            if (obj.getClass() != getClass()) { return false; }

            Result rhs = (Result)obj;

            return new EqualsBuilder()
                .append(vertexCount, rhs.vertexCount)
                .append(unidirectionalEdgeCount, rhs.unidirectionalEdgeCount)
                .append(bidirectionalEdgeCount, rhs.bidirectionalEdgeCount)
                .append(tripletCount, rhs.tripletCount)
                .append(maximumDegree, rhs.maximumDegree)
                .append(maximumOutDegree, rhs.maximumOutDegree)
                .append(maximumInDegree, rhs.maximumInDegree)
                .append(maximumTriplets, rhs.maximumTriplets)
                .isEquals();
        }
    }
}
package org.ovirt.engine.core.bll;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.ovirt.engine.core.common.PermissionSubject;
import org.ovirt.engine.core.common.VdcObjectType;
import org.ovirt.engine.core.common.action.PermissionsOperationsParametes;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.ActionGroup;
import org.ovirt.engine.core.common.businessentities.DiskImage;
import org.ovirt.engine.core.common.businessentities.Quota;
import org.ovirt.engine.core.common.businessentities.QuotaEnforcementTypeEnum;
import org.ovirt.engine.core.common.businessentities.QuotaStorage;
import org.ovirt.engine.core.common.businessentities.QuotaVdsGroup;
import org.ovirt.engine.core.common.businessentities.permissions;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.VdcBllMessages;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dao.QuotaDAO;
import org.ovirt.engine.core.dao.StorageDomainDAO;
import org.ovirt.engine.core.dao.StoragePoolDAO;
import org.ovirt.engine.core.dao.VdsGroupDAO;
import org.ovirt.engine.core.utils.Pair;
import org.ovirt.engine.core.utils.log.Log;
import org.ovirt.engine.core.utils.log.LogFactory;

/**
 * Singleton helper for creating, validating and authorizing {@link Quota} entities.
 */
public class QuotaHelper {
    /** Reserved name prefix for automatically generated default quotas. */
    private static final String DEFAULT_QUOTA_NAME_PREFIX = "DefaultQuota-";
    /** Sentinel meaning "no limit" for storage (GB), memory (MB) and vCPU counts. */
    public static final Long UNLIMITED = -1L;
    public static final Long EMPTY = 0L;
    private static final Log log = LogFactory.getLog(QuotaHelper.class);

    private QuotaHelper() {
    }

    private static final QuotaHelper quotaHelper = new QuotaHelper();

    public static QuotaHelper getInstance() {
        return quotaHelper;
    }

    /**
     * Returns the default quota id if quota enforcement is disabled for the
     * <code>Data Center</code>, <BR/>
     * or the quota id that was sent.
     *
     * @param quotaId
     *            - The candidate quota id to consume from.
     * @param storagePool
     *            - The storage pool (Data Center) the quota belongs to.
     * @return The quota id to consume from.
     */
    public Guid getQuotaIdToConsume(Guid quotaId, storage_pool storagePool) {
        Guid returnedQuotaGuid = quotaId;
        if (storagePool == null) {
            log.errorFormat("Storage pool is null, Quota id will be set from the parameter");
        } else if (storagePool.getQuotaEnforcementType() == QuotaEnforcementTypeEnum.DISABLED) {
            // If storage pool has disabled quota enforcement, then initialize default quota.
            log.debugFormat("Storage pool quota is disabled, Quota id which will be consume from is the default DC quota");
            returnedQuotaGuid = getQuotaDAO()
                    .getDefaultQuotaByStoragePoolId(storagePool.getId())
                    .getId();
        }
        return returnedQuotaGuid;
    }

    /**
     * Builds the consume-quota permission subjects for a list of disk images,
     * adding each distinct quota id only once.
     *
     * @param diskImages
     *            - The disk images whose quotas should be authorized.
     * @param storagePool
     *            - The storage pool the images belong to.
     * @return The permission subjects to validate.
     */
    public Collection<PermissionSubject> getPermissionsForDiskImagesList(Collection<DiskImage> diskImages,
            storage_pool storagePool) {
        List<PermissionSubject> permissionSubjectList = new ArrayList<PermissionSubject>();
        Map<Guid, Object> quotaMap = new HashMap<Guid, Object>();

        // Distinct the quotas for images: only the first occurrence of each
        // quota id contributes a permission subject. (The previous code tested
        // containsKey() without negation, so the branch never executed and the
        // returned list was always empty.)
        for (DiskImage diskImage : diskImages) {
            if (!quotaMap.containsKey(diskImage.getQuotaId())) {
                quotaMap.put(diskImage.getQuotaId(), diskImage.getQuotaId());
                permissionSubjectList =
                        addQuotaPermissionSubject(permissionSubjectList, storagePool, diskImage.getQuotaId());
            }
        }
        return permissionSubjectList;
    }

    /**
     * Clears the default-quota flag from the storage pool's current default quota, if any.
     *
     * @param storagePool
     *            - The storage pool whose default quota should become a regular quota.
     */
    public void setDefaultQuotaAsRegularQuota(storage_pool storagePool) {
        Quota quota = getQuotaDAO().getDefaultQuotaByStoragePoolId(storagePool.getId());
        if (quota != null) {
            quota.setIsDefaultQuota(false);
            getQuotaDAO().update(quota);
        }
    }

    /**
     * Appends a CONSUME_QUOTA permission subject for the given quota id, but only
     * when quota enforcement is active for the storage pool.
     *
     * @param quotaPermissionList
     *            - The list to append to (also returned for chaining).
     * @param storagePool
     *            - The storage pool whose enforcement type gates the check.
     * @param quotaId
     *            - The quota to be consumed from.
     * @return The (possibly extended) permission subject list.
     */
    public List<PermissionSubject> addQuotaPermissionSubject(List<PermissionSubject> quotaPermissionList,
            storage_pool storagePool, Guid quotaId) {
        if (storagePool != null && storagePool.getQuotaEnforcementType() != QuotaEnforcementTypeEnum.DISABLED) {
            log.debug("Adding validation for consume quota to permission subjects list");
            quotaPermissionList.add(new PermissionSubject(quotaId, VdcObjectType.Quota, ActionGroup.CONSUME_QUOTA));
        }
        return quotaPermissionList;
    }

    /**
     * Returns unlimited Quota for storage pool.
     *
     * @param storagePool
     *            - The storage pool to create the unlimited Quota for.
     * @param isDefaultQuota
     *            - If the generated quota should be the default one or not
     * @return The quota
     */
    public Quota getUnlimitedQuota(storage_pool storagePool, boolean isDefaultQuota) {
        return getUnlimitedQuota(storagePool, isDefaultQuota, false).getFirst();
    }

    /**
     * Returns unlimited Quota for storage pool.
     *
     * The return value is a {@link Pair} of the unlimited quota for the storage pool and a {@link Boolean}
     * indicating if an existing quota was really reused or not.
     * <b>Notes:</b>
     * 1. If {@code allowReuseExisting} is <code>false</code> the return value will always be
     * a {@link Pair} of a new quota object and <code>false</code>.
     * 2. If the {@link Quota} object could not be created for any reason (e.g., the given
     * storage pool was <code>null</code>), a {@link Pair} of <code>null</code> and <code>false</code>
     * is returned.
     *
     * @param storagePool
     *            - The storage pool to create the unlimited Quota for.
     * @param isDefaultQuota
     *            - If the generated quota should be the default one or not
     * @param allowReuseExisting
     *            - Whether to reuse an existing unlimited quota or not
     * @return A {@link Pair} of a {@link Quota} object and an indication if it was reused or not.
     */
    @SuppressWarnings("null")
    public Pair<Quota, Boolean> getUnlimitedQuota(storage_pool storagePool,
            boolean isDefaultQuota,
            boolean allowReuseExisting) {
        if (storagePool == null || storagePool.getId() == null) {
            log.error("Unlimited Quota cannot be created or reused, Storage pool is not valid ");
            return new Pair<Quota, Boolean>(null, false);
        }

        Quota quota = null;
        boolean isExistingQuotaReused = false;
        if (allowReuseExisting) {
            quota = getQuotaDAO().getDefaultQuotaByStoragePoolId(storagePool.getId());
            isExistingQuotaReused = (quota != null);
        }

        if (!isExistingQuotaReused) {
            quota = generateUnlimitedQuota(storagePool);
        }

        quota.setIsDefaultQuota(isDefaultQuota);
        return new Pair<Quota, Boolean>(quota, isExistingQuotaReused);
    }

    /**
     * Generates a new default quota with unlimited storage, memory and vCPU limits.
     *
     * @param storagePool
     *            - The storage pool to create the unlimited Quota for.
     * @return The generated quota
     */
    private Quota generateUnlimitedQuota(storage_pool storagePool) {
        // Set new Quota definition.
        Quota quota = new Quota();
        Guid quotaId = Guid.NewGuid();
        quota.setId(quotaId);
        quota.setStoragePoolId(storagePool.getId());
        quota.setQuotaName(generateDefaultQuotaName(storagePool));
        quota.setDescription("Automatic generated Quota for Data Center " + storagePool.getname());
        quota.setThresholdVdsGroupPercentage(getQuotaThresholdVdsGroup());
        quota.setThresholdStoragePercentage(getQuotaThresholdStorage());
        quota.setGraceVdsGroupPercentage(getQuotaGraceVdsGroup());
        quota.setGraceStoragePercentage(getQuotaGraceStorage());
        quota.setQuotaVdsGroups(new ArrayList<QuotaVdsGroup>());
        quota.setQuotaStorages(new ArrayList<QuotaStorage>());

        // Set Quota storage capacity definition.
        QuotaStorage quotaStorage = new QuotaStorage();
        quotaStorage.setStorageSizeGB(UNLIMITED);
        quota.setGlobalQuotaStorage(quotaStorage);

        // Set Quota cluster virtual memory definition and virtual CPU definition.
        QuotaVdsGroup quotaVdsGroup = new QuotaVdsGroup();
        quotaVdsGroup.setVirtualCpu(UNLIMITED.intValue());
        quotaVdsGroup.setMemSizeMB(UNLIMITED);
        quota.setGlobalQuotaVdsGroup(quotaVdsGroup);

        return quota;
    }

    /**
     * Generates a new name for a default quota that does not yet exist in the system.
     *
     * @param storagePool
     *            - The storage pool the quota belongs to.
     * @return A new, unused default quota name.
     */
    public String generateDefaultQuotaName(storage_pool storagePool) {
        String quotaName = getDefaultQuotaName(storagePool.getname());
        return getQuotaDAO().getDefaultQuotaName(quotaName);
    }

    public String getDefaultQuotaName(storage_pool storagePool) {
        return getDefaultQuotaName(storagePool.getname());
    }

    public String getDefaultQuotaName(String storagePoolName) {
        return DEFAULT_QUOTA_NAME_PREFIX + storagePoolName;
    }

    /**
     * Validates a quota that is about to be added.
     *
     * @param quota The quota to validate.
     * @param messages Can-do-action messages to append failures to.
     * @return <code>true</code> if the quota may be added.
     */
    public boolean checkQuotaValidationForAdd(Quota quota, List<String> messages) {
        // All common checks
        if (!checkQuotaValidationCommon(quota, messages)) {
            return false;
        }

        // Check quota added is not default quota.
        if (quota.getIsDefaultQuota()) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_CAN_NOT_HAVE_DEFAULT_INDICATION.toString());
            return false;
        }

        return true;
    }

    /**
     * Validates a quota that is about to be edited.
     *
     * @param quota The quota to validate.
     * @param messages Can-do-action messages to append failures to.
     * @return <code>true</code> if the quota may be edited.
     */
    public boolean checkQuotaValidationForEdit(Quota quota, List<String> messages) {
        // All common checks
        if (!checkQuotaValidationCommon(quota, messages)) {
            return false;
        }

        // Check editing the default quota is not allowed for a disabled DC.
        // Note that the check is made vs. the existing quota in the database,
        // in order to prevent making the quota not default if the DC has quota disabled
        Quota oldQuota = getQuotaDAO().getById(quota.getId());
        if (oldQuota != null && oldQuota.getIsDefaultQuota()
                && oldQuota.getQuotaEnforcementType() == QuotaEnforcementTypeEnum.DISABLED) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_CAN_NOT_HAVE_DEFAULT_INDICATION.toString());
            return false;
        }

        return true;
    }

    /** Validation shared by the add and edit flows. */
    private boolean checkQuotaValidationCommon(Quota quota, List<String> messages) {
        if (quota == null) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_IS_NOT_VALID.toString());
            return false;
        }

        // Check if quota name's prefix isn't reserved.
        // In edit - Once a quota is edited, it stops being the default quota, so the default prefix is not allowed
        // In add - the default quota is generated by the AddEmptyStoragePoolCommand, and does not pass in this flow.
        // If a user manually tries to add a quota with this reserved name, it should not be allowed.
        if (!checkQuotaNamePrefixReserved(quota, messages)) {
            return false;
        }

        // Check if quota name exists.
        if (!checkQuotaNameExisting(quota, messages)) {
            return false;
        }

        // If specific Quota for storage is specified
        if (!validateQuotaStorageLimitation(quota, messages)) {
            return false;
        }

        // If specific Quota for VDS Group is specific
        if (!validateQuotaVdsGroupLimitation(quota, messages)) {
            return false;
        }

        return true;
    }

    /**
     * Save new <code>Quota</code> with permissions for ad_element_id to consume from.
     *
     * @param quota
     *            - The quota to be saved
     * @param ad_element_id
     *            - The user which will have consume permissions on the quota.
     */
    public void saveQuotaForUser(Quota quota, Guid ad_element_id) {
        saveOrUpdateQuotaForUser(quota, ad_element_id, false);
    }

    /**
     * Save or update <code>Quota</code> with permissions for ad_element_id to consume from.
     *
     * @param quota
     *            - The quota to be saved
     * @param ad_element_id
     *            - The user which will have consume permissions on the quota.
     * @param reuse
     *            - whether to update an existing quota or create a new one
     */
    public void saveOrUpdateQuotaForUser(Quota quota, Guid ad_element_id, boolean reuse) {
        if (reuse) {
            getQuotaDAO().update(quota);
        } else {
            getQuotaDAO().save(quota);
        }
        permissions perm = new permissions(ad_element_id,
                PredefinedRoles.QUOTA_CONSUMER.getId(),
                quota.getId(),
                VdcObjectType.Quota);
        PermissionsOperationsParametes permParams = new PermissionsOperationsParametes(perm);
        Backend.getInstance().runInternalAction(VdcActionType.AddPermission, permParams);
    }

    /**
     * Helper method which get as an input disk image list for VM or template and returns a list of quotas and their
     * desired limitation to be used.<BR/>
     *
     * @param diskImages
     *            - The disk image list to be grouped by
     * @param NumberOfVms
     *            - Number of VMs when creating the pool.
     * @param blockSparseInitSizeInGB
     *            - The initial size of sparse block size.
     * @return List of summarized requested size for quota.
     */
    public Map<Pair<Guid, Guid>, Double> getQuotaConsumeMapForVmPool(Collection<DiskImage> diskImages,
            Integer NumberOfVms,
            Integer blockSparseInitSizeInGB) {
        Map<Pair<Guid, Guid>, Double> quotaForStorageConsumption = new HashMap<Pair<Guid, Guid>, Double>();
        for (DiskImage disk : diskImages) {
            // NOTE(review): getQuotaConsumeMap builds its key from
            // getstorage_ids().get(0).getValue() while this method uses the raw
            // element - confirm both produce the same key type.
            Pair<Guid, Guid> quotaForStorageKey =
                    new Pair<Guid, Guid>(disk.getQuotaId(), disk.getstorage_ids().get(0));
            Long sizeRequested = disk.getsize() * NumberOfVms * blockSparseInitSizeInGB;
            Double storageRequest = quotaForStorageConsumption.get(quotaForStorageKey);
            if (storageRequest != null) {
                storageRequest += sizeRequested;
            } else {
                storageRequest = Double.valueOf(sizeRequested);
            }
            quotaForStorageConsumption.put(quotaForStorageKey, storageRequest);
        }
        return quotaForStorageConsumption;
    }

    /**
     * Helper method which get as an input disk image list for VM or template and returns a list of quotas and their
     * desired limitation to be used.<BR/>
     *
     * @param diskImages
     *            - The disk image list to be grouped by
     * @return List of summarized requested size for quota.
     */
    public Map<Pair<Guid, Guid>, Double> getQuotaConsumeMap(Collection<DiskImage> diskImages) {
        Map<Pair<Guid, Guid>, Double> quotaForStorageConsumption = new HashMap<Pair<Guid, Guid>, Double>();
        for (DiskImage disk : diskImages) {
            Pair<Guid, Guid> quotaForStorageKey =
                    new Pair<Guid, Guid>(disk.getQuotaId(), disk.getstorage_ids().get(0).getValue());
            Double storageRequest = quotaForStorageConsumption.get(quotaForStorageKey);
            if (storageRequest != null) {
                storageRequest += disk.getsize();
            } else {
                storageRequest = Double.valueOf(disk.getsize());
            }
            quotaForStorageConsumption.put(quotaForStorageKey, storageRequest);
        }
        return quotaForStorageConsumption;
    }

    /**
     * Checks that no other quota with the same name already exists.
     *
     * @param quota The quota to check.
     * @param messages Can-do-action messages to append failures to.
     * @return <code>true</code> if the name is free (or belongs to this quota).
     */
    public boolean checkQuotaNameExisting(Quota quota, List<String> messages) {
        Quota quotaByName = getQuotaDAO().getQuotaByQuotaName(quota.getQuotaName());

        // Check if there is no quota with the same name that already exists.
        if ((quotaByName != null) && (!quotaByName.getId().equals(quota.getId()))) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_NAME_ALREADY_EXISTS.toString());
            return false;
        }
        return true;
    }

    /**
     * Checks that the quota's name does not begin with {@link #DEFAULT_QUOTA_NAME_PREFIX},
     * which is reserved for default quotas.
     *
     * @param quota The quota to check.
     * @param messages A {@link List} of messages that the relevant error message will be appended to if needed.
     * @return <code>true</code> if the quota's name is OK, <code>false</code> otherwise.
     */
    public boolean checkQuotaNamePrefixReserved(Quota quota, List<String> messages) {
        if (quota.getQuotaName().startsWith(DEFAULT_QUOTA_NAME_PREFIX)) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_NAME_RESERVED_FOR_DEFAULT.toString());
            return false;
        }
        return true;
    }

    /**
     * Validate Quota storage restrictions: a quota may not define both a global
     * storage limit and specific per-domain limits.
     *
     * @param quota The quota to validate.
     * @param messages Can-do-action messages to append failures to.
     * @return <code>true</code> if the storage limits are consistent.
     */
    private static boolean validateQuotaStorageLimitation(Quota quota, List<String> messages) {
        boolean isValid = true;
        List<QuotaStorage> quotaStorageList = quota.getQuotaStorages();
        if (quota.isGlobalStorageQuota() && (quotaStorageList != null && !quotaStorageList.isEmpty())) {
            messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_LIMIT_IS_SPECIFIC_AND_GENERAL.toString());
            isValid = false;
        }
        return isValid;
    }

    /**
     * Validate Quota vds group restrictions.
     *
     * @param quota
     *            - Quota we validate
     * @param messages
     *            - Messages of can do action.
     * @return Boolean value if the quota is valid or not.
     */
    private static boolean validateQuotaVdsGroupLimitation(Quota quota, List<String> messages) {
        boolean isValid = true;
        List<QuotaVdsGroup> quotaVdsGroupList = quota.getQuotaVdsGroups();
        if (quotaVdsGroupList != null && !quotaVdsGroupList.isEmpty()) {
            boolean isSpecificVirtualCpu = false;
            boolean isSpecificVirtualRam = false;

            for (QuotaVdsGroup quotaVdsGroup : quotaVdsGroupList) {
                if (quotaVdsGroup.getVirtualCpu() != null) {
                    isSpecificVirtualCpu = true;
                }
                if (quotaVdsGroup.getMemSizeMB() != null) {
                    isSpecificVirtualRam = true;
                }
            }

            // A quota may not define both a global cluster limit and specific
            // per-cluster limits.
            if (quota.isGlobalVdsGroupQuota() && (isSpecificVirtualRam || isSpecificVirtualCpu)) {
                messages.add(VdcBllMessages.ACTION_TYPE_FAILED_QUOTA_LIMIT_IS_SPECIFIC_AND_GENERAL.toString());
                isValid = false;
            }
        }
        return isValid;
    }

    /**
     * @return The VdsGroupDAO
     */
    protected VdsGroupDAO getVdsGroupDao() {
        return DbFacade.getInstance().getVdsGroupDAO();
    }

    /**
     * @return The StorageDomainDAO
     */
    protected StorageDomainDAO getStorageDomainDao() {
        return DbFacade.getInstance().getStorageDomainDAO();
    }

    /**
     * @return The quotaDAO
     */
    protected QuotaDAO getQuotaDAO() {
        return DbFacade.getInstance().getQuotaDAO();
    }

    /**
     * @return The StoragePoolDAO
     */
    protected StoragePoolDAO getStoragePoolDao() {
        return DbFacade.getInstance().getStoragePoolDAO();
    }

    /** @return The VDS Group's quota threshold */
    protected int getQuotaThresholdVdsGroup() {
        return getIntegerConfig(ConfigValues.QuotaThresholdVdsGroup);
    }

    /** @return The Storage's quota threshold */
    protected int getQuotaThresholdStorage() {
        return getIntegerConfig(ConfigValues.QuotaThresholdStorage);
    }

    /** @return The VDS Group's quota grace */
    protected int getQuotaGraceVdsGroup() {
        return getIntegerConfig(ConfigValues.QuotaGraceVdsGroup);
    }

    /** @return The Storage's quota grace */
    protected int getQuotaGraceStorage() {
        return getIntegerConfig(ConfigValues.QuotaGraceStorage);
    }

    /**
     * @param value The required configuration value
     * @return The appropriate int-value from the configuration
     */
    private static int getIntegerConfig(ConfigValues value) {
        return Config.<Integer> GetValue(value);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclUtil; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.io.WritableUtils; import com.google.common.base.Objects; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * CopyListingFileStatus is a specialized subclass of {@link FileStatus} for * attaching additional data members useful to distcp. This class does not * override {@link FileStatus#compareTo}, because the additional data members * are not relevant to sort order. 
*/ @InterfaceAudience.Private public final class CopyListingFileStatus extends FileStatus { private static final byte NO_ACL_ENTRIES = -1; private static final int NO_XATTRS = -1; // Retain static arrays of enum values to prevent repeated allocation of new // arrays during deserialization. private static final AclEntryType[] ACL_ENTRY_TYPES = AclEntryType.values(); private static final AclEntryScope[] ACL_ENTRY_SCOPES = AclEntryScope.values(); private static final FsAction[] FS_ACTIONS = FsAction.values(); private List<AclEntry> aclEntries; private Map<String, byte[]> xAttrs; /** * Default constructor. */ public CopyListingFileStatus() { } /** * Creates a new CopyListingFileStatus by copying the members of the given * FileStatus. * * @param fileStatus FileStatus to copy */ public CopyListingFileStatus(FileStatus fileStatus) throws IOException { super(fileStatus); } /** * Returns the full logical ACL. * * @return List containing full logical ACL */ public List<AclEntry> getAclEntries() { return AclUtil.getAclFromPermAndEntries(getPermission(), aclEntries != null ? aclEntries : Collections.<AclEntry>emptyList()); } /** * Sets optional ACL entries. * * @param aclEntries List containing all ACL entries */ public void setAclEntries(List<AclEntry> aclEntries) { this.aclEntries = aclEntries; } /** * Returns all xAttrs. * * @return Map containing all xAttrs */ public Map<String, byte[]> getXAttrs() { return xAttrs != null ? xAttrs : Collections.<String, byte[]>emptyMap(); } /** * Sets optional xAttrs. * * @param xAttrs Map containing all xAttrs */ public void setXAttrs(Map<String, byte[]> xAttrs) { this.xAttrs = xAttrs; } @Override public void write(DataOutput out) throws IOException { super.write(out); if (aclEntries != null) { // byte is sufficient, because 32 ACL entries is the max enforced by HDFS. 
out.writeByte(aclEntries.size()); for (AclEntry entry: aclEntries) { out.writeByte(entry.getScope().ordinal()); out.writeByte(entry.getType().ordinal()); WritableUtils.writeString(out, entry.getName()); out.writeByte(entry.getPermission().ordinal()); } } else { out.writeByte(NO_ACL_ENTRIES); } if (xAttrs != null) { out.writeInt(xAttrs.size()); Iterator<Entry<String, byte[]>> iter = xAttrs.entrySet().iterator(); while (iter.hasNext()) { Entry<String, byte[]> entry = iter.next(); WritableUtils.writeString(out, entry.getKey()); final byte[] value = entry.getValue(); if (value != null) { out.writeInt(value.length); if (value.length > 0) { out.write(value); } } else { out.writeInt(-1); } } } else { out.writeInt(NO_XATTRS); } } @Override public void readFields(DataInput in) throws IOException { super.readFields(in); byte aclEntriesSize = in.readByte(); if (aclEntriesSize != NO_ACL_ENTRIES) { aclEntries = Lists.newArrayListWithCapacity(aclEntriesSize); for (int i = 0; i < aclEntriesSize; ++i) { aclEntries.add(new AclEntry.Builder() .setScope(ACL_ENTRY_SCOPES[in.readByte()]) .setType(ACL_ENTRY_TYPES[in.readByte()]) .setName(WritableUtils.readString(in)) .setPermission(FS_ACTIONS[in.readByte()]) .build()); } } else { aclEntries = null; } int xAttrsSize = in.readInt(); if (xAttrsSize != NO_XATTRS) { xAttrs = Maps.newHashMap(); for (int i = 0; i < xAttrsSize; ++i) { final String name = WritableUtils.readString(in); final int valueLen = in.readInt(); byte[] value = null; if (valueLen > -1) { value = new byte[valueLen]; if (valueLen > 0) { in.readFully(value); } } xAttrs.put(name, value); } } else { xAttrs = null; } } @Override public boolean equals(Object o) { if (!super.equals(o)) { return false; } if (getClass() != o.getClass()) { return false; } CopyListingFileStatus other = (CopyListingFileStatus)o; return Objects.equal(aclEntries, other.aclEntries) && Objects.equal(xAttrs, other.xAttrs); } @Override public int hashCode() { return Objects.hashCode(super.hashCode(), 
aclEntries, xAttrs); } @Override public String toString() { StringBuilder sb = new StringBuilder(super.toString()); sb.append('{'); sb.append("aclEntries = " + aclEntries); sb.append(", xAttrs = " + xAttrs); sb.append('}'); return sb.toString(); } }
// Copyright 2013 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.dfp.axis.utils.v201403; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import com.google.api.ads.dfp.axis.v201403.BooleanValue; import com.google.api.ads.dfp.axis.v201403.ColumnType; import com.google.api.ads.dfp.axis.v201403.Date; import com.google.api.ads.dfp.axis.v201403.DateTime; import com.google.api.ads.dfp.axis.v201403.DateTimeValue; import com.google.api.ads.dfp.axis.v201403.DateValue; import com.google.api.ads.dfp.axis.v201403.NumberValue; import com.google.api.ads.dfp.axis.v201403.ResultSet; import com.google.api.ads.dfp.axis.v201403.Row; import com.google.api.ads.dfp.axis.v201403.TextValue; import com.google.api.ads.dfp.axis.v201403.Value; import com.google.common.collect.Lists; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Test for {@link Pql}. 
 *
 * @author Adam Rogal
 */
@RunWith(JUnit4.class)
public class PqlTest {

  // Fixed-offset zone (UTC+8, no DST) used to exercise date/time conversion.
  private static final String TIME_ZONE_ID1 = "Asia/Shanghai";

  // Shared fixtures; rebuilt before every test in setUp().
  private ColumnType column1;
  private ColumnType column2;
  private ColumnType column3;
  private TextValue textValue1;
  private TextValue textValue2;
  private TextValue textValue3;
  private BooleanValue booleanValue1;
  private BooleanValue booleanValue2;
  private BooleanValue booleanValue3;
  private NumberValue numberValue1;
  private NumberValue numberValue2;
  private NumberValue numberValue3;
  private NumberValue numberValue4;
  private NumberValue numberValue5;
  private DateValue dateValue1;
  private DateTimeValue dateTimeValue1;
  private DateTime dateTime1;
  private Date date1;

  public PqlTest() {}

  @Before
  public void setUp() throws Exception {
    column1 = new ColumnType();
    column1.setLabelName("column1");
    column2 = new ColumnType();
    column2.setLabelName("column2");
    column3 = new ColumnType();
    column3.setLabelName("column3");
    textValue1 = new TextValue();
    textValue1.setValue("value1");
    textValue2 = new TextValue();
    textValue2.setValue("value2");
    textValue3 = new TextValue();
    textValue3.setValue("value3");
    booleanValue1 = new BooleanValue();
    booleanValue1.setValue(false);
    booleanValue2 = new BooleanValue();
    booleanValue2.setValue(true);
    booleanValue3 = new BooleanValue();
    booleanValue3.setValue(false);
    // Number fixtures cover integer, fractional, negative, empty and null.
    numberValue1 = new NumberValue();
    numberValue1.setValue("1");
    numberValue2 = new NumberValue();
    numberValue2.setValue("1.02");
    numberValue3 = new NumberValue();
    numberValue3.setValue("-1");
    numberValue4 = new NumberValue();
    numberValue4.setValue("");
    numberValue5 = new NumberValue();
    numberValue5.setValue(null);
    // 2012-12-02 12:45:00 in Asia/Shanghai (+08:00).
    dateTime1 = new DateTime();
    date1 = new Date();
    date1.setYear(2012);
    date1.setMonth(12);
    date1.setDay(2);
    dateTime1.setDate(date1);
    dateTime1.setHour(12);
    dateTime1.setMinute(45);
    dateTime1.setSecond(0);
    dateTime1.setTimeZoneID(TIME_ZONE_ID1);
    dateTimeValue1 = new DateTimeValue();
    dateTimeValue1.setValue(dateTime1);
    dateValue1 = new DateValue();
    dateValue1.setValue(date1);
  }

  // String rendering of each populated Value type.
  @Test
  public void testToString() {
    assertEquals("value1", Pql.toString(textValue1));
    assertEquals("false", Pql.toString(booleanValue1));
    assertEquals("1", Pql.toString(numberValue1));
    assertEquals("2012-12-02T12:45:00+08:00", Pql.toString(dateTimeValue1));
    assertEquals("2012-12-02", Pql.toString(dateValue1));
  }

  // Unpopulated Values render as the empty string.
  @Test
  public void testToString_null() {
    assertEquals("", Pql.toString(new TextValue()));
    assertEquals("", Pql.toString(new BooleanValue()));
    assertEquals("", Pql.toString(new NumberValue()));
    assertEquals("", Pql.toString(new DateTimeValue()));
    assertEquals("", Pql.toString(new DateValue()));
  }

  // Unknown Value subclasses are rejected.
  @Test(expected = IllegalArgumentException.class)
  public void testToString_invalidValue() {
    Pql.toString(new MyValue());
  }

  // API-level unwrapping keeps API date/time objects as-is.
  @Test
  public void testGetApiValue() {
    assertEquals("value1", Pql.getApiValue(textValue1));
    assertEquals(false, Pql.getApiValue(booleanValue1));
    assertEquals(1L, Pql.getApiValue(numberValue1));
    assertEquals(1.02, Pql.getApiValue(numberValue2));
    assertEquals(-1L, Pql.getApiValue(numberValue3));
    assertEquals(null, Pql.getApiValue(numberValue4));
    assertEquals(null, Pql.getApiValue(numberValue5));
    assertEquals(dateTime1, Pql.getApiValue(dateTimeValue1));
    assertEquals(date1, Pql.getApiValue(dateValue1));
    assertNull(Pql.getApiValue(new TextValue()));
  }

  // Native unwrapping converts API date/times to native representations.
  @Test
  public void testGetNativeValue() {
    assertEquals("value1", Pql.getNativeValue(textValue1));
    assertEquals(false, Pql.getNativeValue(booleanValue1));
    assertEquals(1L, Pql.getNativeValue(numberValue1));
    assertEquals(1.02, Pql.getNativeValue(numberValue2));
    assertEquals(-1L, Pql.getNativeValue(numberValue3));
    assertEquals(null, Pql.getNativeValue(numberValue4));
    assertEquals(null, Pql.getNativeValue(numberValue5));
    assertEquals(DateTimes.toDateTime(dateTimeValue1.getValue()),
        Pql.getNativeValue(dateTimeValue1));
    assertEquals("2012-12-02", Pql.getNativeValue(dateValue1));
  }

  // Wrapping plain Java objects into the matching Value subclass.
  @Test
  public void testCreateValue() {
    assertEquals("value1", ((TextValue) Pql.createValue("value1")).getValue());
    assertEquals(false, ((BooleanValue) Pql.createValue(false)).getValue());
    assertEquals("1", ((NumberValue) Pql.createValue(1)).getValue());
    assertEquals("1", ((NumberValue) Pql.createValue(1L)).getValue());
    assertEquals("1.02", ((NumberValue) Pql.createValue(1.02)).getValue());
    assertEquals("2012-12-02T12:45:00+08:00",
        DateTimes.toStringWithTimeZone(((DateTimeValue) Pql.createValue(dateTime1)).getValue()));
    assertEquals("2012-12-02",
        DateTimes.toString(((DateValue) Pql.createValue(dateTime1.getDate())).getValue()));
  }

  // Unsupported Java types are rejected.
  @Test(expected = IllegalArgumentException.class)
  public void testCreateValue_invalidType() {
    Pql.createValue(new MyObject());
  }

  // null wraps into an empty TextValue.
  @Test
  public void testCreateValue_null() {
    assertEquals(null, ((TextValue) Pql.createValue(null)).getValue());
  }

  @Test
  public void testGetColumnLabels() {
    ResultSet resultSet = new ResultSet();
    resultSet.setColumnTypes(new ColumnType[] {column1, column2, column3});
    assertEquals(Lists.newArrayList(new String[] {"column1", "column2", "column3"}),
        Pql.getColumnLabels(resultSet));
  }

  @Test
  public void testGetRowStringValues() {
    Row row = new Row();
    row.setValues(new Value[] {textValue1, booleanValue1, numberValue2});
    assertEquals(
        Lists.newArrayList(new String[] {"value1", "false", "1.02"}),
        Pql.getRowStringValues(row));
  }

  // Combining preserves column types and concatenates rows in order.
  @Test
  public void testCombineResultSet() {
    Row row1 = new Row();
    row1.setValues(new Value[] {textValue1, booleanValue1, numberValue1});
    Row row2 = new Row();
    row2.setValues(new Value[] {textValue2, booleanValue2, numberValue2});
    Row row3 = new Row();
    row3.setValues(new Value[] {textValue3, booleanValue3, numberValue3});
    ResultSet resultSet1 = new ResultSet();
    resultSet1.setColumnTypes(new ColumnType[] {column1, column2, column3});
    resultSet1.setRows(new Row[] {row1, row2});
    ResultSet resultSet2 = new ResultSet();
    resultSet2.setColumnTypes(new ColumnType[] {column1, column2, column3});
    resultSet2.setRows(new Row[] {row3});

    ResultSet combinedResultSet = Pql.combineResultSets(resultSet1, resultSet2);

    assertEquals(3, combinedResultSet.getRows().length);
    assertArrayEquals(
        new ColumnType[] {column1, column2, column3},
        combinedResultSet.getColumnTypes());
    assertArrayEquals(new Value[] {textValue1, booleanValue1, numberValue1},
        combinedResultSet.getRows()[0].getValues());
    assertArrayEquals(new Value[] {textValue2, booleanValue2, numberValue2},
        combinedResultSet.getRows()[1].getValues());
    assertArrayEquals(new Value[] {textValue3, booleanValue3, numberValue3},
        combinedResultSet.getRows()[2].getValues());
  }

  // Mismatched column sets cannot be combined.
  @Test(expected = IllegalArgumentException.class)
  public void testCombineResultSet_badColumns() {
    Row row1 = new Row();
    row1.setValues(new Value[] {textValue1, booleanValue1, numberValue1});
    Row row2 = new Row();
    row2.setValues(new Value[] {textValue2, booleanValue2, numberValue2});
    Row row3 = new Row();
    row3.setValues(new Value[] {textValue3, booleanValue3});
    ResultSet resultSet1 = new ResultSet();
    resultSet1.setColumnTypes(new ColumnType[] {column1, column2, column3});
    resultSet1.setRows(new Row[] {row1, row2});
    ResultSet resultSet2 = new ResultSet();
    resultSet2.setColumnTypes(new ColumnType[] {column1, column2});
    resultSet2.setRows(new Row[] {row3});

    Pql.combineResultSets(resultSet1, resultSet2);
  }

  // Minimal subclasses used to trigger the invalid-type paths above.
  private static class MyValue extends Value {}

  private static class MyObject extends Object {}
}
/* * Copyright 2009 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.jboss.netty.handler.codec.string; import static org.junit.Assert.*; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Random; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.frame.DelimiterBasedFrameDecoder; import org.jboss.netty.handler.codec.frame.Delimiters; import org.jboss.netty.util.CharsetUtil; import org.jboss.netty.util.TestUtil; import org.jboss.netty.util.internal.ExecutorUtil; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; /** * @author <a href="http://www.jboss.org/netty/">The Netty Project</a> * @author <a href="http://gleamynode.net/">Trustin Lee</a> * * @version $Rev: 2119 $, $Date: 2010-02-01 20:46:09 +0900 (Mon, 01 Feb 2010) $ * */ 
public abstract class AbstractSocketStringEchoTest { static final Random random = new Random(); static final String[] data = new String[1024]; private static ExecutorService executor; static { for (int i = 0; i < data.length; i ++) { int eLen = random.nextInt(512); char[] e = new char[eLen]; for (int j = 0; j < eLen; j ++) { e[j] = (char) ('a' + random.nextInt(26)); } data[i] = new String(e); } } @BeforeClass public static void init() { executor = Executors.newCachedThreadPool(); } @AfterClass public static void destroy() { ExecutorUtil.terminate(executor); } protected abstract ChannelFactory newServerSocketChannelFactory(Executor executor); protected abstract ChannelFactory newClientSocketChannelFactory(Executor executor); @Test public void testStringEcho() throws Throwable { ServerBootstrap sb = new ServerBootstrap(newServerSocketChannelFactory(executor)); ClientBootstrap cb = new ClientBootstrap(newClientSocketChannelFactory(executor)); EchoHandler sh = new EchoHandler(); EchoHandler ch = new EchoHandler(); sb.getPipeline().addLast("framer", new DelimiterBasedFrameDecoder(512, Delimiters.lineDelimiter())); sb.getPipeline().addLast("decoder", new StringDecoder(CharsetUtil.ISO_8859_1)); sb.getPipeline().addBefore("decoder", "encoder", new StringEncoder(CharsetUtil.ISO_8859_1)); sb.getPipeline().addAfter("decoder", "handler", sh); cb.getPipeline().addLast("framer", new DelimiterBasedFrameDecoder(512, Delimiters.lineDelimiter())); cb.getPipeline().addLast("decoder", new StringDecoder(CharsetUtil.ISO_8859_1)); cb.getPipeline().addBefore("decoder", "encoder", new StringEncoder(CharsetUtil.ISO_8859_1)); cb.getPipeline().addAfter("decoder", "handler", ch); Channel sc = sb.bind(new InetSocketAddress(0)); int port = ((InetSocketAddress) sc.getLocalAddress()).getPort(); ChannelFuture ccf = cb.connect(new InetSocketAddress(TestUtil.getLocalHost(), port)); assertTrue(ccf.awaitUninterruptibly().isSuccess()); Channel cc = ccf.getChannel(); for (String element : data) { String 
delimiter = random.nextBoolean() ? "\r\n" : "\n"; cc.write(element + delimiter); } while (ch.counter < data.length) { if (sh.exception.get() != null) { break; } if (ch.exception.get() != null) { break; } try { Thread.sleep(1); } catch (InterruptedException e) { // Ignore. } } while (sh.counter < data.length) { if (sh.exception.get() != null) { break; } if (ch.exception.get() != null) { break; } try { Thread.sleep(1); } catch (InterruptedException e) { // Ignore. } } sh.channel.close().awaitUninterruptibly(); ch.channel.close().awaitUninterruptibly(); sc.close().awaitUninterruptibly(); if (sh.exception.get() != null && !(sh.exception.get() instanceof IOException)) { throw sh.exception.get(); } if (ch.exception.get() != null && !(ch.exception.get() instanceof IOException)) { throw ch.exception.get(); } if (sh.exception.get() != null) { throw sh.exception.get(); } if (ch.exception.get() != null) { throw ch.exception.get(); } } private class EchoHandler extends SimpleChannelUpstreamHandler { volatile Channel channel; final AtomicReference<Throwable> exception = new AtomicReference<Throwable>(); volatile int counter; EchoHandler() { super(); } @Override public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception { channel = e.getChannel(); } @Override public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception { String m = (String) e.getMessage(); assertEquals(data[counter], m); if (channel.getParent() != null) { String delimiter = random.nextBoolean() ? "\r\n" : "\n"; channel.write(m + delimiter); } counter ++; } @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception { if (exception.compareAndSet(null, e.getCause())) { e.getChannel().close(); } } } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2012.02.03 at 05:17:30 PM GMT+05:30 // package jlibs.wadl.model; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyAttribute; import javax.xml.bind.annotation.XmlAnyElement; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.namespace.QName; import org.w3c.dom.Element; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element ref="{http://wadl.dev.java.net/2009/02}doc" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{http://wadl.dev.java.net/2009/02}request" minOccurs="0"/> * &lt;element ref="{http://wadl.dev.java.net/2009/02}response" maxOccurs="unbounded" minOccurs="0"/> * &lt;any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;attribute name="id" type="{http://www.w3.org/2001/XMLSchema}ID" /> * &lt;attribute name="name" type="{http://wadl.dev.java.net/2009/02}Method" /> * &lt;attribute name="href" type="{http://www.w3.org/2001/XMLSchema}anyURI" /> * &lt;anyAttribute processContents='lax' namespace='##other'/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "doc", "request", "response", "any" }) @XmlRootElement(name = "method") public class Method { protected List<Doc> doc; protected Request request; protected List<Response> response; @XmlAnyElement(lax = true) protected List<Object> any; @XmlAttribute @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute protected String name; @XmlAttribute @XmlSchemaType(name = "anyURI") protected String href; @XmlAnyAttribute private Map<QName, String> otherAttributes = new HashMap<QName, String>(); /** * Gets the value of the doc property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the doc property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getDoc().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Doc } * * */ public List<Doc> getDoc() { if (doc == null) { doc = new ArrayList<Doc>(); } return this.doc; } /** * Gets the value of the request property. * * @return * possible object is * {@link Request } * */ public Request getRequest() { return request; } /** * Sets the value of the request property. * * @param value * allowed object is * {@link Request } * */ public void setRequest(Request value) { this.request = value; } /** * Gets the value of the response property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the response property. * * <p> * For example, to add a new item, do as follows: * <pre> * getResponse().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Response } * * */ public List<Response> getResponse() { if (response == null) { response = new ArrayList<Response>(); } return this.response; } /** * Gets the value of the any property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the any property. * * <p> * For example, to add a new item, do as follows: * <pre> * getAny().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Element } * {@link Object } * * */ public List<Object> getAny() { if (any == null) { any = new ArrayList<Object>(); } return this.any; } /** * Gets the value of the id property. 
* * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets a map that contains attributes that aren't bound to any typed property on this class. * * <p> * the map is keyed by the name of the attribute and * the value is the string value of the attribute. * * the map returned by this method is live, and you can add new attribute * by updating the map directly. Because of this design, there's no setter. * * * @return * always non-null */ public Map<QName, String> getOtherAttributes() { return otherAttributes; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.blockmanagement; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.StripedFileTestUtil; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.INodeFile; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.Whitebox; import org.apache.log4j.Level; import org.junit.After; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import 
java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * Tests that the NameNode handles striped (erasure-coded) blocks correctly in
 * a rack-aware topology: scheduling reconstruction, choosing excess replicas
 * to delete, and reconstructing during decommission.
 */
public class TestReconstructStripedBlocksWithRackAwareness {
  public static final Logger LOG = LoggerFactory.getLogger(
      TestReconstructStripedBlocksWithRackAwareness.class);

  static {
    // Verbose placement/block-manager logging to diagnose scheduling decisions.
    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.ALL);
    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
  }

  // EC layout under test comes entirely from the default policy.
  private final ErasureCodingPolicy ecPolicy =
      StripedFileTestUtil.getDefaultECPolicy();
  private final int cellSize = ecPolicy.getCellSize();
  private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
  private final short parityBlocks = (short) ecPolicy.getNumParityUnits();

  // One host per internal block plus one spare, spread over `dataBlocks` racks.
  private final String[] hosts = getHosts(dataBlocks + parityBlocks + 1);
  private final String[] racks =
      getRacks(dataBlocks + parityBlocks + 1, dataBlocks);

  /** Generates host names "host1".."host{numHosts}". */
  private static String[] getHosts(int numHosts) {
    String[] hosts = new String[numHosts];
    for (int i = 0; i < hosts.length; i++) {
      hosts[i] = "host" + (i + 1);
    }
    return hosts;
  }

  /**
   * Spreads numHosts over numRacks ("/r1".."/r{numRacks}") as evenly as
   * possible; the first (numHosts % numRacks) racks get one extra host.
   */
  private static String[] getRacks(int numHosts, int numRacks) {
    String[] racks = new String[numHosts];
    int numHostEachRack = numHosts / numRacks;
    int residue = numHosts % numRacks;
    int j = 0;
    for (int i = 1; i <= numRacks; i++) {
      // Racks 1..residue receive numHostEachRack + 1 hosts, the rest receive numHostEachRack.
      int limit = i <= residue ? numHostEachRack + 1 : numHostEachRack;
      for (int k = 0; k < limit; k++) {
        racks[j++] = "/r" + i;
      }
    }
    assert j == numHosts;
    return racks;
  }

  private MiniDFSCluster cluster;
  private static final HdfsConfiguration conf = new HdfsConfiguration();
  private DistributedFileSystem fs;

  @BeforeClass
  public static void setup() throws Exception {
    // Speed up redundancy/decommission monitors so the tests converge quickly.
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY,
        false);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY, 1);
  }

  @After
  public void tearDown() {
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  /**
   * Stops the datanode with the given hostname and marks it dead on the
   * NameNode side; returns its properties for a later restart, or null if no
   * datanode matched.
   */
  private MiniDFSCluster.DataNodeProperties stopDataNode(String hostname)
      throws IOException {
    MiniDFSCluster.DataNodeProperties dnProp = null;
    for (int i = 0; i < cluster.getDataNodes().size(); i++) {
      DataNode dn = cluster.getDataNodes().get(i);
      if (dn.getDatanodeId().getHostName().equals(hostname)) {
        dnProp = cluster.stopDataNode(i);
        cluster.setDataNodeDead(dn.getDatanodeId());
        LOG.info("stop datanode " + dn.getDatanodeId().getHostName());
      }
    }
    return dnProp;
  }

  /** Returns the running datanode with the given hostname, or null. */
  private DataNode getDataNode(String host) {
    for (DataNode dn : cluster.getDataNodes()) {
      if (dn.getDatanodeId().getHostName().equals(host)) {
        return dn;
      }
    }
    return null;
  }

  /**
   * When there are all the internal blocks available but they are not placed on
   * enough racks, NameNode should avoid normal decoding reconstruction but copy
   * an internal block to a new rack.
   *
   * In this test, we first need to create a scenario that a striped block has
   * all the internal blocks but distributed in <6 racks. Then we check if the
   * redundancy monitor can correctly schedule the reconstruction work for it.
   */
  @Test
  public void testReconstructForNotEnoughRacks() throws Exception {
    LOG.info("cluster hosts: {}, racks: {}", Arrays.asList(hosts),
        Arrays.asList(racks));
    cluster = new MiniDFSCluster.Builder(conf).racks(racks).hosts(hosts)
        .numDataNodes(hosts.length).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    fs.enableErasureCodingPolicy(
        StripedFileTestUtil.getDefaultECPolicy().getName());
    fs.setErasureCodingPolicy(new Path("/"),
        StripedFileTestUtil.getDefaultECPolicy().getName());
    FSNamesystem fsn = cluster.getNamesystem();
    BlockManager bm = fsn.getBlockManager();

    // Stop the last host before writing, so one rack is missing during creation.
    MiniDFSCluster.DataNodeProperties lastHost = stopDataNode(
        hosts[hosts.length - 1]);
    final Path file = new Path("/foo");
    // the file's block is in 9 dn but 5 racks
    DFSTestUtil.createFile(fs, file, cellSize * dataBlocks * 2, (short) 1, 0L);
    Assert.assertEquals(0, bm.numOfUnderReplicatedBlocks());

    final INodeFile fileNode = fsn.getFSDirectory()
        .getINode4Write(file.toString()).asFile();
    BlockInfoStriped blockInfo = (BlockInfoStriped) fileNode.getLastBlock();

    // we now should have 9 internal blocks distributed in 5 racks
    Set<String> rackSet = new HashSet<>();
    for (DatanodeStorageInfo storage : blockInfo.storages) {
      rackSet.add(storage.getDatanodeDescriptor().getNetworkLocation());
    }
    Assert.assertEquals(dataBlocks - 1, rackSet.size());

    // restart the stopped datanode
    cluster.restartDataNode(lastHost);
    cluster.waitActive();

    // make sure we have 6 racks again
    NetworkTopology topology = bm.getDatanodeManager().getNetworkTopology();
    Assert.assertEquals(hosts.length, topology.getNumOfLeaves());
    Assert.assertEquals(dataBlocks, topology.getNumOfRacks());

    // pause all the heartbeats so replication state stays as we arranged it
    for (DataNode dn : cluster.getDataNodes()) {
      DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, true);
    }

    fsn.writeLock();
    try {
      bm.processMisReplicatedBlocks();
    } finally {
      fsn.writeUnlock();
    }

    // check if redundancy monitor correctly schedule the reconstruction work.
    // Expect a plain copy (blocks-to-be-replicated) rather than EC decoding
    // (blocks-to-be-erasure-coded) on exactly one node.
    boolean scheduled = false;
    for (int i = 0; i < 5; i++) { // retry 5 times
      for (DatanodeStorageInfo storage : blockInfo.storages) {
        if (storage != null) {
          DatanodeDescriptor dn = storage.getDatanodeDescriptor();
          Assert.assertEquals(0, dn.getNumberOfBlocksToBeErasureCoded());
          if (dn.getNumberOfBlocksToBeReplicated() == 1) {
            scheduled = true;
          }
        }
      }
      if (scheduled) {
        break;
      }
      Thread.sleep(1000);
    }
    Assert.assertTrue(scheduled);
  }

  /**
   * Creates a striped file while one host is down, brings that host back after
   * stopping host1 (forcing reconstruction elsewhere), then restarts host1 and
   * verifies the NameNode deletes host1's now-excess replica so the block ends
   * up spread over the full set of racks.
   */
  @Test
  public void testChooseExcessReplicasToDelete() throws Exception {
    cluster = new MiniDFSCluster.Builder(conf).racks(racks).hosts(hosts)
        .numDataNodes(hosts.length).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    fs.enableErasureCodingPolicy(
        StripedFileTestUtil.getDefaultECPolicy().getName());
    fs.setErasureCodingPolicy(new Path("/"),
        StripedFileTestUtil.getDefaultECPolicy().getName());

    MiniDFSCluster.DataNodeProperties lastHost = stopDataNode(
        hosts[hosts.length - 1]);
    final Path file = new Path("/foo");
    DFSTestUtil.createFile(fs, file, cellSize * dataBlocks * 2, (short) 1, 0L);

    // stop host1
    MiniDFSCluster.DataNodeProperties host1 = stopDataNode("host1");
    // bring last host back
    cluster.restartDataNode(lastHost);
    cluster.waitActive();

    // wait for reconstruction to finish
    final short blockNum = (short) (dataBlocks + parityBlocks);
    DFSTestUtil.waitForReplication(fs, file, blockNum, 15 * 1000);

    // restart host1
    cluster.restartDataNode(host1);
    cluster.waitActive();
    for (DataNode dn : cluster.getDataNodes()) {
      if (dn.getDatanodeId().getHostName().equals("host1")) {
        DataNodeTestUtils.triggerBlockReport(dn);
        break;
      }
    }

    // make sure the excess replica is detected, and we delete host1's replica
    // so that we have 6 racks
    DFSTestUtil.waitForReplication(fs, file, blockNum, 15 * 1000);
    LocatedBlocks blks = fs.getClient().getLocatedBlocks(file.toString(), 0);
    LocatedStripedBlock block = (LocatedStripedBlock) blks.getLastLocatedBlock();
    for (DatanodeInfo dn : block.getLocations()) {
      Assert.assertFalse(dn.getHostName().equals("host1"));
    }
  }
/** * In case we have 10 internal blocks on 5 racks, where 9 of blocks are live * and 1 decommissioning, make sure the reconstruction happens correctly. */ @Test public void testReconstructionWithDecommission() throws Exception { final String[] rackNames = getRacks(dataBlocks + parityBlocks + 2, dataBlocks); final String[] hostNames = getHosts(dataBlocks + parityBlocks + 2); // we now have 11 hosts on 6 racks with distribution: 2-2-2-2-2-1 cluster = new MiniDFSCluster.Builder(conf).racks(rackNames).hosts(hostNames) .numDataNodes(hostNames.length).build(); cluster.waitActive(); fs = cluster.getFileSystem(); fs.enableErasureCodingPolicy( StripedFileTestUtil.getDefaultECPolicy().getName()); fs.setErasureCodingPolicy(new Path("/"), StripedFileTestUtil.getDefaultECPolicy().getName()); final BlockManager bm = cluster.getNamesystem().getBlockManager(); final DatanodeManager dm = bm.getDatanodeManager(); // stop h9 and h10 and create a file with 6+3 internal blocks MiniDFSCluster.DataNodeProperties h9 = stopDataNode(hostNames[hostNames.length - 3]); MiniDFSCluster.DataNodeProperties h10 = stopDataNode(hostNames[hostNames.length - 2]); final Path file = new Path("/foo"); DFSTestUtil.createFile(fs, file, cellSize * dataBlocks * 2, (short) 1, 0L); final BlockInfo blockInfo = cluster.getNamesystem().getFSDirectory() .getINode(file.toString()).asFile().getLastBlock(); // bring h9 back cluster.restartDataNode(h9); cluster.waitActive(); // stop h11 so that the reconstruction happens MiniDFSCluster.DataNodeProperties h11 = stopDataNode(hostNames[hostNames.length - 1]); boolean recovered = bm.countNodes(blockInfo).liveReplicas() >= dataBlocks + parityBlocks; for (int i = 0; i < 10 & !recovered; i++) { Thread.sleep(1000); recovered = bm.countNodes(blockInfo).liveReplicas() >= dataBlocks + parityBlocks; } Assert.assertTrue(recovered); // mark h9 as decommissioning DataNode datanode9 = getDataNode(hostNames[hostNames.length - 3]); Assert.assertNotNull(datanode9); final 
DatanodeDescriptor dn9 = dm.getDatanode(datanode9.getDatanodeId()); dn9.startDecommission(); // restart h10 and h11 cluster.restartDataNode(h10); cluster.restartDataNode(h11); cluster.waitActive(); DataNodeTestUtils.triggerBlockReport( getDataNode(hostNames[hostNames.length - 1])); // start decommissioning h9 boolean satisfied = bm.isPlacementPolicySatisfied(blockInfo); Assert.assertFalse(satisfied); final DatanodeAdminManager decomManager = (DatanodeAdminManager) Whitebox.getInternalState( dm, "datanodeAdminManager"); cluster.getNamesystem().writeLock(); try { dn9.stopDecommission(); decomManager.startDecommission(dn9); } finally { cluster.getNamesystem().writeUnlock(); } // make sure the decommission finishes and the block in on 6 racks boolean decommissioned = dn9.isDecommissioned(); for (int i = 0; i < 10 && !decommissioned; i++) { Thread.sleep(1000); decommissioned = dn9.isDecommissioned(); } Assert.assertTrue(decommissioned); Assert.assertTrue(bm.isPlacementPolicySatisfied(blockInfo)); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: Documents/Types/DocumentTypeDescription.proto package Diadoc.Api.Proto.Documents.Types; public final class DocumentTypeDescriptionProtos { private DocumentTypeDescriptionProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface DetectedDocumentTypeOrBuilder extends // @@protoc_insertion_point(interface_extends:Diadoc.Api.Proto.Documents.Types.DetectedDocumentType) com.google.protobuf.MessageOrBuilder { /** * <code>required string TypeNamedId = 1;</code> */ boolean hasTypeNamedId(); /** * <code>required string TypeNamedId = 1;</code> */ java.lang.String getTypeNamedId(); /** * <code>required string TypeNamedId = 1;</code> */ com.google.protobuf.ByteString getTypeNamedIdBytes(); /** * <code>required string Function = 2;</code> */ boolean hasFunction(); /** * <code>required string Function = 2;</code> */ java.lang.String getFunction(); /** * <code>required string Function = 2;</code> */ com.google.protobuf.ByteString getFunctionBytes(); /** * <code>required string Version = 3;</code> */ boolean hasVersion(); /** * <code>required string Version = 3;</code> */ java.lang.String getVersion(); /** * <code>required string Version = 3;</code> */ com.google.protobuf.ByteString getVersionBytes(); } /** * Protobuf type {@code Diadoc.Api.Proto.Documents.Types.DetectedDocumentType} */ public static final class DetectedDocumentType extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Diadoc.Api.Proto.Documents.Types.DetectedDocumentType) DetectedDocumentTypeOrBuilder { // Use DetectedDocumentType.newBuilder() to construct. 
private DetectedDocumentType(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DetectedDocumentType(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DetectedDocumentType defaultInstance; public static DetectedDocumentType getDefaultInstance() { return defaultInstance; } public DetectedDocumentType getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DetectedDocumentType( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; typeNamedId_ = bs; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; function_ = bs; break; } case 26: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; version_ = bs; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.class, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder.class); } public static com.google.protobuf.Parser<DetectedDocumentType> PARSER = new com.google.protobuf.AbstractParser<DetectedDocumentType>() { public DetectedDocumentType parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DetectedDocumentType(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DetectedDocumentType> getParserForType() { return PARSER; } private int bitField0_; public static final int TYPENAMEDID_FIELD_NUMBER = 1; private java.lang.Object typeNamedId_; /** * <code>required string TypeNamedId = 1;</code> */ public boolean hasTypeNamedId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string TypeNamedId = 1;</code> */ public java.lang.String getTypeNamedId() { java.lang.Object ref = typeNamedId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { typeNamedId_ = s; } return s; } } /** * <code>required string TypeNamedId = 1;</code> */ public com.google.protobuf.ByteString getTypeNamedIdBytes() { 
java.lang.Object ref = typeNamedId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); typeNamedId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FUNCTION_FIELD_NUMBER = 2; private java.lang.Object function_; /** * <code>required string Function = 2;</code> */ public boolean hasFunction() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string Function = 2;</code> */ public java.lang.String getFunction() { java.lang.Object ref = function_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { function_ = s; } return s; } } /** * <code>required string Function = 2;</code> */ public com.google.protobuf.ByteString getFunctionBytes() { java.lang.Object ref = function_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); function_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VERSION_FIELD_NUMBER = 3; private java.lang.Object version_; /** * <code>required string Version = 3;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string Version = 3;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { version_ = s; } return s; } } /** * <code>required string Version = 3;</code> */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof 
java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { typeNamedId_ = ""; function_ = ""; version_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTypeNamedId()) { memoizedIsInitialized = 0; return false; } if (!hasFunction()) { memoizedIsInitialized = 0; return false; } if (!hasVersion()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getTypeNamedIdBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getFunctionBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getVersionBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getTypeNamedIdBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getFunctionBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getVersionBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); 
} public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Diadoc.Api.Proto.Documents.Types.DetectedDocumentType} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:Diadoc.Api.Proto.Documents.Types.DetectedDocumentType) Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_fieldAccessorTable 
.ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.class, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder.class); } // Construct using Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); typeNamedId_ = ""; bitField0_ = (bitField0_ & ~0x00000001); function_ = ""; bitField0_ = (bitField0_ & ~0x00000002); version_ = ""; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor; } public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType getDefaultInstanceForType() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.getDefaultInstance(); } public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType build() { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType buildPartial() { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType result = new 
Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.typeNamedId_ = typeNamedId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.function_ = function_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.version_ = version_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType) { return mergeFrom((Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType other) { if (other == Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.getDefaultInstance()) return this; if (other.hasTypeNamedId()) { bitField0_ |= 0x00000001; typeNamedId_ = other.typeNamedId_; onChanged(); } if (other.hasFunction()) { bitField0_ |= 0x00000002; function_ = other.function_; onChanged(); } if (other.hasVersion()) { bitField0_ |= 0x00000004; version_ = other.version_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTypeNamedId()) { return false; } if (!hasFunction()) { return false; } if (!hasVersion()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object typeNamedId_ = ""; /** * <code>required string TypeNamedId = 1;</code> */ public boolean hasTypeNamedId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string TypeNamedId = 1;</code> */ public java.lang.String getTypeNamedId() { java.lang.Object ref = typeNamedId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { typeNamedId_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string TypeNamedId = 1;</code> */ public com.google.protobuf.ByteString getTypeNamedIdBytes() { java.lang.Object ref = typeNamedId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); typeNamedId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string TypeNamedId = 1;</code> */ public Builder setTypeNamedId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; typeNamedId_ = value; onChanged(); return this; } /** * <code>required string TypeNamedId = 1;</code> */ public Builder clearTypeNamedId() { bitField0_ = (bitField0_ & ~0x00000001); typeNamedId_ = getDefaultInstance().getTypeNamedId(); onChanged(); return this; } /** * <code>required string TypeNamedId = 1;</code> */ public Builder setTypeNamedIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; typeNamedId_ = value; onChanged(); return 
this; } private java.lang.Object function_ = ""; /** * <code>required string Function = 2;</code> */ public boolean hasFunction() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string Function = 2;</code> */ public java.lang.String getFunction() { java.lang.Object ref = function_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { function_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string Function = 2;</code> */ public com.google.protobuf.ByteString getFunctionBytes() { java.lang.Object ref = function_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); function_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string Function = 2;</code> */ public Builder setFunction( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; function_ = value; onChanged(); return this; } /** * <code>required string Function = 2;</code> */ public Builder clearFunction() { bitField0_ = (bitField0_ & ~0x00000002); function_ = getDefaultInstance().getFunction(); onChanged(); return this; } /** * <code>required string Function = 2;</code> */ public Builder setFunctionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; function_ = value; onChanged(); return this; } private java.lang.Object version_ = ""; /** * <code>required string Version = 3;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string Version = 3;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = 
(com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { version_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string Version = 3;</code> */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string Version = 3;</code> */ public Builder setVersion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; version_ = value; onChanged(); return this; } /** * <code>required string Version = 3;</code> */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000004); version_ = getDefaultInstance().getVersion(); onChanged(); return this; } /** * <code>required string Version = 3;</code> */ public Builder setVersionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; version_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:Diadoc.Api.Proto.Documents.Types.DetectedDocumentType) } static { defaultInstance = new DetectedDocumentType(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:Diadoc.Api.Proto.Documents.Types.DetectedDocumentType) } public interface DetectDocumentTypesResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse) com.google.protobuf.MessageOrBuilder { /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> getDocumentTypesList(); /** * <code>repeated 
.Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType getDocumentTypes(int index); /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ int getDocumentTypesCount(); /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ java.util.List<? extends Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder> getDocumentTypesOrBuilderList(); /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder getDocumentTypesOrBuilder( int index); } /** * Protobuf type {@code Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse} */ public static final class DetectDocumentTypesResponse extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse) DetectDocumentTypesResponseOrBuilder { // Use DetectDocumentTypesResponse.newBuilder() to construct. 
private DetectDocumentTypesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DetectDocumentTypesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DetectDocumentTypesResponse defaultInstance; public static DetectDocumentTypesResponse getDefaultInstance() { return defaultInstance; } public DetectDocumentTypesResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DetectDocumentTypesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { documentTypes_ = new java.util.ArrayList<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType>(); mutable_bitField0_ |= 0x00000001; } documentTypes_.add(input.readMessage(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { documentTypes_ = java.util.Collections.unmodifiableList(documentTypes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.class, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.Builder.class); } public static com.google.protobuf.Parser<DetectDocumentTypesResponse> PARSER = new com.google.protobuf.AbstractParser<DetectDocumentTypesResponse>() { public DetectDocumentTypesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DetectDocumentTypesResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DetectDocumentTypesResponse> getParserForType() { return PARSER; } public static final int DOCUMENTTYPES_FIELD_NUMBER = 1; private java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> documentTypes_; /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ public java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> getDocumentTypesList() { return documentTypes_; } /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType 
DocumentTypes = 1;</code> */ public java.util.List<? extends Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder> getDocumentTypesOrBuilderList() { return documentTypes_; } /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ public int getDocumentTypesCount() { return documentTypes_.size(); } /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType getDocumentTypes(int index) { return documentTypes_.get(index); } /** * <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */ public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder getDocumentTypesOrBuilder( int index) { return documentTypes_.get(index); } private void initFields() { documentTypes_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getDocumentTypesCount(); i++) { if (!getDocumentTypes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < documentTypes_.size(); i++) { output.writeMessage(1, documentTypes_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < documentTypes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, documentTypes_.get(i)); } size += getUnknownFields().getSerializedSize(); 
memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
PARSER.parseDelimitedFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse) Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.class, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.Builder.class); } // Construct using Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getDocumentTypesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (documentTypesBuilder_ == null) { documentTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { documentTypesBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor; } public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse getDefaultInstanceForType() { return Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.getDefaultInstance(); } public 
Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse build() { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse buildPartial() { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse result = new Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse(this); int from_bitField0_ = bitField0_; if (documentTypesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { documentTypes_ = java.util.Collections.unmodifiableList(documentTypes_); bitField0_ = (bitField0_ & ~0x00000001); } result.documentTypes_ = documentTypes_; } else { result.documentTypes_ = documentTypesBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse) { return mergeFrom((Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse other) { if (other == Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse.getDefaultInstance()) return this; if (documentTypesBuilder_ == null) { if (!other.documentTypes_.isEmpty()) { if (documentTypes_.isEmpty()) { documentTypes_ = other.documentTypes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDocumentTypesIsMutable(); documentTypes_.addAll(other.documentTypes_); } onChanged(); } } else { if (!other.documentTypes_.isEmpty()) { if (documentTypesBuilder_.isEmpty()) { 
documentTypesBuilder_.dispose(); documentTypesBuilder_ = null; documentTypes_ = other.documentTypes_; bitField0_ = (bitField0_ & ~0x00000001); documentTypesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getDocumentTypesFieldBuilder() : null; } else { documentTypesBuilder_.addAllMessages(other.documentTypes_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getDocumentTypesCount(); i++) { if (!getDocumentTypes(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectDocumentTypesResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> documentTypes_ = java.util.Collections.emptyList(); private void ensureDocumentTypesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { documentTypes_ = new java.util.ArrayList<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType>(documentTypes_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder> 
// NOTE(review): this is protoc-generated code (protobuf 2.x Java, GeneratedMessage +
// RepeatedFieldBuilder pattern). Generated sources must not be hand-edited — regenerate
// from Documents/Types/DocumentTypeDescription.proto instead. Comments below are for
// reader orientation only; all code tokens are unchanged.
// (continuation of the `documentTypesBuilder_` field declaration started above)
documentTypesBuilder_;

// ---- Builder accessors for the repeated field `DocumentTypes` (field number 1). ----
// Standard dual-mode pattern: while `documentTypesBuilder_` is null the plain list
// `documentTypes_` is authoritative; once a nested builder is requested, ownership
// moves to the RepeatedFieldBuilder and `documentTypes_` is nulled out.

/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> getDocumentTypesList() {
  if (documentTypesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(documentTypes_);
  } else {
    return documentTypesBuilder_.getMessageList();
  }
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public int getDocumentTypesCount() {
  if (documentTypesBuilder_ == null) {
    return documentTypes_.size();
  } else {
    return documentTypesBuilder_.getCount();
  }
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType getDocumentTypes(int index) {
  if (documentTypesBuilder_ == null) {
    return documentTypes_.get(index);
  } else {
    return documentTypesBuilder_.getMessage(index);
  }
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder setDocumentTypes(
    int index, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType value) {
  if (documentTypesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentTypesIsMutable();
    documentTypes_.set(index, value);
    onChanged();
  } else {
    documentTypesBuilder_.setMessage(index, value);
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder setDocumentTypes(
    int index, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder builderForValue) {
  if (documentTypesBuilder_ == null) {
    ensureDocumentTypesIsMutable();
    documentTypes_.set(index, builderForValue.build());
    onChanged();
  } else {
    documentTypesBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder addDocumentTypes(Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType value) {
  if (documentTypesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentTypesIsMutable();
    documentTypes_.add(value);
    onChanged();
  } else {
    documentTypesBuilder_.addMessage(value);
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder addDocumentTypes(
    int index, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType value) {
  if (documentTypesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentTypesIsMutable();
    documentTypes_.add(index, value);
    onChanged();
  } else {
    documentTypesBuilder_.addMessage(index, value);
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder addDocumentTypes(
    Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder builderForValue) {
  if (documentTypesBuilder_ == null) {
    ensureDocumentTypesIsMutable();
    documentTypes_.add(builderForValue.build());
    onChanged();
  } else {
    documentTypesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder addDocumentTypes(
    int index, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder builderForValue) {
  if (documentTypesBuilder_ == null) {
    ensureDocumentTypesIsMutable();
    documentTypes_.add(index, builderForValue.build());
    onChanged();
  } else {
    documentTypesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder addAllDocumentTypes(
    java.lang.Iterable<? extends Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType> values) {
  if (documentTypesBuilder_ == null) {
    ensureDocumentTypesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(
        values, documentTypes_);
    onChanged();
  } else {
    documentTypesBuilder_.addAllMessages(values);
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder clearDocumentTypes() {
  if (documentTypesBuilder_ == null) {
    documentTypes_ = java.util.Collections.emptyList();
    // clear the has-been-mutated bit for this field
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    documentTypesBuilder_.clear();
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Builder removeDocumentTypes(int index) {
  if (documentTypesBuilder_ == null) {
    ensureDocumentTypesIsMutable();
    documentTypes_.remove(index);
    onChanged();
  } else {
    documentTypesBuilder_.remove(index);
  }
  return this;
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder getDocumentTypesBuilder(
    int index) {
  return getDocumentTypesFieldBuilder().getBuilder(index);
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder getDocumentTypesOrBuilder(
    int index) {
  if (documentTypesBuilder_ == null) {
    return documentTypes_.get(index);
  } else {
    return documentTypesBuilder_.getMessageOrBuilder(index);
  }
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public java.util.List<? extends Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder>
    getDocumentTypesOrBuilderList() {
  if (documentTypesBuilder_ != null) {
    return documentTypesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(documentTypes_);
  }
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder addDocumentTypesBuilder() {
  return getDocumentTypesFieldBuilder().addBuilder(
      Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.getDefaultInstance());
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder addDocumentTypesBuilder(
    int index) {
  return getDocumentTypesFieldBuilder().addBuilder(
      index, Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.getDefaultInstance());
}
/** <code>repeated .Diadoc.Api.Proto.Documents.Types.DetectedDocumentType DocumentTypes = 1;</code> */
public java.util.List<Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder>
    getDocumentTypesBuilderList() {
  return getDocumentTypesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; from this point on the plain list is
// abandoned (set to null) and the builder owns the field's contents.
private com.google.protobuf.RepeatedFieldBuilder<
    Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType,
    Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder,
    Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder>
    getDocumentTypesFieldBuilder() {
  if (documentTypesBuilder_ == null) {
    documentTypesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
        Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType,
        Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentType.Builder,
        Diadoc.Api.Proto.Documents.Types.DocumentTypeDescriptionProtos.DetectedDocumentTypeOrBuilder>(
            documentTypes_,
            ((bitField0_ & 0x00000001) == 0x00000001),
            getParentForChildren(),
            isClean());
    documentTypes_ = null;
  }
  return documentTypesBuilder_;
}

// @@protoc_insertion_point(builder_scope:Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse)
}

static {
  defaultInstance = new DetectDocumentTypesResponse(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:Diadoc.Api.Proto.Documents.Types.DetectDocumentTypesResponse)
}

// ---- File-level descriptor plumbing (populated by the static initializer below). ----
private static final com.google.protobuf.Descriptors.Descriptor
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_fieldAccessorTable;
private static final com.google.protobuf.Descriptors.Descriptor
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_fieldAccessorTable;

public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
static {
  // Serialized FileDescriptorProto of DocumentTypeDescription.proto (emitted by protoc);
  // the escape sequences are binary wire-format bytes — do not touch.
  java.lang.String[] descriptorData = {
    "\n-Documents/Types/DocumentTypeDescriptio" +
    "n.proto\022 Diadoc.Api.Proto.Documents.Type" +
    "s\032\036Invoicing/ExtendedSigner.proto\"N\n\024Det" +
    "ectedDocumentType\022\023\n\013TypeNamedId\030\001 \002(\t\022\020" +
    "\n\010Function\030\002 \002(\t\022\017\n\007Version\030\003 \002(\t\"l\n\033Det" +
    "ectDocumentTypesResponse\022M\n\rDocumentType" +
    "s\030\001 \003(\01326.Diadoc.Api.Proto.Documents.Typ" +
    "es.DetectedDocumentTypeB\037B\035DocumentTypeD" +
    "escriptionProtos"
  };
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.
          InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          return null;
        }
      };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
        Diadoc.Api.Proto.Invoicing.Signers.ExtendedSignerProtos.getDescriptor(),
      }, assigner);
  // Message index 0 = DetectedDocumentType, index 1 = DetectDocumentTypesResponse.
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor =
    getDescriptor().getMessageTypes().get(0);
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_fieldAccessorTable = new
    com.google.protobuf.GeneratedMessage.FieldAccessorTable(
      internal_static_Diadoc_Api_Proto_Documents_Types_DetectedDocumentType_descriptor,
      new java.lang.String[] { "TypeNamedId", "Function", "Version", });
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor =
    getDescriptor().getMessageTypes().get(1);
  internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_fieldAccessorTable = new
    com.google.protobuf.GeneratedMessage.FieldAccessorTable(
      internal_static_Diadoc_Api_Proto_Documents_Types_DetectDocumentTypesResponse_descriptor,
      new java.lang.String[] { "DocumentTypes", });
  // Force initialization of the dependency's descriptor.
  Diadoc.Api.Proto.Invoicing.Signers.ExtendedSignerProtos.getDescriptor();
}

// @@protoc_insertion_point(outer_class_scope)
}
/*
 * #%L
 * wcm.io
 * %%
 * Copyright (C) 2018 wcm.io
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package io.wcm.caravan.hal.comparison.impl;

import static io.wcm.caravan.hal.comparison.HalDifference.ChangeType.ADDITIONAL;
import static io.wcm.caravan.hal.comparison.HalDifference.ChangeType.MISSING;
import static io.wcm.caravan.hal.comparison.HalDifference.ChangeType.MODIFIED;
import static io.wcm.caravan.hal.comparison.HalDifference.ChangeType.REORDERED;
import static io.wcm.caravan.hal.comparison.HalDifference.EntityType.EMBEDDED;
import static io.wcm.caravan.hal.comparison.HalDifference.EntityType.LINK;
import static io.wcm.caravan.hal.comparison.HalDifference.EntityType.PROPERTY;
import static io.wcm.caravan.hal.comparison.testing.HalDifferenceAssertions.assertOnlyOneDifference;
import static io.wcm.caravan.hal.comparison.testing.StandardRelations.ITEM;
import static io.wcm.caravan.hal.comparison.testing.StandardRelations.SECTION;
import static org.junit.Assert.assertEquals;

import java.util.List;

import org.junit.Before;
import org.junit.Test;

import io.wcm.caravan.hal.comparison.HalComparisonContext;
import io.wcm.caravan.hal.comparison.HalComparisonStrategy;
import io.wcm.caravan.hal.comparison.HalDifference;
import io.wcm.caravan.hal.comparison.testing.TestHalComparisonStrategy;
import io.wcm.caravan.hal.comparison.testing.resources.TestResource;
import io.wcm.caravan.hal.comparison.testing.resources.TestResourceTree;
import rx.Observable;

/**
 * Integration-style tests for {@link HalComparisonImpl}: two in-memory
 * {@link TestResourceTree}s ("expected" vs "actual") are built per test, compared,
 * and the resulting {@link HalDifference} list is checked for exactly one entry
 * with a specific change type, entity type and HAL path.
 */
public class HalComparisonImplTest {

  private TestResourceTree expected;
  private TestResourceTree actual;
  // only set by tests that need a non-default strategy; otherwise defaults are used
  private HalComparisonStrategy strategy;

  private HalComparisonImpl comparison;

  @Before
  public void setUp() throws Exception {
    expected = new TestResourceTree();
    actual = new TestResourceTree();

    comparison = new HalComparisonImpl();
  }

  /**
   * Runs the comparison and blocks until the RxJava pipeline has emitted all differences.
   * @return all detected differences, in emission order
   */
  private List<HalDifference> findDifferences() {

    // use the default config unless a test case has created a specific strategy instance
    if (strategy == null) {
      strategy = new HalComparisonStrategy() {
        // only use the default implementations from the interface
      };
    }

    Observable<HalDifference> diffs = comparison.compare(expected, actual, strategy);

    return diffs.toList().toBlocking().single();
  }

  @Test
  public void different_entrypoint_should_be_detected() throws Exception {

    expected.getEntryPoint().setText("foo");
    actual.getEntryPoint().setText("bar");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/$.text");
  }

  @Test
  public void different_embedded_resource_should_be_detected() throws Exception {

    expected.createEmbedded(ITEM).setNumber(123);
    actual.createEmbedded(ITEM).setNumber(456);

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item$.number");
  }

  @Test
  public void missing_embedded_resource_should_be_detected() throws Exception {

    expected.createEmbedded(ITEM);
    expected.createEmbedded(ITEM);
    actual.createEmbedded(ITEM);

    List<HalDifference> diff = findDifferences();

    // the second (index 1) embedded item is absent on the "actual" side
    assertOnlyOneDifference(diff, MISSING, EMBEDDED, "/item[1]");
  }

  @Test
  public void additional_embedded_resource_should_be_detected() throws Exception {

    expected.createEmbedded(ITEM);
    actual.createEmbedded(ITEM);
    actual.createEmbedded(ITEM);

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, ADDITIONAL, EMBEDDED, "/item[1]");
  }

  @Test
  public void different_nameless_linked_resource_should_be_detected() throws Exception {

    expected.createLinked(ITEM).setText("abc");
    actual.createLinked(ITEM).setText("def");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item$.text");
  }

  @Test
  public void missing_nameless_linked_resource_should_be_detected() throws Exception {

    expected.createLinked(ITEM);
    expected.createLinked(ITEM);
    actual.createLinked(ITEM);

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MISSING, LINK, "/item[1]");
  }

  @Test
  public void additional_nameless_linked_resource_should_be_detected() throws Exception {

    expected.createLinked(ITEM);
    actual.createLinked(ITEM);
    actual.createLinked(ITEM);

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, ADDITIONAL, LINK, "/item[1]");
  }

  @Test
  public void different_named_linked_resource_should_be_detected() throws Exception {

    expected.createLinked(ITEM, "common").setFlag(true);
    actual.createLinked(ITEM, "common").setFlag(false);

    List<HalDifference> diff = findDifferences();

    // named links are addressed by name rather than index in the HAL path
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item['common']$.flag");
  }

  @Test
  public void missing_named_linked_resource_at_beginning_should_be_detected() throws Exception {

    expected.createLinked(ITEM, "removed");
    expected.createLinked(ITEM, "common");
    actual.createLinked(ITEM, "common");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MISSING, LINK, "/item[0]");
  }

  @Test
  public void additional_named_linked_resource_at_beginning_should_be_detected() throws Exception {

    expected.createLinked(ITEM, "common");
    actual.createLinked(ITEM, "added");
    actual.createLinked(ITEM, "common");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, ADDITIONAL, LINK, "/item[0]");
  }

  @Test
  public void reordered_named_linked_resources_should_be_detected() throws Exception {

    expected.createLinked(ITEM, "first");
    expected.createLinked(ITEM, "second");
    actual.createLinked(ITEM, "second");
    actual.createLinked(ITEM, "first");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, REORDERED, LINK, "/item");
  }

  @Test
  public void expected_and_actual_url_for_entry_point_should_be_set() throws Exception {

    expected.getEntryPoint().setText("foo");
    actual.getEntryPoint().setText("bar");

    List<HalDifference> diff = findDifferences();
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/$.text");

    HalComparisonContext context = diff.get(0).getHalContext();
    assertEquals(expected.getEntryPointUrl(), context.getExpectedUrl());
    assertEquals(actual.getEntryPointUrl(), context.getActualUrl());
  }

  @Test
  public void expected_and_actual_url_for_linked_resources_should_point_to_linked_resource() throws Exception {

    TestResource expectedSection = expected.createLinked(SECTION);
    TestResource expectedItem = expectedSection.createLinked(ITEM).setText("foo");

    TestResource actualSection = actual.createLinked(SECTION);
    TestResource actualItem = actualSection.createLinked(ITEM).setText("bar");

    List<HalDifference> diff = findDifferences();
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/section/item$.text");

    // for linked resources, the reported URLs are those of the *linked* resource itself
    HalComparisonContext context = diff.get(0).getHalContext();
    assertEquals(expectedItem.getUrl(), context.getExpectedUrl());
    assertEquals(actualItem.getUrl(), context.getActualUrl());
  }

  @Test
  public void expected_and_actual_url_for_embedded_resources_should_point_to_context_resource() throws Exception {

    TestResource expectedSection = expected.createLinked(SECTION);
    expectedSection.createEmbedded(ITEM).setText("foo");

    TestResource actualSection = actual.createLinked(SECTION);
    actualSection.createEmbedded(ITEM).setText("bar");

    List<HalDifference> diffs = findDifferences();
    assertOnlyOneDifference(diffs, MODIFIED, PROPERTY, "/section/item$.text");

    // embedded resources have no URL of their own, so the *containing* resource's URL is used
    HalComparisonContext context = diffs.get(0).getHalContext();
    assertEquals(expectedSection.getUrl(), context.getExpectedUrl());
    assertEquals(actualSection.getUrl(), context.getActualUrl());
  }

  @Test
  public void configuration_for_ignored_embedded_relations_should_be_respected() throws Exception {

    expected.createEmbedded(SECTION).setNumber(123)
        .createEmbedded(ITEM).setText("foo");

    actual.createEmbedded(SECTION).setNumber(456)
        .createEmbedded(ITEM).setText("bar");

    strategy = new TestHalComparisonStrategy().addEmbeddedRelationToIgnore(ITEM);

    List<HalDifference> diff = findDifferences();

    // only the difference for the section should be reported, not the one for the item
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/section$.number");
  }

  @Test
  public void configuration_for_ignored_link_relations_should_be_respected() throws Exception {

    expected.createLinked(SECTION).setNumber(123)
        .createLinked(ITEM).setText("foo");

    actual.createLinked(SECTION).setNumber(456)
        .createLinked(ITEM).setText("bar");

    strategy = new TestHalComparisonStrategy().addLinkRelationToIgnore(ITEM);

    List<HalDifference> diff = findDifferences();

    // only the difference for the section should be reported, not the one for the item
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/section$.number");
  }

  @Test
  public void indices_should_be_included_in_halpath_for_multiple_embedded_resources_with_same_relation() {

    expected.createEmbedded(ITEM).setText("foo");
    expected.createEmbedded(ITEM).setText("foo");
    expected.createEmbedded(ITEM).setText("foo");

    actual.createEmbedded(ITEM).setText("foo");
    actual.createEmbedded(ITEM).setText("bar");
    actual.createEmbedded(ITEM).setText("foo");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item[1]$.text");
  }

  @Test
  public void indices_should_be_included_in_halpath_for_multiple_unnamed_linked_resources_with_same_relation() {

    expected.createLinked(ITEM).setText("foo");
    expected.createLinked(ITEM).setText("foo");
    expected.createLinked(ITEM).setText("foo");

    actual.createLinked(ITEM).setText("foo");
    actual.createLinked(ITEM).setText("bar");
    actual.createLinked(ITEM).setText("foo");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item[1]$.text");
  }

  @Test
  public void name_should_be_included_in_halpath_for_multiple_named_linked_resources_with_same_relation() {

    expected.createLinked(ITEM, "name1").setText("foo");
    expected.createLinked(ITEM, "name2").setText("foo");
    expected.createLinked(ITEM, "name3").setText("foo");

    actual.createLinked(ITEM, "name1").setText("foo");
    actual.createLinked(ITEM, "name2").setText("bar");
    actual.createLinked(ITEM, "name3").setText("foo");

    List<HalDifference> diff = findDifferences();

    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item['name2']$.text");
  }

  @Test
  public void indices_should_be_included_in_halpath_for_different_array_values() {

    expected.createEmbedded(ITEM).setArray("a", "b", "c");
    actual.createEmbedded(ITEM).setArray("a", "b", "foo");

    List<HalDifference> diff = findDifferences();

    // the index of the differing array element is appended to the JSON path
    assertOnlyOneDifference(diff, MODIFIED, PROPERTY, "/item$.array[2]");
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.management.mbean;

import java.util.List;
import java.util.Map;

import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
import javax.management.openmbean.TabularDataSupport;

import org.apache.camel.CamelContext;
import org.apache.camel.CatalogCamelContext;
import org.apache.camel.Processor;
import org.apache.camel.Route;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.ServiceStatus;
import org.apache.camel.StatefulService;
import org.apache.camel.api.management.ManagedInstance;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.api.management.mbean.CamelOpenMBeanTypes;
import org.apache.camel.api.management.mbean.ManagedProcessorMBean;
import org.apache.camel.model.ModelHelper;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ProcessorDefinitionHelper;
import org.apache.camel.model.StepDefinition;
import org.apache.camel.spi.ManagementStrategy;
import org.apache.camel.support.JSonSchemaHelper;
import org.apache.camel.support.service.ServiceHelper;

/**
 * JMX MBean wrapping a single {@link Processor} of a route, exposing its
 * identity, state and EIP documentation over the management API.
 * <p>
 * Note: getter names define the JMX attribute names, so the public interface
 * of this class must not change.
 */
@ManagedResource(description = "Managed Processor")
public class ManagedProcessor extends ManagedPerformanceCounter implements ManagedInstance, ManagedProcessorMBean {

    private final CamelContext context;
    private final Processor processor;
    private final ProcessorDefinition<?> definition;
    private final String id;
    private String stepId;
    private Route route;

    /**
     * Creates the MBean for the given processor.
     *
     * @param context    the owning Camel context
     * @param processor  the runtime processor instance
     * @param definition the model definition the processor was built from
     */
    public ManagedProcessor(CamelContext context, Processor processor, ProcessorDefinition<?> definition) {
        this.context = context;
        this.processor = processor;
        this.definition = definition;
        this.id = definition.idOrCreate(context.getNodeIdFactory());
        // the step id is taken from the definition itself when it is a step,
        // otherwise from the nearest enclosing step (if any)
        StepDefinition step = definition instanceof StepDefinition
                ? (StepDefinition) definition
                : ProcessorDefinitionHelper.findFirstParentOfType(StepDefinition.class, definition, true);
        if (step != null) {
            this.stepId = step.idOrCreate(context.getNodeIdFactory());
        } else {
            this.stepId = null;
        }
    }

    @Override
    public void init(ManagementStrategy strategy) {
        super.init(strategy);
        // statistics are only gathered when the agent's level is Default or Extended
        setStatisticsEnabled(
                context.getManagementStrategy().getManagementAgent().getStatisticsLevel().isDefaultOrExtended());
    }

    public CamelContext getContext() {
        return context;
    }

    public Object getInstance() {
        return processor;
    }

    public Processor getProcessor() {
        return processor;
    }

    public ProcessorDefinition<?> getDefinition() {
        return definition;
    }

    public String getId() {
        return id;
    }

    public String getStepId() {
        return stepId;
    }

    public Integer getIndex() {
        return definition.getIndex();
    }

    public Boolean getSupportExtendedInformation() {
        return false;
    }

    public Route getRoute() {
        return route;
    }

    public void setRoute(Route route) {
        this.route = route;
    }

    public String getState() {
        // must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
        if (!(processor instanceof StatefulService)) {
            // assume started if not a ServiceSupport instance
            return ServiceStatus.Started.name();
        }
        return ((StatefulService) processor).getStatus().name();
    }

    public String getCamelId() {
        return context.getName();
    }

    public String getCamelManagementName() {
        return context.getManagementName();
    }

    public String getRouteId() {
        return route != null ? route.getId() : null;
    }

    public String getProcessorId() {
        return id;
    }

    public void start() throws Exception {
        ensureContextStarted();
        ServiceHelper.startService(getProcessor());
    }

    public void stop() throws Exception {
        ensureContextStarted();
        ServiceHelper.stopService(getProcessor());
    }

    public String informationJson() {
        return context.adapt(CatalogCamelContext.class).explainEipJson(id, true);
    }

    public TabularData explain(boolean allOptions) {
        try {
            String json = context.adapt(CatalogCamelContext.class).explainEipJson(id, allOptions);
            List<Map<String, String>> rows = JSonSchemaHelper.parseJsonSchema("properties", json, true);

            TabularData table = new TabularDataSupport(CamelOpenMBeanTypes.explainEipTabularType());
            for (Map<String, String> row : rows) {
                table.put(asCompositeData(row));
            }
            return table;
        } catch (Exception e) {
            throw RuntimeCamelException.wrapRuntimeCamelException(e);
        }
    }

    @Override
    public String dumpProcessorAsXml() throws Exception {
        return ModelHelper.dumpModelAsXml(context, definition);
    }

    /** Guard used by {@link #start()} and {@link #stop()}: lifecycle changes require a running context. */
    private void ensureContextStarted() {
        if (!context.getStatus().isStarted()) {
            throw new IllegalArgumentException("CamelContext is not started");
        }
    }

    /** Converts one parsed JSON-schema row into a {@link CompositeData} entry for {@link #explain(boolean)}. */
    private static CompositeData asCompositeData(Map<String, String> row) throws Exception {
        String option = row.get("name");
        String kind = row.get("kind");
        String label = blankIfNull(row.get("label"));
        String type = row.get("type");
        String javaType = row.get("javaType");
        String deprecated = blankIfNull(row.get("deprecated"));
        String value = blankIfNull(row.get("value"));
        String defaultValue = blankIfNull(row.get("defaultValue"));
        String description = blankIfNull(row.get("description"));

        CompositeType ct = CamelOpenMBeanTypes.explainEipsCompositeType();
        return new CompositeDataSupport(ct,
                new String[]{"option", "kind", "label", "type", "java type", "deprecated", "value", "default value", "description"},
                new Object[]{option, kind, label, type, javaType, deprecated, value, defaultValue, description});
    }

    /** Substitutes the empty string for {@code null} (optional schema attributes). */
    private static String blankIfNull(String s) {
        return s != null ? s : "";
    }
}
package com.bt.pi.app.instancemanager.handlers;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import com.bt.pi.app.common.entities.Instance;
import com.bt.pi.app.common.entities.InstanceState;
import com.bt.pi.app.common.entities.User;
import com.bt.pi.app.common.entities.Volume;
import com.bt.pi.app.common.id.PiIdBuilder;
import com.bt.pi.app.common.resource.PiQueue;
import com.bt.pi.app.instancemanager.images.InstanceImageManager;
import com.bt.pi.app.networkmanager.net.VirtualNetworkBuilder;
import com.bt.pi.core.application.watcher.task.TaskProcessingQueueHelper;
import com.bt.pi.core.continuation.UpdateResolvingContinuation;
import com.bt.pi.core.continuation.UpdateResolvingPiContinuation;
import com.bt.pi.core.dht.DhtClientFactory;
import com.bt.pi.core.dht.DhtWriter;
import com.bt.pi.core.entity.PiEntity;
import com.bt.pi.core.id.PId;

/**
 * Unit tests for {@code TerminateInstanceHandler}.
 * <p>
 * The handler runs asynchronously (via the mocked task executor), so every test
 * waits on a {@link CountDownLatch} that is released when the mocked
 * {@code TerminateInstanceEventListener.instanceTerminated(..)} callback fires,
 * and only then verifies the side effects.
 */
@RunWith(MockitoJUnitRunner.class)
public class TerminateInstanceHandlerTest {
    @InjectMocks
    private TerminateInstanceHandler terminateInstanceHandler = new TerminateInstanceHandler();
    private static final String INSTANCE_ID = "i-123";
    private Instance instance;
    @Mock
    private InstanceImageManager instanceImageManager;
    @Mock
    private DhtClientFactory dhtClientFactory;
    @Mock
    private DhtWriter dhtWriter;
    @Mock
    private ThreadPoolTaskExecutor taskExecutor;
    @Mock
    private VirtualNetworkBuilder virtualNetworkBuilder;
    @Mock
    private TerminateInstanceEventListener terminateInstanceEventListener;
    // released by the mocked listener once termination has completed
    private CountDownLatch latch;
    // NOTE(review): never assigned, so the volumePId stub below hands a null Volume
    // to the continuation — presumably intentional for these tests, but confirm.
    private Volume volume;
    @Mock
    private PiIdBuilder piIdBuilder;
    @Mock
    private PId instancePId;
    @Mock
    private PId volumePId;
    @Mock
    private TaskProcessingQueueHelper taskProcessingQueueHelper;
    @Mock
    private PId terminateInstanceQueuePId;
    @Mock
    private PId removeInstanceFromUserEntityQueuePId;
    private User user;
    private String userId = "fred";
    @Mock
    private PId userPId;

    @SuppressWarnings("unchecked")
    @Before
    public void setUp() throws Exception {
        setupInstance();
        setupUser();

        // id-builder stubs: resolve entities and queue ids to the mocked PIds
        when(piIdBuilder.getPIdForEc2AvailabilityZone(isA(Instance.class))).thenReturn(instancePId);
        when(piIdBuilder.getPIdForEc2AvailabilityZone(isA(String.class))).thenReturn(volumePId);
        when(piIdBuilder.getPiQueuePId(PiQueue.TERMINATE_INSTANCE)).thenReturn(terminateInstanceQueuePId);
        when(piIdBuilder.getPId(User.getUrl(userId))).thenReturn(userPId);
        when(terminateInstanceQueuePId.forLocalScope(PiQueue.TERMINATE_INSTANCE.getNodeScope())).thenReturn(terminateInstanceQueuePId);
        when(piIdBuilder.getPiQueuePId(PiQueue.REMOVE_INSTANCE_FROM_USER)).thenReturn(removeInstanceFromUserEntityQueuePId);
        when(removeInstanceFromUserEntityQueuePId.forLocalScope(PiQueue.REMOVE_INSTANCE_FROM_USER.getNodeScope())).thenReturn(removeInstanceFromUserEntityQueuePId);

        // run submitted work on a plain Thread so the handler's async path executes in-test
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                Runnable r = (Runnable) invocation.getArguments()[0];
                return new Thread(r);
            }
        }).when(taskExecutor).createThread(isA(Runnable.class));

        latch = new CountDownLatch(1);
        // release the latch when the listener callback is invoked
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                latch.countDown();
                return null;
            }
        }).when(terminateInstanceEventListener).instanceTerminated(isA(Instance.class));

        // DHT writer stubs: immediately apply the continuation's update/handleResult
        // against the in-memory instance / user / (null) volume entities
        doAnswer(new Answer<Object>() {
            @Override
            public PiEntity answer(InvocationOnMock invocation) throws Throwable {
                UpdateResolvingPiContinuation continuation = (UpdateResolvingPiContinuation) invocation.getArguments()[1];
                continuation.update(instance, null);
                continuation.handleResult(instance);
                return null;
            }
        }).when(dhtWriter).update(eq(instancePId), isA(UpdateResolvingContinuation.class));

        doAnswer(new Answer<Object>() {
            @Override
            public PiEntity answer(InvocationOnMock invocation) throws Throwable {
                UpdateResolvingPiContinuation continuation = (UpdateResolvingPiContinuation) invocation.getArguments()[1];
                continuation.update(user, null);
                continuation.handleResult(user);
                return null;
            }
        }).when(dhtWriter).update(eq(userPId), isA(UpdateResolvingPiContinuation.class));

        doAnswer(new Answer<Object>() {
            @Override
            public PiEntity answer(InvocationOnMock invocation) throws Throwable {
                UpdateResolvingPiContinuation continuation = (UpdateResolvingPiContinuation) invocation.getArguments()[1];
                continuation.update(volume, null);
                continuation.handleResult(volume);
                return null;
            }
        }).when(dhtWriter).update(eq(volumePId), isA(UpdateResolvingContinuation.class));

        when(dhtClientFactory.createWriter()).thenReturn(dhtWriter);
    }

    // creates the test user owning the instance under termination
    private void setupUser() {
        this.user = new User();
        user.addInstance(INSTANCE_ID);
    }

    // creates the instance fixture the handler will terminate
    private void setupInstance() {
        instance = new Instance();
        instance.setInstanceId(INSTANCE_ID);
        instance.setVlanId(1234);
        instance.setSecurityGroupName("securityGroupName");
        instance.setUserId(userId);
    }

    @Test
    public void shouldStopInstance() throws Exception {
        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(instanceImageManager).stopInstance(instance);
    }

    // NOTE(review): identical to shouldStopInstance — it verifies stopInstance, not a
    // "destroy" operation. Either the verification or the test name looks wrong; confirm
    // against InstanceImageManager's API.
    @Test
    public void shouldDestroyInstance() throws Exception {
        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(instanceImageManager).stopInstance(instance);
    }

    @Test
    public void shouldUpdateDhtWithInstanceTerminated() throws Exception {
        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        assertEquals(InstanceState.TERMINATED, instance.getState());
    }

    @Test
    public void shouldStopNetworkWhenInstanceTerminated() throws Exception {
        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(virtualNetworkBuilder).tearDownVirtualNetworkForInstance(instance.getVlanId(), instance.getInstanceId());
    }

    @Test
    public void shouldNotifyListenerWhenInstanceTerminated() throws Exception {
        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(terminateInstanceEventListener).instanceTerminated(instance);
    }

    @Test
    public void shouldRemoveTaskItemFromQueueAfterTerminateInstance() throws Exception {
        // setup

        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(taskProcessingQueueHelper).removeUrlFromQueue(terminateInstanceQueuePId, instance.getUrl());
    }

    @Test
    public void shouldAddTaskItemToRemoveInstanceFromUserEntity() throws InterruptedException {
        // setup

        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        verify(taskProcessingQueueHelper).addUrlToQueue(removeInstanceFromUserEntityQueuePId, instance.getUrl(), 5);
    }

    @Test
    public void shouldMarkInstanceAsTerminatedInUserEntity() throws InterruptedException {
        // setup

        // act
        terminateInstanceHandler.terminateInstance(instance);

        // assert
        assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
        // NOTE(review): asserting getImageIds() for an *instance* id looks odd —
        // presumably getInstanceIds() (or equivalent) was intended; verify against User.
        assertFalse(user.getImageIds().contains(INSTANCE_ID));
        assertTrue(user.getTerminatedInstanceIds().contains(INSTANCE_ID));
    }
}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Part of the test suite for the WebView's Java Bridge. This class tests that * we correctly convert JavaScript arrays to Java arrays when passing them to * the methods of injected Java objects. * * The conversions should follow * http://jdk6.java.net/plugin2/liveconnect/#JS_JAVA_CONVERSIONS. Places in * which the implementation differs from the spec are marked with * LIVECONNECT_COMPLIANCE. * FIXME: Consider making our implementation more compliant, if it will not * break backwards-compatibility. See b/4408210. * * To run this test ... 
 * adb shell am instrument -w -e class com.android.webviewtests.JavaBridgeArrayCoercionTest \
 *     com.android.webviewtests/android.test.InstrumentationTestRunner
 */

package com.android.webviewtests;

public class JavaBridgeArrayCoercionTest extends JavaBridgeTestBase {
    /**
     * Injected Java object. Each array-typed setter records the value the
     * bridge delivered and signals readiness; the matching waitFor*() getter
     * blocks until the setter has run (notifyResultIsReady()/waitForResult()
     * come from Controller). All setters are synchronized so the recording
     * and the wait happen under the same monitor.
     */
    private class TestObject extends Controller {
        private Object mObjectInstance;
        private CustomType mCustomTypeInstance;

        // One field per array type under test; written by the corresponding
        // setter when JavaScript invokes it through the bridge.
        private boolean[] mBooleanArray;
        private byte[] mByteArray;
        private char[] mCharArray;
        private short[] mShortArray;
        private int[] mIntArray;
        private long[] mLongArray;
        private float[] mFloatArray;
        private double[] mDoubleArray;
        private String[] mStringArray;
        private Object[] mObjectArray;
        private CustomType[] mCustomTypeArray;

        public TestObject() {
            mObjectInstance = new Object();
            mCustomTypeInstance = new CustomType();
        }

        // Exposed to JavaScript so tests can pass Java objects back in.
        public Object getObjectInstance() {
            return mObjectInstance;
        }
        public CustomType getCustomTypeInstance() {
            return mCustomTypeInstance;
        }

        public synchronized void setBooleanArray(boolean[] x) {
            mBooleanArray = x;
            notifyResultIsReady();
        }
        public synchronized void setByteArray(byte[] x) {
            mByteArray = x;
            notifyResultIsReady();
        }
        public synchronized void setCharArray(char[] x) {
            mCharArray = x;
            notifyResultIsReady();
        }
        public synchronized void setShortArray(short[] x) {
            mShortArray = x;
            notifyResultIsReady();
        }
        public synchronized void setIntArray(int[] x) {
            mIntArray = x;
            notifyResultIsReady();
        }
        public synchronized void setLongArray(long[] x) {
            mLongArray = x;
            notifyResultIsReady();
        }
        public synchronized void setFloatArray(float[] x) {
            mFloatArray = x;
            notifyResultIsReady();
        }
        public synchronized void setDoubleArray(double[] x) {
            mDoubleArray = x;
            notifyResultIsReady();
        }
        public synchronized void setStringArray(String[] x) {
            mStringArray = x;
            notifyResultIsReady();
        }
        public synchronized void setObjectArray(Object[] x) {
            mObjectArray = x;
            notifyResultIsReady();
        }
        public synchronized void setCustomTypeArray(CustomType[] x) {
            mCustomTypeArray = x;
            notifyResultIsReady();
        }

        public synchronized boolean[] waitForBooleanArray() {
            waitForResult();
            return mBooleanArray;
        }
        public synchronized byte[] waitForByteArray() {
            waitForResult();
            return mByteArray;
        }
        public synchronized char[] waitForCharArray() {
            waitForResult();
            return mCharArray;
        }
        public synchronized short[] waitForShortArray() {
            waitForResult();
            return mShortArray;
        }
        public synchronized int[] waitForIntArray() {
            waitForResult();
            return mIntArray;
        }
        public synchronized long[] waitForLongArray() {
            waitForResult();
            return mLongArray;
        }
        public synchronized float[] waitForFloatArray() {
            waitForResult();
            return mFloatArray;
        }
        public synchronized double[] waitForDoubleArray() {
            waitForResult();
            return mDoubleArray;
        }
        public synchronized String[] waitForStringArray() {
            waitForResult();
            return mStringArray;
        }
        public synchronized Object[] waitForObjectArray() {
            waitForResult();
            return mObjectArray;
        }
        public synchronized CustomType[] waitForCustomTypeArray() {
            waitForResult();
            return mCustomTypeArray;
        }
    }

    // Custom type used when testing passing of Java objects. (Only one such
    // type is needed here, despite what an earlier comment suggested.)
    private class CustomType {
    }

    private TestObject mTestObject;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mTestObject = new TestObject();
        // Inject mTestObject into the WebView as window.testObject.
        setUpWebView(mTestObject, "testObject");
    }

    // Note that all tests use a single element array for simplicity. We test
    // multiple elements elsewhere.

    // Test passing an array of JavaScript numbers in the int32 range to a
    // method which takes a Java array.
    public void testPassNumberInt32() throws Throwable {
        executeJavaScript("testObject.setBooleanArray([0]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);
        // LIVECONNECT_COMPLIANCE: Should convert to boolean.
        executeJavaScript("testObject.setBooleanArray([42]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        executeJavaScript("testObject.setByteArray([42]);");
        assertEquals(42, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should convert to numeric char value.
        executeJavaScript("testObject.setCharArray([42]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        executeJavaScript("testObject.setShortArray([42]);");
        assertEquals(42, mTestObject.waitForShortArray()[0]);

        executeJavaScript("testObject.setIntArray([42]);");
        assertEquals(42, mTestObject.waitForIntArray()[0]);

        executeJavaScript("testObject.setLongArray([42]);");
        assertEquals(42L, mTestObject.waitForLongArray()[0]);

        executeJavaScript("testObject.setFloatArray([42]);");
        assertEquals(42.0f, mTestObject.waitForFloatArray()[0]);

        executeJavaScript("testObject.setDoubleArray([42]);");
        assertEquals(42.0, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([42]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should create instances of java.lang.String.
        executeJavaScript("testObject.setStringArray([42]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([42]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript numbers in the double range to a
    // method which takes a Java array.
    public void testPassNumberDouble() throws Throwable {
        // LIVECONNECT_COMPLIANCE: Should convert to boolean.
        executeJavaScript("testObject.setBooleanArray([42.1]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // Fractional part is truncated for the integral types below.
        executeJavaScript("testObject.setByteArray([42.1]);");
        assertEquals(42, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should convert to numeric char value.
        executeJavaScript("testObject.setCharArray([42.1]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        executeJavaScript("testObject.setShortArray([42.1]);");
        assertEquals(42, mTestObject.waitForShortArray()[0]);

        executeJavaScript("testObject.setIntArray([42.1]);");
        assertEquals(42, mTestObject.waitForIntArray()[0]);

        executeJavaScript("testObject.setLongArray([42.1]);");
        assertEquals(42L, mTestObject.waitForLongArray()[0]);

        executeJavaScript("testObject.setFloatArray([42.1]);");
        assertEquals(42.1f, mTestObject.waitForFloatArray()[0]);

        executeJavaScript("testObject.setDoubleArray([42.1]);");
        assertEquals(42.1, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([42.1]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should create instances of java.lang.String.
        executeJavaScript("testObject.setStringArray([42.1]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([42.1]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript NaN values to a method which takes a
    // Java array.
    public void testPassNumberNaN() throws Throwable {
        executeJavaScript("testObject.setBooleanArray([Number.NaN]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // NaN coerces to 0 for all integral element types.
        executeJavaScript("testObject.setByteArray([Number.NaN]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        executeJavaScript("testObject.setCharArray([Number.NaN]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        executeJavaScript("testObject.setShortArray([Number.NaN]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        executeJavaScript("testObject.setIntArray([Number.NaN]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        executeJavaScript("testObject.setLongArray([Number.NaN]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        // Floating-point element types preserve NaN.
        executeJavaScript("testObject.setFloatArray([Number.NaN]);");
        assertEquals(Float.NaN, mTestObject.waitForFloatArray()[0]);

        executeJavaScript("testObject.setDoubleArray([Number.NaN]);");
        assertEquals(Double.NaN, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([Number.NaN]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should create instances of java.lang.String.
        executeJavaScript("testObject.setStringArray([Number.NaN]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([Number.NaN]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript infinity values to a method which
    // takes a Java array.
    public void testPassNumberInfinity() throws Throwable {
        executeJavaScript("testObject.setBooleanArray([Infinity]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // Infinity truncated to byte yields all-ones bits, i.e. -1.
        executeJavaScript("testObject.setByteArray([Infinity]);");
        assertEquals(-1, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should convert to maximum numeric char value.
        executeJavaScript("testObject.setCharArray([Infinity]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        executeJavaScript("testObject.setShortArray([Infinity]);");
        assertEquals(-1, mTestObject.waitForShortArray()[0]);

        // int saturates at Integer.MAX_VALUE.
        executeJavaScript("testObject.setIntArray([Infinity]);");
        assertEquals(Integer.MAX_VALUE, mTestObject.waitForIntArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be Long.MAX_VALUE.
        executeJavaScript("testObject.setLongArray([Infinity]);");
        assertEquals(-1L, mTestObject.waitForLongArray()[0]);

        // Floating-point element types preserve the infinity.
        executeJavaScript("testObject.setFloatArray([Infinity]);");
        assertEquals(Float.POSITIVE_INFINITY, mTestObject.waitForFloatArray()[0]);

        executeJavaScript("testObject.setDoubleArray([Infinity]);");
        assertEquals(Double.POSITIVE_INFINITY, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([Infinity]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should create instances of java.lang.String.
        executeJavaScript("testObject.setStringArray([Infinity]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([Infinity]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript boolean values to a method which
    // takes a Java array.
    public void testPassBoolean() throws Throwable {
        executeJavaScript("testObject.setBooleanArray([true]);");
        assertTrue(mTestObject.waitForBooleanArray()[0]);
        executeJavaScript("testObject.setBooleanArray([false]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // Booleans coerce to 0 for all numeric element types, true or false.
        // LIVECONNECT_COMPLIANCE: Should be 1.
        executeJavaScript("testObject.setByteArray([true]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);
        executeJavaScript("testObject.setByteArray([false]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should convert to numeric char value 1.
        executeJavaScript("testObject.setCharArray([true]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);
        executeJavaScript("testObject.setCharArray([false]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be 1.
        executeJavaScript("testObject.setShortArray([true]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);
        executeJavaScript("testObject.setShortArray([false]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be 1.
        executeJavaScript("testObject.setIntArray([true]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);
        executeJavaScript("testObject.setIntArray([false]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be 1.
        executeJavaScript("testObject.setLongArray([true]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);
        executeJavaScript("testObject.setLongArray([false]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be 1.0.
        executeJavaScript("testObject.setFloatArray([true]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);
        executeJavaScript("testObject.setFloatArray([false]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should be 1.0.
        executeJavaScript("testObject.setDoubleArray([true]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);
        executeJavaScript("testObject.setDoubleArray([false]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([true]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should create instances of java.lang.String.
        executeJavaScript("testObject.setStringArray([true]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([true]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript strings to a method which takes a
    // Java array.
    public void testPassString() throws Throwable {
        // LIVECONNECT_COMPLIANCE: Non-empty string should convert to true.
        executeJavaScript("testObject.setBooleanArray([\"+042.10\"]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // Strings coerce to 0 for all numeric element types.
        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setByteArray([\"+042.10\"]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should decode and convert to numeric char value.
        executeJavaScript("testObject.setCharArray([\"+042.10\"]);");
        assertEquals(0, mTestObject.waitForCharArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setShortArray([\"+042.10\"]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setIntArray([\"+042.10\"]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setLongArray([\"+042.10\"]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setFloatArray([\"+042.10\"]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should use valueOf() of appropriate type.
        executeJavaScript("testObject.setDoubleArray([\"+042.10\"]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and create instances of java.lang.Number.
        executeJavaScript("testObject.setObjectArray([\"+042.10\"]);");
        assertNull(mTestObject.waitForObjectArray());

        // String elements pass through unchanged.
        executeJavaScript("testObject.setStringArray([\"+042.10\"]);");
        assertEquals("+042.10", mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([\"+042.10\"]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript objects to a method which takes a
    // Java array.
    public void testPassJavaScriptObject() throws Throwable {
        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setBooleanArray([{foo: 42}]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setByteArray([{foo: 42}]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCharArray([{foo: 42}]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setShortArray([{foo: 42}]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setIntArray([{foo: 42}]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setLongArray([{foo: 42}]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setFloatArray([{foo: 42}]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setDoubleArray([{foo: 42}]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setObjectArray([{foo: 42}]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should call toString() on object.
        executeJavaScript("testObject.setStringArray([{foo: 42}]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCustomTypeArray([{foo: 42}]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of Java objects to a method which takes a Java
    // array.
    public void testPassJavaObject() throws Throwable {
        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setBooleanArray([testObject.getObjectInstance()]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setByteArray([testObject.getObjectInstance()]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setCharArray([testObject.getObjectInstance()]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setShortArray([testObject.getObjectInstance()]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setIntArray([testObject.getObjectInstance()]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setLongArray([testObject.getObjectInstance()]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setFloatArray([testObject.getObjectInstance()]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should raise a JavaScript exception.
        executeJavaScript("testObject.setDoubleArray([testObject.getObjectInstance()]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create an array and pass Java object.
        executeJavaScript("testObject.setObjectArray([testObject.getObjectInstance()]);");
        assertNull(mTestObject.waitForObjectArray());

        // LIVECONNECT_COMPLIANCE: Should call toString() on object.
        executeJavaScript("testObject.setStringArray([testObject.getObjectInstance()]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and pass Java object.
        executeJavaScript("testObject.setCustomTypeArray([testObject.getObjectInstance()]);");
        assertNull(mTestObject.waitForCustomTypeArray());
        // Even an element of the exact target type is not passed through.
        executeJavaScript("testObject.setCustomTypeArray([testObject.getCustomTypeInstance()]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript null values to a method which takes
    // a Java array.
    public void testPassNull() throws Throwable {
        // null coerces to the zero value for every primitive element type.
        executeJavaScript("testObject.setByteArray([null]);");
        assertEquals(0, mTestObject.waitForByteArray()[0]);

        executeJavaScript("testObject.setCharArray([null]);");
        assertEquals('\u0000', mTestObject.waitForCharArray()[0]);

        executeJavaScript("testObject.setShortArray([null]);");
        assertEquals(0, mTestObject.waitForShortArray()[0]);

        executeJavaScript("testObject.setIntArray([null]);");
        assertEquals(0, mTestObject.waitForIntArray()[0]);

        executeJavaScript("testObject.setLongArray([null]);");
        assertEquals(0L, mTestObject.waitForLongArray()[0]);

        executeJavaScript("testObject.setFloatArray([null]);");
        assertEquals(0.0f, mTestObject.waitForFloatArray()[0]);

        executeJavaScript("testObject.setDoubleArray([null]);");
        assertEquals(0.0, mTestObject.waitForDoubleArray()[0]);

        executeJavaScript("testObject.setBooleanArray([null]);");
        assertFalse(mTestObject.waitForBooleanArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and pass null.
        executeJavaScript("testObject.setObjectArray([null]);");
        assertNull(mTestObject.waitForObjectArray());

        executeJavaScript("testObject.setStringArray([null]);");
        assertNull(mTestObject.waitForStringArray()[0]);

        // LIVECONNECT_COMPLIANCE: Should create array and pass null.
        executeJavaScript("testObject.setCustomTypeArray([null]);");
        assertNull(mTestObject.waitForCustomTypeArray());
    }

    // Test passing an array of JavaScript undefined values to a method which
    // takes a Java array.
public void testPassUndefined() throws Throwable { executeJavaScript("testObject.setByteArray([undefined]);"); assertEquals(0, mTestObject.waitForByteArray()[0]); executeJavaScript("testObject.setCharArray([undefined]);"); assertEquals(0, mTestObject.waitForCharArray()[0]); executeJavaScript("testObject.setShortArray([undefined]);"); assertEquals(0, mTestObject.waitForShortArray()[0]); executeJavaScript("testObject.setIntArray([undefined]);"); assertEquals(0, mTestObject.waitForIntArray()[0]); executeJavaScript("testObject.setLongArray([undefined]);"); assertEquals(0L, mTestObject.waitForLongArray()[0]); executeJavaScript("testObject.setFloatArray([undefined]);"); assertEquals(0.0f, mTestObject.waitForFloatArray()[0]); executeJavaScript("testObject.setDoubleArray([undefined]);"); assertEquals(0.0, mTestObject.waitForDoubleArray()[0]); executeJavaScript("testObject.setBooleanArray([undefined]);"); assertEquals(false, mTestObject.waitForBooleanArray()[0]); // LIVECONNECT_COMPLIANCE: Should create array and pass null. executeJavaScript("testObject.setObjectArray([undefined]);"); assertNull(mTestObject.waitForObjectArray()); executeJavaScript("testObject.setStringArray([undefined]);"); assertNull(mTestObject.waitForStringArray()[0]); // LIVECONNECT_COMPLIANCE: Should create array and pass null. executeJavaScript("testObject.setCustomTypeArray([undefined]);"); assertNull(mTestObject.waitForCustomTypeArray()); } }
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.benchmark; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.hash.Hashing; import com.google.common.io.Files; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; import com.metamx.common.logger.Logger; import io.druid.benchmark.datagen.BenchmarkDataGenerator; import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.BoundDimFilter; import 
io.druid.query.filter.DimFilter; import io.druid.query.filter.DruidLongPredicate; import io.druid.query.filter.DruidPredicateFactory; import io.druid.query.filter.Filter; import io.druid.query.filter.OrDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.query.ordering.StringComparators; import io.druid.segment.Cursor; import io.druid.segment.DimensionSelector; import io.druid.segment.IndexIO; import io.druid.segment.IndexMergerV9; import io.druid.segment.IndexSpec; import io.druid.segment.LongColumnSelector; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexStorageAdapter; import io.druid.segment.StorageAdapter; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnConfig; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.AndFilter; import io.druid.segment.filter.BoundFilter; import io.druid.segment.filter.DimensionPredicateFilter; import io.druid.segment.filter.Filters; import io.druid.segment.filter.OrFilter; import io.druid.segment.filter.SelectorFilter; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.OnheapIncrementalIndex; import io.druid.segment.serde.ComplexMetrics; import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; 
import java.util.concurrent.TimeUnit;

// JMH benchmark measuring cursor-read cost over a single generated Druid
// segment under various filters: time bounds, selector filters with and
// without bitmap-index support, extraction functions, and OR/CNF combos.
@State(Scope.Benchmark)
@Fork(jvmArgsPrepend = "-server", value = 1)
@Warmup(iterations = 10)
@Measurement(iterations = 25)
public class FilterPartitionBenchmark
{
  // Number of rows generated into the benchmark segment.
  @Param({"750000"})
  private int rowsPerSegment;

  // Key into BenchmarkSchemas.SCHEMA_MAP selecting the column schema.
  @Param({"basic"})
  private String schema;

  private static final Logger log = new Logger(FilterPartitionBenchmark.class);
  // Fixed seed so every run generates identical data.
  private static final int RNG_SEED = 9999;
  private static final IndexMergerV9 INDEX_MERGER_V9;
  private static final IndexIO INDEX_IO;
  public static final ObjectMapper JSON_MAPPER;

  private IncrementalIndex incIndex;
  private QueryableIndex qIndex;
  private File indexFile;

  // Time-bound filters built in setup(): match nothing / first half / all rows.
  private Filter timeFilterNone;
  private Filter timeFilterHalf;
  private Filter timeFilterAll;

  private BenchmarkSchemaInfo schemaInfo;

  // Extraction fn that prefixes each dimension value with "super-".
  private static String JS_FN = "function(str) { return 'super-' + str; }";
  private static ExtractionFn JS_EXTRACTION_FN =
      new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getDefault());

  static {
    JSON_MAPPER = new DefaultObjectMapper();
    INDEX_IO = new IndexIO(
        JSON_MAPPER,
        new ColumnConfig()
        {
          @Override
          public int columnCacheSizeBytes()
          {
            // Disable column caching for the benchmark.
            return 0;
          }
        }
    );
    INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO);
  }

  /**
   * Generates rowsPerSegment rows into an incremental index, persists it as
   * an on-disk queryable segment, and prepares the three time-bound filters
   * shared by the benchmark methods.
   */
  @Setup
  public void setup() throws IOException
  {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());

    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(Hashing.murmur3_128()));
    }

    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schema);

    BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
        schemaInfo.getColumnSchemas(),
        RNG_SEED,
        schemaInfo.getDataInterval(),
        rowsPerSegment
    );

    incIndex = makeIncIndex();

    for (int j = 0; j < rowsPerSegment; j++) {
      InputRow row = gen.nextRow();
      if (j % 10000 == 0) {
        log.info(j + " rows generated.");
      }
      incIndex.add(row);
    }

    File tmpFile = Files.createTempDir();
    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
    tmpFile.deleteOnExit();

    indexFile = INDEX_MERGER_V9.persist(
        incIndex,
        tmpFile,
        new IndexSpec()
    );
    qIndex = INDEX_IO.loadIndex(indexFile);

    Interval interval = schemaInfo.getDataInterval();
    // Exclusive bounds at Long.MAX_VALUE on both ends: matches no rows.
    timeFilterNone = new BoundFilter(new BoundDimFilter(
        Column.TIME_COLUMN_NAME,
        String.valueOf(Long.MAX_VALUE),
        String.valueOf(Long.MAX_VALUE),
        true,
        true,
        null,
        null,
        StringComparators.ALPHANUMERIC
    ));

    // Covers roughly the first half of the data interval.
    long halfEnd = (interval.getEndMillis() + interval.getStartMillis()) / 2;
    timeFilterHalf = new BoundFilter(new BoundDimFilter(
        Column.TIME_COLUMN_NAME,
        String.valueOf(interval.getStartMillis()),
        String.valueOf(halfEnd),
        true,
        true,
        null,
        null,
        StringComparators.ALPHANUMERIC
    ));

    // Covers the entire data interval.
    timeFilterAll = new BoundFilter(new BoundDimFilter(
        Column.TIME_COLUMN_NAME,
        String.valueOf(interval.getStartMillis()),
        String.valueOf(interval.getEndMillis()),
        true,
        true,
        null,
        null,
        StringComparators.ALPHANUMERIC
    ));
  }

  // Builds the empty on-heap incremental index that setup() fills.
  private IncrementalIndex makeIncIndex()
  {
    return new OnheapIncrementalIndex(
        new IncrementalIndexSchema.Builder()
            .withQueryGranularity(QueryGranularities.NONE)
            .withMetrics(schemaInfo.getAggsArray())
            .withDimensionsSpec(new DimensionsSpec(null, null, null))
            .build(),
        true,
        false,
        true,
        rowsPerSegment
    );
  }

  // Baseline: read a string dimension from every row with no filter.
  // NOTE(review): readCursors/readCursorsLong appear to be helpers defined
  // later in this file (not visible here).
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void stringRead(Blackhole blackhole) throws Exception
  {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(null, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings =
        Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
      blackhole.consume(st);
    }
  }

  // Baseline: read the long time column from every row with no filter.
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void longRead(Blackhole blackhole) throws Exception
  {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(null, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> strings =
        Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
    for (Long st : strings) {
      blackhole.consume(st);
    }
  }

  // Time-bound filter matching no rows. (Method intentionally shares its
  // name with the timeFilterNone field; legal in Java.)
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void timeFilterNone(Blackhole blackhole) throws Exception
  {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(timeFilterNone, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> strings =
        Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
    for (Long st : strings) {
      blackhole.consume(st);
    }
  }

  // Time-bound filter matching the first half of the interval.
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void timeFilterHalf(Blackhole blackhole) throws Exception
  {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(timeFilterHalf, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> strings =
        Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
    for (Long st : strings) {
      blackhole.consume(st);
    }
  }

  // Time-bound filter matching the entire interval.
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void timeFilterAll(Blackhole blackhole) throws Exception
  {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(timeFilterAll, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> strings =
        Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList()).get(0);
    for (Long st : strings) {
      blackhole.consume(st);
    }
  }

  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void readWithPreFilter(Blackhole blackhole) throws Exception
  {
    // Plain selector filter: eligible for bitmap-index pre-filtering.
    Filter filter = new SelectorFilter("dimSequential", "199");

    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(filter, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings =
        Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
      blackhole.consume(st);
    }
  }

  // Same selection, but forced to run as a value-matching post-filter.
  // NOTE(review): NoBitmapSelectorFilter appears to be defined elsewhere in
  // this file (not visible here).
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void readWithPostFilter(Blackhole blackhole) throws Exception
  {
    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");

    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(filter, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings =
        Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
      blackhole.consume(st);
    }
  }

  // Selector with a JavaScript extraction fn, pre-filter path.
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void readWithExFnPreFilter(Blackhole blackhole) throws Exception
  {
    Filter filter = new SelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();

    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = sa.makeCursors(filter, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings =
        Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
      blackhole.consume(st);
    }
  }

  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
// Selector + extraction fn with bitmap support disabled: the extraction is evaluated per row.
public void readWithExFnPostFilter(Blackhole blackhole) throws Exception
{
  Filter filter = new NoBitmapSelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();

  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = sa.makeCursors(filter, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

  // Read only the first cursor's row values; readCursors() already feeds the blackhole per row.
  Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
  List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
  for (String st : strings) {
    blackhole.consume(st);
  }
}

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
// OR of a post-filter leg and an AND(pre, post) leg — exercises mixed pre/post filtering.
public void readOrFilter(Blackhole blackhole) throws Exception
{
  Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
  Filter filter2 = new AndFilter(Arrays.<Filter>asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")));
  Filter orFilter = new OrFilter(Arrays.<Filter>asList(filter, filter2));

  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = sa.makeCursors(orFilter, schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

  Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
  List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
  for (String st : strings) {
    blackhole.consume(st);
  }
}

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
// Same filter tree as readOrFilter, but normalized to CNF before cursor creation,
// so the two benchmarks compare raw vs. CNF filter evaluation.
public void readOrFilterCNF(Blackhole blackhole) throws Exception
{
  Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
  Filter filter2 = new AndFilter(Arrays.<Filter>asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")));
  Filter orFilter = new OrFilter(Arrays.<Filter>asList(filter, filter2));

  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = sa.makeCursors(Filters.convertToCNF(orFilter), schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

  Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
  List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
  for (String st : strings) {
    blackhole.consume(st);
  }
}

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
// Deeply nested OR(OR(...), OR(...), AND(...)) DimFilter tree, evaluated as-is (no CNF).
public void readComplexOrFilter(Blackhole blackhole) throws Exception
{
  DimFilter dimFilter1 = new OrDimFilter(Arrays.<DimFilter>asList(
      new SelectorDimFilter("dimSequential", "199", null),
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
          new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)
      )
      ))
  );
  DimFilter dimFilter2 = new OrDimFilter(Arrays.<DimFilter>asList(
      new SelectorDimFilter("dimSequential", "299", null),
      new SelectorDimFilter("dimSequential", "399", null),
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
          new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)
      )
      ))
  );
  DimFilter dimFilter3 = new OrDimFilter(Arrays.<DimFilter>asList(
      dimFilter1,
      dimFilter2,
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
          new SelectorDimFilter("dimMultivalEnumerated", "World", null)
      )
      ))
  );

  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = sa.makeCursors(dimFilter3.toFilter(), schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

  Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
  List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
  for (String st : strings) {
    blackhole.consume(st);
  }
}

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
// Same nested DimFilter tree as readComplexOrFilter, but converted to CNF first,
// measuring the cost/benefit of CNF normalization on a complex filter.
public void readComplexOrFilterCNF(Blackhole blackhole) throws Exception
{
  DimFilter dimFilter1 = new OrDimFilter(Arrays.<DimFilter>asList(
      new SelectorDimFilter("dimSequential", "199", null),
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
          new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)
      )
      ))
  );
  DimFilter dimFilter2 = new OrDimFilter(Arrays.<DimFilter>asList(
      new SelectorDimFilter("dimSequential", "299", null),
      new SelectorDimFilter("dimSequential", "399", null),
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
          new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)
      )
      ))
  );
  DimFilter dimFilter3 = new OrDimFilter(Arrays.<DimFilter>asList(
      dimFilter1,
      dimFilter2,
      new AndDimFilter(Arrays.<DimFilter>asList(
          new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
          new SelectorDimFilter("dimMultivalEnumerated", "World", null)
      )
      ))
  );

  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = sa.makeCursors(Filters.convertToCNF(dimFilter3.toFilter()), schemaInfo.getDataInterval(), QueryGranularities.ALL, false);

  Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
  List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
  for (String st : strings) {
    blackhole.consume(st);
  }
}

// Maps each cursor to a walk over the "dimSequential" column, consuming every
// looked-up value into the blackhole. NOTE: the returned lists stay empty —
// the strings.add(...) line is deliberately commented out so the benchmark
// measures column reads, not list building.
private Sequence<List<String>> readCursors(Sequence<Cursor> cursors, final Blackhole blackhole)
{
  return Sequences.map(
      cursors,
      new Function<Cursor, List<String>>()
      {
        @Override
        public List<String> apply(Cursor input)
        {
          List<String> strings = new ArrayList<String>();
          List<DimensionSelector> selectors = new ArrayList<>();
          selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null)));
          //selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimB", null)));
          while (!input.isDone()) {
            for (DimensionSelector selector : selectors) {
              // Only the first value of a (possibly multi-valued) row is read.
              IndexedInts row = selector.getRow();
              blackhole.consume(selector.lookupName(row.get(0)));
              //strings.add(selector.lookupName(row.get(0)));
            }
            input.advance();
          }
          return strings;
        }
      }
  );
}

// Long-column variant of readCursors(): consumes "sumLongSequential" values per row.
// As above, the returned lists are intentionally left empty.
private Sequence<List<Long>> readCursorsLong(Sequence<Cursor> cursors, final Blackhole blackhole)
{
  return Sequences.map(
      cursors,
      new Function<Cursor, List<Long>>()
      {
        @Override
        public List<Long> apply(Cursor input)
        {
          List<Long> longvals = new ArrayList<Long>();
          LongColumnSelector selector = input.makeLongColumnSelector("sumLongSequential");
          while (!input.isDone()) {
            long rowval = selector.get();
            blackhole.consume(rowval);
            input.advance();
          }
          return longvals;
        }
      }
  );
}

// SelectorFilter that pretends it has no bitmap index, forcing post-filter evaluation.
private class NoBitmapSelectorFilter extends SelectorFilter
{
  public NoBitmapSelectorFilter(
      String dimension,
      String value
  )
  {
    super(dimension, value);
  }

  @Override
  public boolean supportsBitmapIndex(BitmapIndexSelector selector)
  {
    return false;
  }
}

// Predicate-based filter with bitmap support disabled, used for the extraction-fn post-filter case.
private class NoBitmapDimensionPredicateFilter extends DimensionPredicateFilter
{
  public NoBitmapDimensionPredicateFilter(
      final String dimension,
      final DruidPredicateFactory predicateFactory,
      final ExtractionFn extractionFn
  )
  {
    super(dimension, predicateFactory, extractionFn);
  }

  @Override
  public boolean supportsBitmapIndex(BitmapIndexSelector selector)
  {
    return false;
  }
}

// SelectorDimFilter whose toFilter() always produces one of the no-bitmap filters above.
private class NoBitmapSelectorDimFilter extends SelectorDimFilter
{
  public NoBitmapSelectorDimFilter(
      String dimension,
      String value,
      ExtractionFn extractionFn
  )
  {
    super(dimension, value, extractionFn);
  }

  @Override
  public Filter toFilter()
  {
    ExtractionFn extractionFn = getExtractionFn();
    String dimension = getDimension();
    final String value = getValue();
    if (extractionFn == null) {
      return new NoBitmapSelectorFilter(dimension, value);
    } else {
      // Empty string is normalized to null so the predicate treats "" and null alike.
      final String valueOrNull = Strings.emptyToNull(value);

      final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
      {
        @Override
        public Predicate<String> makeStringPredicate()
        {
          return new Predicate<String>()
          {
            @Override
            public boolean apply(String input)
            {
              return Objects.equals(valueOrNull, input);
            }
          };
        }

        @Override
        public DruidLongPredicate makeLongPredicate()
        {
          // Long-typed rows never match this string selector.
          return new DruidLongPredicate()
          {
            @Override
            public boolean applyLong(long input)
            {
              return false;
            }
          };
        }
      };

      return new NoBitmapDimensionPredicateFilter(dimension, predicateFactory, extractionFn);
    }
  }
}
}
/*<license>
Copyright 2004 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.bean_VI;

import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;
import static org.ppwcode.util.reflect_I.Properties.hasProperty;

import org.ppwcode.exception_N.SemanticException;
import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.toryt.annotations_I.Basic;
import org.toryt.annotations_I.Expression;
import org.toryt.annotations_I.Invars;
import org.toryt.annotations_I.MethodContract;

/**
 * <p>PropertyExceptions are exceptions that carry with them information
 *   about the property for which they occurred. They are usually thrown
 *   by a property setter during validation. If the property name is
 *   <code>null</code>, it means that the exception could not be
 *   attributed to a specific property of {@link #getOrigin()}.
 *   <em>The <code>origin</code> should not be <code>null</code></em>,
 *   except when the exception is thrown during construction of an
 *   object, that could not be completed. In that case, the type
 *   should be filled out. Carrying the reference to the object would
 *   expose an incompletely initialized object, as the exception
 *   signals a failure to complete the initialization.</p>
 * <p>Localized messages are sought
 *   in a <kbd>*.properties</kbd> file for the class of the origin. The
 *   properties files should be in the directory next to the bean class, with
 *   name <kbd><var>{@link #getOrigin()}<code>.getClass().getName()</code></var>
 *   <var>_locale_identification</var>.properties</kbd>. Alternatively,
 *   messages can be added to a properties file that comes with the
 *   exception.</p>
 * <p>The keys for the localized messages have to have the form
 *   <code><var>this.getClass().getName()</var>.
 *   <var>{@link #getPropertyName()}</var>.
 *   <var>{@link #getMessage()}</var></code>.
 *   See {@link #getLocalizedMessageKeys()}.</p>
 *
 * @note Throwables cannot have generic parameters. Otherwise we would have used
 *       this instead of {@link #getOriginType()}.
 *
 * @author Jan Dockx
 * @author PeopleWare n.v.
 *
 * @idea (jand): Check that property name is truly a property of the origin
 * @mudo extends SemanticException extends InternalException
 * @mudo i18n
 */
@Copyright("2004 - $Date$, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision$",
         date     = "$Date$")
@Invars(
  @Expression("message == null || ! message.equals(EMPTY)")
)
public class PropertyException extends SemanticException {

  /*<construction>*/
  //-------------------------------------------------------------------------

  /**
   * @param     origin
   *            The bean that has thrown this exception.
   * @param     propertyName
   *            The name of the property of which the setter has thrown
   *            this exception because parameter validation failed.
   * @param     message
   *            The message that describes the exceptional circumstance.
   * @param     cause
   *            The exception that occurred, causing this exception to be
   *            thrown, if that is the case.
   */
  @MethodContract(
    pre  = {
      @Expression("_origin != null"),
      @Expression("_propertyName != null ? hasProperty(_origin.class, _propertyName)"),
      @Expression("_message == null || ! _message.equals(EMPTY)")
    },
    post = {
      @Expression("origin == _origin"),
      @Expression("originType == _origin.class"),
      @Expression("propertyName == _propertyName"),
      @Expression("message == _message"),
      @Expression("cause == _cause")
    }
  )
  public PropertyException(final Object origin,
                           final String propertyName,
                           final String message,
                           final Throwable cause) {
    super(message, cause);
    assert origin != null;
    // A Class argument almost certainly means the Class-based constructor was intended.
    assert ! (origin instanceof Class);
    assert (propertyName == null) || hasProperty(origin.getClass(), propertyName);
    assert (message == null) || (! message.equals(""));
    $origin = origin;
    $originType = origin.getClass();
    $propertyName = propertyName;
  }

  /**
   * @param     origin
   *            The bean that has thrown this exception.
   * @param     inOriginInitialization
   *            Set to <code>true</code> if this active
   *            property is created during the origin initialization;
   *            if so, an exception will not carry a reference
   *            to the bean, but only to the bean type.
   * @param     propertyName
   *            The name of the property of which the setter has thrown
   *            this exception because parameter validation failed.
   * @param     message
   *            The message that describes the exceptional circumstance.
   * @param     cause
   *            The exception that occurred, causing this exception to be
   *            thrown, if that is the case.
   *
   * @since IV 1.1.0/1.0
   */
  @MethodContract(
    pre  = {
      @Expression("_origin != null"),
      @Expression("_propertyName != null ? hasProperty(_origin.class, _propertyName)"),
      @Expression("_message == null || ! _message.equals(EMPTY)")
    },
    post = {
      @Expression("_inOriginInitialization ? origin == null : origin == _origin"),
      @Expression("originType == _origin.class"),
      @Expression("propertyName == _propertyName"),
      @Expression("message == _message"),
      @Expression("cause == _cause")
    }
  )
  public PropertyException(final Object origin,
                           final boolean inOriginInitialization,
                           final String propertyName,
                           final String message,
                           final Throwable cause) {
    super(message, cause);
    assert origin != null;
    assert (propertyName == null) || hasProperty(origin.getClass(), propertyName);
    assert (message == null) || (!message.equals(""));
    // During origin initialization only the type is retained, to avoid exposing
    // a partially constructed bean through the exception.
    if (!inOriginInitialization) {
      $origin = origin;
    }
    $originType = origin.getClass();
    $propertyName = propertyName;
  }

  /**
   * @param     originType
   *            The bean that has thrown this exception.
   * @param     propertyName
   *            The name of the property of which the setter has thrown
   *            this exception because parameter validation failed.
   * @param     message
   *            The message that describes the exceptional circumstance.
   * @param     cause
   *            The exception that occurred, causing this exception to be
   *            thrown, if that is the case.
   *
   * @since IV
   */
  @MethodContract(
    pre  = {
      @Expression("_originType != null"),
      @Expression("_propertyName != null ? hasProperty(_originType, _propertyName)"),
      @Expression("_message == null || ! _message.equals(EMPTY)")
    },
    post = {
      @Expression("origin == null"),
      @Expression("originType == _originType"),
      @Expression("propertyName == _propertyName"),
      @Expression("message == _message"),
      @Expression("cause == _cause")
    }
  )
  public PropertyException(final Class<?> originType,
                           final String propertyName,
                           final String message,
                           final Throwable cause) {
    super(message, cause);
    assert originType != null;
    assert (propertyName == null) || hasProperty(originType, propertyName);
    assert (message == null) || (!message.equals(""));
    $originType = originType;
    $propertyName = propertyName;
  }

  /*</construction>*/



  /*<property name="origin">*/
  //------------------------------------------------------------------

  /**
   * The bean that has thrown this exception.
   * May be <code>null</code> when the exception was raised during origin
   * initialization or constructed with only the origin type.
   */
  @Basic(
    invars = {
      @Expression("originType != null"),
      @Expression("origin != null ? originType == origin.class")
    }
  )
  public final Object getOrigin() {
    return $origin;
  }

  private Object $origin;

  /*</property>*/



  /*<property name="originType">*/
  //------------------------------------------------------------------

  /**
   * The type of the bean that has thrown this exception.
   */
  @Basic(
    invars = @Expression("originType != null")
  )
  public final Class<?> getOriginType() {
    return $originType;
  }

  private Class<?> $originType;

  /*</property>*/



  /*<property name="propertyName">*/
  //------------------------------------------------------------------

  /**
   * The name of the property for which this
   * exception was raised; <code>null</code> when it cannot be attributed
   * to a specific property.
   */
  @Basic(
    invars = @Expression("propertyName != null ? hasProperty(originType, propertyName)")
  )
  public final String getPropertyName() {
    return $propertyName;
  }

  private String $propertyName;

  /*</property>*/



  /*<section name="comparison">*/
  //------------------------------------------------------------------

  /**
   * Compare {@code other} to this: is other of the exact same
   * type and does other have the exact same properties.
   *
   * This method is an alternative to {@link #equals(Object)}, which
   * we cannot override, because we need to keep reference semantics
   * for exceptions.
   *
   * This method is introduced mainly for use in contracts of methods
   * that throw property exceptions, and in unit tests for those
   * methods.
   *
   * This method must be overridden in every subclass that adds a property
   * to include that property in the comparison.
   *
   * @note method was formerly called {@code hasSameValues}, and now replaces
   *       {@code hasSameValues}, 2 {@code contains} methods and 2 {@code reportsOn}
   *       methods, which in practice did not fulfill their promise.
   *
   * @since VI
   */
  @MethodContract(
    post = @Expression("result ? (_other != null) && (_other.class = class) && " +
                       "(origin == _other.origin) && (originType == _other.originType) && " +
                       "(propertyName == _other.propertyName) && (message == _other.message) && " +
                       "(cause == _other.cause)")
  )
  public boolean like(PropertyException other) {
    // Reference identity for origin/originType/cause; null-safe equals for the strings.
    return (other != null) && (other.getClass() == getClass()) &&
           (other.getOrigin() == getOrigin()) &&
           (other.getOriginType() == getOriginType()) &&
           eqn(other.getPropertyName(), getPropertyName()) &&
           eqn(other.getMessage(), getMessage()) &&
           (other.getCause() == getCause());
  }

  /**
   * Null-safe equality helper: <code>true</code> when both are <code>null</code>
   * or <code>o1.equals(o2)</code>.
   */
  protected final boolean eqn(Object o1, Object o2) {
    return o1 == null ? o2 == null : o1.equals(o2);
  }

}
package org.telegram.android.fragments;

import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.SystemClock;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import org.telegram.android.base.MediaReceiverFragment;
import org.telegram.android.R;
import org.telegram.android.core.UserSourceListener;
import org.telegram.android.core.background.AvatarUploader;
import org.telegram.android.core.model.PeerType;
import org.telegram.android.core.model.file.AbsFileSource;
import org.telegram.android.core.model.file.FileSource;
import org.telegram.android.core.model.file.FileUriSource;
import org.telegram.android.core.model.media.TLLocalAvatarEmpty;
import org.telegram.android.core.model.media.TLLocalAvatarPhoto;
import org.telegram.android.core.model.media.TLLocalFileLocation;
import org.telegram.android.log.Logger;
import org.telegram.android.core.model.User;
import org.telegram.android.preview.AvatarView;
import org.telegram.android.tasks.AsyncAction;
import org.telegram.android.tasks.AsyncException;
import org.telegram.android.tasks.ProgressInterface;
import org.telegram.android.ui.Placeholders;
import org.telegram.android.ui.TextUtil;
import org.telegram.api.*;
import org.telegram.api.requests.TLRequestAuthLogOut;
import org.telegram.api.requests.TLRequestAuthResetAuthorizations;
import org.telegram.api.requests.TLRequestPhotosUpdateProfilePhoto;

import java.io.*;

/**
 * Settings screen: shows the current user's avatar, name and phone, and wires up
 * avatar upload/removal, logout, session reset, and navigation to sub-settings.
 *
 * Author: Korshakov Stepan
 * Created: 31.07.13 4:48
 */
public class SettingsFragment extends MediaReceiverFragment implements UserSourceListener, AvatarUploader.AvatarUserUploadListener {

    private AvatarView avatar;
    private TextView nameView;
    private TextView phoneView;

    // Overlay views for the avatar upload state (progress spinner / error badge).
    private View avatarUploadView;
    private View avatarUploadError;
    private View avatarUploadProgress;
    private View progress;

    // Hidden developer-mode unlock: rapid taps on the version label are counted here.
    private int debugClickCount = 0;
    private long lastDebugClickTime = 0;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View res = wrap(inflater).inflate(R.layout.settings_main, container, false);

        avatarUploadView = res.findViewById(R.id.avatarUploadProgress);
        avatarUploadProgress = res.findViewById(R.id.uploadProgressBar);
        avatarUploadError = res.findViewById(R.id.uploadError);
        progress = res.findViewById(R.id.progress);
        avatarUploadView.setVisibility(View.GONE);
        avatarUploadProgress.setVisibility(View.GONE);
        avatarUploadError.setVisibility(View.GONE);
        progress.setVisibility(View.GONE);

        // Only the progress spinner is driven by async tasks on this screen.
        setDefaultProgressInterface(new ProgressInterface() {
            @Override
            public void showContent() {
            }

            @Override
            public void hideContent() {
            }

            @Override
            public void showProgress() {
                progress.setVisibility(View.VISIBLE);
            }

            @Override
            public void hideProgress() {
                progress.setVisibility(View.GONE);
            }
        });

        // Version label; shows the package version and doubles as the developer-mode unlock.
        TextView textView = (TextView) res.findViewById(R.id.version);
        PackageInfo pInfo = null;
        try {
            pInfo = application.getPackageManager().getPackageInfo(application.getPackageName(), 0);
            textView.setText(unicodeWrap(pInfo.versionName));
        } catch (PackageManager.NameNotFoundException e) {
            e.printStackTrace();
            textView.setText("Unknown");
        }
        textView.setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // 7+ taps within 1s of each other enable developer mode.
                if (debugClickCount == 0 || SystemClock.uptimeMillis() > lastDebugClickTime + 1000) {
                    lastDebugClickTime = SystemClock.uptimeMillis();
                    debugClickCount = 1;
                } else {
                    lastDebugClickTime = SystemClock.uptimeMillis();
                    debugClickCount++;
                    if (debugClickCount > 6) {
                        debugClickCount = 0;
                        application.getTechKernel().getDebugSettings().setDeveloperMode(true);
                        application.getTechKernel().getDebugSettings().setSaveLogs(true);
                        Logger.enableDiskLog();
                        Toast.makeText(application, "Enabling developer settings", Toast.LENGTH_SHORT).show();
                        getView().findViewById(R.id.developmentButton).setVisibility(View.VISIBLE);
                        getView().findViewById(R.id.developmentDiv).setVisibility(View.VISIBLE);
                    }
                }
            }
        }));

        // Logout: server-side logout RPC, then local kernel logout, then navigation.
        res.findViewById(R.id.logoutButton).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                runUiTask(new AsyncAction() {
                    @Override
                    public void execute() throws AsyncException {
                        rpc(new TLRequestAuthLogOut());
                        application.getKernel().logOut();
                    }

                    @Override
                    public void afterExecute() {
                        getRootController().onLogout();
                    }
                });
            }
        }));
        res.findViewById(R.id.deleteAccount).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                openUri(Uri.parse("https://telegram.org/deactivate"));
            }
        }));

        // Avatar change: if a previous upload failed, offer retry/cancel first;
        // otherwise open the photo chooser (with delete option when a photo exists).
        res.findViewById(R.id.changeAvatar).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                int state = application.getSyncKernel().getAvatarUploader().getAvatarUploadState();
                if (state == AvatarUploader.STATE_ERROR) {
                    AlertDialog dialog = new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.st_avatar_change_error_title)
                            .setMessage(R.string.st_avatar_change_error_message)
                            .setPositiveButton(R.string.st_try_again, secure(new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialogInterface, int i) {
                                    application.getSyncKernel().getAvatarUploader().tryAgainUploadAvatar();
                                }
                            }))
                            .setNegativeButton(R.string.st_cancel, secure(new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialogInterface, int i) {
                                    application.getSyncKernel().getAvatarUploader().cancelUploadAvatar();
                                }
                            })).create();
                    dialog.setCanceledOnTouchOutside(true);
                    dialog.show();
                } else {
                    User user = application.getEngine().getUser(application.getCurrentUid());
                    if (user != null && user.getPhoto() instanceof TLLocalAvatarPhoto) {
                        requestPhotoChooser(0, PICK_DEFAULT | PICK_DELETE);
                    } else {
                        requestPhotoChooser(0, PICK_DEFAULT);
                    }
                }
            }
        }));
        res.findViewById(R.id.chatSettings).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getRootController().openChatSettings();
            }
        }));

        // Terminate all other sessions after a confirmation dialog.
        res.findViewById(R.id.resetSessions).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                AlertDialog dialog = new AlertDialog.Builder(getActivity())
                        .setTitle(R.string.st_settings_reset_title)
                        .setMessage(R.string.st_settings_reset_message)
                        .setPositiveButton(R.string.st_reset, secure(new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialogInterface, int i) {
                                runUiTask(new AsyncAction() {
                                    @Override
                                    public void execute() throws AsyncException {
                                        rpc(new TLRequestAuthResetAuthorizations());
                                        application.getUpdateProcessor().invalidateUpdates();
                                    }

                                    @Override
                                    public void afterExecute() {
                                        Toast.makeText(getActivity(), R.string.st_settings_reset_toast, Toast.LENGTH_SHORT).show();
                                    }
                                });
                            }
                        }))
                        .setNegativeButton(R.string.st_cancel, null)
                        .create();
                dialog.setCanceledOnTouchOutside(true);
                dialog.show();
            }
        }));

        // 333000 is the uid the support dialog is opened against.
        res.findViewById(R.id.support).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getRootController().openDialog(PeerType.PEER_USER, 333000);
            }
        }));
        res.findViewById(R.id.googlePlay).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                openUri(Uri.parse("https://play.google.com/store/apps/details?id=" + application.getPackageName()));
            }
        }));
        res.findViewById(R.id.twitter).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Arabic locale gets the localized Twitter account.
                if (getStringSafe(R.string.st_lang).equals("ar")) {
                    openUri(Uri.parse("https://twitter.com/telegram_arabic"));
                } else {
                    openUri(Uri.parse("https://twitter.com/telegram"));
                }
            }
        }));
        res.findViewById(R.id.faq).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                openUri(Uri.parse("https://telegram.org/faq"));
            }
        }));
        res.findViewById(R.id.notifications).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getRootController().openNotificationSettings();
            }
        }));
        res.findViewById(R.id.blocked).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getRootController().openBlocked();
            }
        }));
        res.findViewById(R.id.editName).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                getRootController().openNameSettings();
            }
        }));
        res.findViewById(R.id.developmentButton).setOnClickListener(secure(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                getRootController().openDebugSettings();
            }
        }));

        // Development entries are only visible once developer mode is unlocked.
        if (application.getTechKernel().getDebugSettings().isDeveloperMode()) {
            res.findViewById(R.id.developmentButton).setVisibility(View.VISIBLE);
            res.findViewById(R.id.developmentDiv).setVisibility(View.VISIBLE);
        } else {
            res.findViewById(R.id.developmentButton).setVisibility(View.GONE);
            res.findViewById(R.id.developmentDiv).setVisibility(View.GONE);
        }

        avatar = (AvatarView) res.findViewById(R.id.avatar);
        // avatar.setEmptyGreyUser(application.getCurrentUid());
        nameView = (TextView) res.findViewById(R.id.userName);
        phoneView = (TextView) res.findViewById(R.id.phone);
        if (application.isRTL()) {
            nameView.setGravity(Gravity.RIGHT);
            phoneView.setGravity(Gravity.RIGHT | Gravity.CENTER_VERTICAL);
        } else {
            nameView.setGravity(Gravity.LEFT);
            phoneView.setGravity(Gravity.LEFT | Gravity.CENTER_VERTICAL);
        }
        return res;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Listen for user/profile changes and avatar upload progress while visible.
        application.getUserSource().registerListener(this);
        application.getSyncKernel().getAvatarUploader().setListener(this);
        updateUser();
    }

    /**
     * Renders the current user's avatar, name and phone, including the transient
     * upload overlay when an avatar upload is in flight or has failed.
     */
    private void updateUser() {
        User user = application.getEngine().getUser(application.getCurrentUid());
        if (user != null) {
            boolean isLoaded = false;
            avatar.setEmptyGreyUser();
            int state = application.getSyncKernel().getAvatarUploader().getAvatarUploadState();
            if (state != AvatarUploader.STATE_NONE) {
                // An upload is pending: preview the local source being uploaded.
                AbsFileSource fileSource = application.getSyncKernel().getAvatarUploader().getAvatarUploadingSource();
                if (fileSource != null) {
                    if (fileSource instanceof FileSource) {
                        avatar.requestRawAvatarSwitch(((FileSource) fileSource).getFileName());
                        showView(avatarUploadView);
                        isLoaded = true;
                    } else if (fileSource instanceof FileUriSource) {
                        avatar.requestRawAvatarSwitch(Uri.parse(((FileUriSource) fileSource).getUri()));
                        showView(avatarUploadView);
                        isLoaded = true;
                    }
                }
                if (isLoaded) {
                    if (state == AvatarUploader.STATE_ERROR) {
                        showView(avatarUploadError, false);
                        hideView(avatarUploadProgress, false);
                    } else {
                        hideView(avatarUploadError, false);
                        showView(avatarUploadProgress, false);
                    }
                }
            }
            if (!isLoaded) {
                // No pending upload: show the stored avatar, if any.
                hideView(avatarUploadView);
                if (user.getPhoto() instanceof TLLocalAvatarPhoto) {
                    TLLocalAvatarPhoto photo = (TLLocalAvatarPhoto) user.getPhoto();
                    if (photo.getPreviewLocation() instanceof TLLocalFileLocation) {
                        avatar.requestAvatarSwitch(photo.getPreviewLocation());
                    } else {
                        avatar.requestAvatarSwitch(null);
                    }
                } else {
                    avatar.requestAvatarSwitch(null);
                }
            }
            nameView.setText(unicodeWrap(user.getDisplayName()));
            phoneView.setText(unicodeWrap(TextUtil.formatPhone(user.getPhone())));
        } else {
            avatar.requestAvatar(null);
            nameView.setText("Loading...");
            phoneView.setText("Loading...");
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        application.getUserSource().unregisterListener(this);
        application.getSyncKernel().getAvatarUploader().setListener(null);
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Drop view references to avoid leaking the view hierarchy.
        nameView = null;
        phoneView = null;
        avatar = null;
    }

    @Override
    protected void onPhotoArrived(Uri uri, int width, int height, int requestId) {
        // requestId 0 == avatar chooser started from this screen.
        if (requestId == 0) {
            if (cropSupported(uri)) {
                requestCrop(uri, 200, 200, 0);
            } else {
                doUploadAvatar(null, uri);
            }
        }
    }

    @Override
    protected void onPhotoArrived(final String fileName, int width, int height, int requestId) {
        if (requestId == 0) {
            if (cropSupported(Uri.fromFile(new File(fileName)))) {
                requestCrop(fileName, 200, 200, 0);
            } else {
                doUploadAvatar(fileName, null);
            }
        }
    }

    @Override
    protected void onPhotoCropped(Uri uri, int requestId) {
        if (requestId == 0) {
            doUploadAvatar(null, uri);
        }
    }

    @Override
    protected void onPhotoDeleted(int requestId) {
        if (requestId == 0) {
            // Both arguments null == remove the current avatar.
            doUploadAvatar(null, null);
        }
    }

    /**
     * Uploads a new avatar from a file path or content Uri; when both are null,
     * removes the profile photo via TLRequestPhotosUpdateProfilePhoto and updates
     * the local user engine with the result.
     */
    private void doUploadAvatar(final String fileName, final Uri uri) {
        if (fileName != null || uri != null) {
            if (uri != null) {
                application.getSyncKernel().getAvatarUploader().uploadAvatar(new FileUriSource(uri.toString()));
            } else {
                application.getSyncKernel().getAvatarUploader().uploadAvatar(new FileSource(fileName));
            }
        } else {
            runUiTask(new AsyncAction() {
                @Override
                public void execute() throws AsyncException {
                    TLAbsUserProfilePhoto photo = rpc(new TLRequestPhotosUpdateProfilePhoto(new TLInputPhotoEmpty(), new TLInputPhotoCropAuto()));
                    if (photo instanceof TLUserProfilePhoto) {
                        TLUserProfilePhoto profilePhoto = (TLUserProfilePhoto) photo;
                        TLFileLocation smallLocation = (TLFileLocation) profilePhoto.getPhotoSmall();
                        TLFileLocation largeLocation = (TLFileLocation) profilePhoto.getPhotoBig();
                        TLLocalAvatarPhoto localProfilePhoto = new TLLocalAvatarPhoto();
                        localProfilePhoto.setPreviewLocation(new TLLocalFileLocation(smallLocation.getDcId(), smallLocation.getVolumeId(), smallLocation.getLocalId(), smallLocation.getSecret(), 0));
                        localProfilePhoto.setFullLocation(new TLLocalFileLocation(largeLocation.getDcId(), largeLocation.getVolumeId(), largeLocation.getLocalId(), largeLocation.getSecret(), 0));
                        application.getEngine().getUsersEngine().onUserPhotoChanges(application.getCurrentUid(), localProfilePhoto);
                    } else {
                        application.getEngine().getUsersEngine().onUserPhotoChanges(application.getCurrentUid(), new TLLocalAvatarEmpty());
                    }
                }

                @Override
                public void afterExecute() {
                    Toast.makeText(getActivity(), R.string.st_avatar_removed, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }
@Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); getSherlockActivity().getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSherlockActivity().getSupportActionBar().setDisplayShowHomeEnabled(false); getSherlockActivity().getSupportActionBar().setTitle(highlightTitleText(R.string.st_settings_title)); getSherlockActivity().getSupportActionBar().setSubtitle(null); } @Override public void onUsersChanged(User[] users) { for (User u : users) { if (u.getUid() == application.getCurrentUid()) { updateUser(); return; } } } @Override public void onAvatarUploadingStateChanged() { secureCallback(new Runnable() { @Override public void run() { updateUser(); } }); } }
package org.ovirt.engine.core.common.config;

import java.util.HashMap;
import java.util.Map;

/**
 * Registry of all engine configuration options. Each constant carries its value type
 * ({@code @TypeConverterAttribute}), a built-in default ({@code @DefaultValueAttribute}),
 * and optionally a parsing/lookup behaviour ({@code @OptionBehaviourAttribute}). The int
 * argument is a legacy numeric id used only for the reverse lookup in {@link #forValue(int)}.
 *
 * NOTE(review): several constants share the same numeric id (267, 289, 290, 336, 344, 345
 * — each flagged inline below). The static lookup map is filled with plain put() in
 * declaration order, so for a duplicated id the LATER declaration silently wins and
 * forValue() can never return the earlier one. Confirm whether these collisions are
 * intentional before relying on forValue() for those ids.
 */
public enum ConfigValues {
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("9b9002d1-ec33-4083-8a7b-31f6b8931648")
    AdUserId(0),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("SQLServer")
    DBEngine(1),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    PSAsyncActionTimeOutInMinutes(2),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("120")
    SelectCommandTimeout(3),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("VdcDbConnection")
    DefaultDataBaseName(4),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("administrator")
    AdUserName(5),
    // Default encodes one DOMAIN:password pair per the DomainsPasswordMap behaviour.
    @TypeConverterAttribute(Map.class) @DefaultValueAttribute("REDHAT.COM:123456") @OptionBehaviourAttribute(behaviour = OptionBehaviour.DomainsPasswordMap)
    AdUserPassword(8),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("123456") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    LocalAdminPassword(9),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("REDHAT.COM")
    DomainName(10),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("180")
    vdsTimeout(11),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("2")
    VdsRefreshRate(12),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    AsyncTaskPollingRate(13),
    /**
     * The rate (in seconds) to refresh the cache that holds the asynchronous tasks' statuses.
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    AsyncTaskStatusCacheRefreshRateInSeconds(15),
    /**
     * The period of time (in minutes) to hold the asynchronous tasks' statuses in the
     * asynchronous tasks cache.
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    AsyncTaskStatusCachingTimeInMinutes(16),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3000")
    AsyncTaskZombieTaskLifeInMinutes(17),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3600")
    UserRefreshRate(18),
    @TypeConverterAttribute(java.util.Date.class) @DefaultValueAttribute("03:35:35")
    AuditLogCleanupTime(19),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    AuditLogAgingThreashold(20),
    // NOTE(review): the default looks like a MAC address, not a port number — possibly a
    // copy/paste mistake for this String option; confirm against the option's consumers.
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("00:1A:4A:16:72:XX")
    MigrationMinPort(21),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    MigrationPortRange(22),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    LogXmlRpcData(23),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("drac5,ilo,ipmilan,rsa,bladecenter,alom,apc,eps,wti,rsb")
    VdsFenceType(24),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("drac5:secure=yes,ilo:ssl=yes,ipmilan:,rsa:secure=yes,bladecenter:secure=yes,alom:secure=yes,apc:secure=yes,eps:,wti:secure=yes,rsb:")
    VdsFenceOptions(25),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("alom:secure=secure,port=ipport;apc:secure=secure,port=ipport,slot=port;bladecenter:secure=secure,port=ipport,slot=port;drac5:secure=secure,port=ipport;eps:slot=port;ilo:secure=ssl,port=ipport;ipmilan:;rsa:secure=secure,port=ipport;rsb:;wti:secure=secure,port=ipport,slot=port")
    VdsFenceOptionMapping(26),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("secure=bool,port=int,slot=int")
    VdsFenceOptionTypes(27),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    FenceStopStatusRetries(28),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    FenceStopStatusDelayBetweenRetriesInSec(29),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("180")
    FenceQuietTimeBetweenOperationsInSec(30),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/data/updates/ovirt-node-image.iso")
    oVirtUploadPath(32),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/usr/share/rhev-hypervisor")
    oVirtISOsRepositoryPath(33),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/usr/share/vdsm-reg/vdsm-upgrade")
    oVirtUpgradeScriptName(34),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Scripts\\vds_installer.py")
    BootstrapInstallerFileName(35),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/usr/share/vdsm-reg/ovirt-vdsm-gen-cert.py")
    CBCCertificateScriptName(36),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/usr/share/vdsm-reg/ovirt-vdsm-complete.py")
    CBCCloseCertificateScriptName(37),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    VdsCertificateValidityInYears(38),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    RemoteBackend(39),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    RemoteUserBackend(40),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Remoting")
    RemoteInterface(41),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("AutoBackend_tcp")
    AutoRemoteInterface(42),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    RemoteUri(43),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("USERID")
    UserId(44),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("PASSW0RD") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    Password(45),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    IncludeDesktop(47),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    SearchResultsLimit(48),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("2")
    VDSAttemptsToResetCount(49),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    TimeoutToResetVdsInSeconds(50),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("VirtualMachineDomainName")
    VirtualMachineDomainName(51),
    // Per-OS sysprep product keys; empty means "not configured".
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey2003(52),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey2003x64(53),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey2008(54),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey2008x64(55),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey2008R2(56),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKeyWindow7(57),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKeyWindow7x64(58),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ProductKey(59),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    FreeSpaceLow(60),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    FreeSpaceCriticalLowInGB(61),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    MacPoolRanges(62),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    HasCluster(63),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("1.0.0.0")
    VdcVersion(64),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    SSLEnabled(65),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("DEFAULT")
    CipherSuite(66),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    MinVmDiskSize(67),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("250")
    MaxVmDiskSize(68),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("40")
    StoragePoolNameSizeLimit(69),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("50")
    StorageDomainNameSizeLimit(70),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    ImageCheckFailureMessageTimout(71),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    SlowStorageResponseMessageTimout(72),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    NumberOfFailedRunsOnVds(73),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    TimeToReduceFailedRunOnVdsInMinutes(74),
    /**
     * In default rerun Vm on all Available desktops
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    MaxRerunVmOnVdsCount(75),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("PerServer")
    AutoMode(78),
    // Per-OS sysprep answer-file paths; empty means "not configured".
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrepXPPath(79),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrep2K3Path(80),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrep2K8Path(81),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrep2K8x64Path(82),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrep2K8R2Path(83),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrepWindows7Path(84),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrepWindows7x64Path(85),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("7200")
    AutoPostbackDelay(86),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1000")
    MaxVmsInPool(87),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    VmPoolLeaseDays(88),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("12:00")
    VmPoolLeaseStartTime(89),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("12:00")
    VmPoolLeaseEndTime(90),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("WORKGROUP")
    DefaultWorkgroup(91),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("LDAP")
    AuthenticationMethod(92),
    @TypeConverterAttribute(java.util.List.class) @DefaultValueAttribute("1,2,4") @OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedStringArray)
    ValidNumOfMonitors(93),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("16")
    MaxNumOfVmCpus(94),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("16")
    MaxNumOfVmSockets(95),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("16")
    MaxNumOfCpuPerSocket(96),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    NumberVmRefreshesBeforeSave(97),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    AutoRepoDomainRefreshTime(99),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("certs/ca.pem")
    CACertificatePath(100),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ca")
    CABaseDirectory(101),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("certs/engine.cer")
    CertificateFileName(102),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    CertificatePassword(103),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    CertificateFingerPrint(104),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    LicenseCertificateFingerPrint(105),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    VdcBootStrapUrl(106),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    InstallVds(108),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    EnableUSBAsDefault(110),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("600")
    SSHInactivityTimoutSeconds(111),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("120")
    ServerRebootTimeout(112),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("40")
    VmGracefulShutdownTimeout(113),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    VmPriorityMaxValue(114),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Shutting Down")
    VmGracefulShutdownMessage(115),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    SearchesRefreshRateInSeconds(116),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("smain,sinputs")
    SpiceSecureChannels(117),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    AutoSuspendTimeInMinutes(118),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("shift+f12")
    SpiceReleaseCursorKeys(119),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("shift+f11")
    SpiceToggleFullScreenKeys(120),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    SpiceUsbAutoShare(121),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    RDPLoginWithFQN(122),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    UseSecureConnectionWithServers(123),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("500")
    MaxResultsPageSize(124),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("RedHat")
    OrganizationName(125),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    IsMultilevelAdministrationOn(127),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    VdsRecoveryTimeoutInMintues(128),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("2047")
    MaxDiskSize(129),
    // the order is- {level}:{name}:{flags}:{vdsm};
    // {level}:{name}:{flags}:{vdsm};1:cpu_name:cpu_flags,..,:vdsm_exec,+..,-..;..
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("1:pentium3:vmx:pentium3;2:intel-qemu64-nx:vmx,sse2:qemu64,-nx,+sse2;3:intel-qemu64:vmx,sse2,nx:qemu64,+sse2;2:amd-qemu64-nx:svm,sse2:qemu64,-nx,+sse2;3:amd-qemu64:svm,sse2,nx:qemu64,+sse2")
    ServerCPUList(130),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    UseVdsBrokerInProc(131),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("RHEV-Agent")
    AgentAppName(132),
    @TypeConverterAttribute(Map.class) @DefaultValueAttribute("{\"windows\":\"RHEV-Spice\",\"linux\":\"xorg-x11-drv-qxl\"}")
    SpiceDriverNameInGuest(133),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("RHEV-toolsSetup_")
    GuestToolsSetupIsoPrefix(134),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    VcpuConsumptionPercentage(135),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("OvfMetaFiles\\")
    OvfDirectoryPath(136),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/images/export/")
    ExportVmDefaultPath(137),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/images/import/")
    ImportDefaultPath(138),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("None")
    VdsSelectionAlgorithm(139),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    EnableVdsLoadBalancing(140),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    VdsLoadBalancingeIntervalInMinutes(141),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("0")
    LowUtilizationForEvenlyDistribute(142),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("6")
    LowUtilizationForPowerSave(143),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    HighUtilizationForEvenlyDistribute(144),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    HighUtilizationForPowerSave(145),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("80")
    UtilizationThresholdInPercent(146),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("2")
    CpuOverCommitDurationMinutes(147),
    // a default of 120% memory over commit.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("120")
    MaxVdsMemOverCommit(148),
    // a default of 120% memory over commit.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("120")
    MaxVdsMemOverCommitForServers(149),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientGUI(150),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    PowerClientMaxNumberOfConcurrentVMs(151),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientAllowUsingAsIRS(152),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientAutoMigrateToPowerClientOnConnect(153),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientAutoMigrateFromPowerClientToVdsWhenConnectingFromRegularClient(154),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientAutoMigrateCheckOnRDP(155),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientSpiceDynamicCompressionManagement(156),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientAutoAdjustMemory(157),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientAutoAdjustMemoryBaseOnAvailableMemory(158),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientAutoAdjustMemoryLog(159),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("256")
    PowerClientAutoAdjustMemoryGeneralReserve(160),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    PowerClientAutoAdjustMemorySpicePerSessionReserve(161),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("50")
    PowerClientAutoAdjustMemorySpicePerMonitorReserve(162),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3712")
    PowerClientAutoAdjustMemoryMaxMemory(163),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("64")
    PowerClientAutoAdjustMemoryModulus(164),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientAutoInstallCertificateOnApprove(165),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientAllowRunningGuestsWithoutTools(166),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientLogDetection(167),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    LogVdsRegistration(168),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    DebugTimerLogging(169),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    DebugSearchLogging(170),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    PowerClientAutoApprovePatterns(171),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("99408929-82CF-4DC7-A532-9D998063FA95")
    PowerClientAutoRegistrationDefaultVdsGroupID(172),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientRunVmShouldVerifyPendingVMsAsWell(173),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    PowerClientDedicatedVmLaunchOnVdsWhilePowerClientStarts(174),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    RenewGuestIpOnVdsSubnetChange(176),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    RenewGuestIpOnVdsSubnetChangeOnParseError(177),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    PowerClientUserPortalVdcManagedSpiceState(178),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    StoragePoolRefreshTimeInSeconds(179),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    StoragePoolNonOperationalResetTimeoutInMin(180),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    StorageDomainFalureTimeoutInMinutes(181),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ComputerADPaths(182),
    // Event-notification mail settings (ENMail*).
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ENMailHost(183),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("-1")
    ENMailPort(184),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ENMailUser(185),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    ENMailPassword(186),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ENMailDomain(187),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    ENMailIsBodyHtml(188),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    ENMailEnableSsl(189),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    ENMailUseDefaultCredentials(190),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("System,Sparse,COW,true;Data,Preallocated,RAW,false;Shared,Preallocated,RAW,false;Swap,Preallocated,RAW,false;Temp,Sparse,COW,false")
    DiskConfigurationList(191),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    SPMFailOverAttempts(192),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ON")
    LockPolicy(193),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    LockRenewalIntervalSec(194),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    LeaseTimeSec(195),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    IoOpTimeoutSec(196),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    LeaseRetries(197),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("en-us")
    VncKeyboardLayout(203),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    SpmCommandFailOverRetries(204),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    SpmVCpuConsumption(205),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    RedirectServletReportsPage(251),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Reports not installed, please contact your administrator")
    RedirectServletReportsPageError(252),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    EnableSpiceRootCertificateValidation(206),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100000")
    MaxMacsCountInPool(207),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    NumberOfVmsForTopSizeVms(208),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("256")
    VMMinMemorySizeInMB(210),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("20480")
    VM32BitMaxMemorySizeInMB(211),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("262144")
    VM64BitMaxMemorySizeInMB(212),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("0")
    BlockMigrationOnSwapUsagePercentage(213),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    EnableSwapCheck(214),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    SendSMPOnRunVm(215),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("pc")
    EmulatedMachine(216),
    // DB* options resolve to the engine-specific value named by realValue, per DBEngine.
    @TypeConverterAttribute(String.class) @DefaultValueAttribute(" WHERE RowNum BETWEEN %1$s AND %2$s") @OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine, realValue = "PagingSyntax")
    DBPagingSyntax(217),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Range") @OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine, realValue = "PagingType")
    DBPagingType(218),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("SELECT * FROM (SELECT *, ROW_NUMBER() OVER(%1$s) as RowNum FROM (%2$s)) as T1 ) as T2 %3$s") @OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine, realValue = "SearchTemplate")
    DBSearchTemplate(219),
    // Do not use those (used internally)
    @TypeConverterAttribute(String.class) @DefaultValueAttribute(" WHERE RowNum BETWEEN {0} AND {1}")
    SQLServerPagingSyntax(220),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Range")
    SQLServerPagingType(221),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("SELECT * FROM (SELECT *, ROW_NUMBER() OVER({0}) as RowNum FROM ( {1})) as T1 ) as T2 {2}")
    SQLServerSearchTemplate(222),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute(" OFFSET {0} LIMIT {1}")
    PostgresPagingSyntax(223),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Offset")
    PostgresPagingType(224),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("SELECT * FROM ( {1}) as T1 {2}")
    PostgresSearchTemplate(225),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    LogDBCommands(226),
    @TypeConverterAttribute(java.util.HashSet.class) @DefaultValueAttribute("4.4,4.5") @OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedVersionArray)
    SupportedVDSMVersions(227),
    @TypeConverterAttribute(java.util.HashSet.class) @DefaultValueAttribute("2.2,3.0") @OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedVersionArray)
    SupportedClusterLevels(228),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ENGINE")
    OvfVirtualSystemType(229),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    WaitForVdsInitInSec(230),
    // JTODO - temporarily using values from 256 for Java specific options
    @TypeConverterAttribute(String.class) @DefaultValueAttribute(".keystore")
    keystoreUrl(256),
    // TODO: handle password behavior
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("NoSoup4U")
    // @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    keystorePass(257),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute(".keystore")
    TruststoreUrl(258),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("NoSoup4U") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    TruststorePass(259),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("(GMT) GMT Standard Time")
    DefaultTimeZone(260),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    AsyncPollingCyclesBeforeRefreshSuspend(261),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("120")
    AsyncPollingCyclesBeforeCallbackCleanup(262),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("389")
    LDAPServerPort(263),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("SignReq.bat")
    SignScriptName(264),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("engine")
    CertAlias(265),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("8080")
    PublicURLPort(266),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    SANWipeAfterDelete(267),
    // NOTE(review): duplicate id 267 (also used by SANWipeAfterDelete above);
    // forValue(267) resolves to this later declaration.
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("c:\\Program Files\\RedHat\\oVirt Engine")
    ConfigDir(267),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("c:\\Program Files\\RedHat\\oVirt Engine\\Service")
    DataDir(268),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    UserSessionTimeOutInterval(269),
    // NOTE(review): id 290 is also used by FenceStartStatusRetries below, which wins in forValue().
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/data/images/rhev")
    RhevhLocalFSPath(290),
    // NOTE(review): id 289 is also used by MaxLDAPQueryPartsNumber below, which wins in forValue().
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    SupportCustomProperties(289),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    LocalStorageEnabled(270),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("default:GSSAPI")
    LDAPSecurityAuthentication(271),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    UserDefinedVMProperties(272),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    PredefinedVMProperties(273),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("250")
    MaxNumberOfHostsInStoragePool(274),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("true")
    LimitNumberOfNetworkInterfaces(275),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("15")
    MaxVmNameLengthWindows(276),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("64")
    MaxVmNameLengthNonWindows(277),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("255")
    MaxVdsNameLength(278),
    @TypeConverterAttribute(Double.class) @DefaultValueAttribute("30")
    MaxStorageVdsTimeoutCheckSec(279),
    @TypeConverterAttribute(Double.class) @DefaultValueAttribute("5")
    MaxStorageVdsDelayCheckSec(280),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("300")
    DisableFenceAtStartupInSec(281),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    NicDHCPDelayGraceInMS(282),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    FindFenceProxyRetries(283),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    FindFenceProxyDelayBetweenRetriesInSec(284),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1024")
    LogPhysicalMemoryThresholdInMB(285),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("auth-conf")
    SASL_QOP(286),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("%JBOSS_HOME%\\standalone\\deployments\\engine.ear")
    ENGINEEarLib(287),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1000")
    LdapQueryPageSize(288),
    // NOTE(review): duplicate id 289 (see SupportCustomProperties above); this declaration wins.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    MaxLDAPQueryPartsNumber(289),
    // NOTE(review): duplicate id 290 (see RhevhLocalFSPath above); this declaration wins.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    FenceStartStatusRetries(290),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    FenceStartStatusDelayBetweenRetriesInSec(291),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    LDAPQueryTimeout(292),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    SupportGetDevicesVisibility(293),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("default,ich6")
    DesktopAudioDeviceType(294),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    SupportStorageFormat(295),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("0")
    SupportedStorageFormats(296),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    UseRtl8139_pv(297),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ILIKE") @OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine, realValue = "LikeSyntax")
    DBLikeSyntax(298),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("LIKE")
    SQLServerLikeSyntax(299),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ILIKE")
    PostgresLikeSyntax(300),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("") @OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine, realValue = "I18NPrefix")
    DBI18NPrefix(301),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("N")
    SQLI18NPrefix(302),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    PostgresI18NPrefix(303),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60000")
    UknownTaskPrePollingLapse(304),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    LdapServers(305),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("/var/lock/ovirt-engine/.openssl.exclusivelock")
    SignLockFile(306),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("3.0.0.0")
    ProductRPMVersion(307),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10000")
    MaxAuditLogMessageLength(308),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    SysPrepDefaultUser(309),
    @TypeConverterAttribute(String.class) @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password) @DefaultValueAttribute("")
    SysPrepDefaultPassword(310),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ilo3=ipmilan")
    FenceAgentMapping(311),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ilo3:lanplus,power_wait=4")
    FenceAgentDefaultParams(312),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("admin")
    AdminUser(313),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("internal")
    AdminDomain(314),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("") @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
    AdminPassword(315),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("30")
    SignCertTimeoutInSeconds(316),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("600")
    OtpExpirationInSeconds(317),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("20")
    ConnectToServerTimeoutInSeconds(318),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    IPTablesConfig(319),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    CustomPublicConfig_AppsWebSite(320),
    /**
     * Lower threshold for disk space on host to be considered low, in MB.
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1000")
    VdsLocalDisksLowFreeSpace(321),
    /**
     * Lower threshold for disk space on host to be considered critically low (almost out of space), in MB.
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("500")
    VdsLocalDisksCriticallyLowFreeSpace(322),
    /**
     * The minimal size of the internal thread pool. Minimal number of threads in pool
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("50")
    DefaultMinThreadPoolSize(323),
    /**
     * The size of the internal thread pool
     */
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("500")
    DefaultMaxThreadPoolSize(324),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    IsNeedSupportForOldVgAPI(325),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("1")
    InitStorageSparseSizeInGB(326),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("ovirtmgmt")
    ManagementNetwork(328),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("5.8")
    OvirtInitialSupportedIsoVersion(329),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("rhevh")
    OvirtIsoPrefix(330),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("80")
    QuotaThresholdVdsGroup(331),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("80")
    QuotaThresholdStorage(332),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("20")
    QuotaGraceVdsGroup(333),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("20")
    QuotaGraceStorage(334),
    /**
     * The base URL for the documentation web-site
     */
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    DocsURL(335),
    // This value indicates devices that although are given to us by VDSM
    // are still treated as managed devices
    // This should be a [device=<device> type=<type>[,]]* string
    // NOTE(review): id 336 is also used by OriginType below, which wins in forValue().
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("")
    ManagedDevicesWhiteList(336),
    /**
     * The origin type to be used for VM and VM template creation
     */
    // NOTE(review): duplicate id 336 (see ManagedDevicesWhiteList above); this declaration wins.
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("OVIRT")
    OriginType(336),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    HotPlugEnabled(337),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("Windows2008,Windows2008x64,Windows2008R2x64,RHEL5,RHEL5x64,RHEL6,RHEL6x64")
    HotPlugSupportedOsList(338),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    AllowDuplicateMacAddresses(339),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    SetupNetworksPollingTimeout(340),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    JobCleanupRateInMinutes(341),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("10")
    SucceededJobCleanupTimeInMinutes(342),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("60")
    FailedJobCleanupTimeInMinutes(343),
    // NOTE(review): id 344 is also used by VmPoolMonitorIntervalInMinutes below, which wins in forValue().
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("100")
    JobPageSize(344),
    // NOTE(review): id 345 is also used by VmPoolMonitorBatchSize below, which wins in forValue().
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("50")
    MaxCorrelationIdLength(345),
    // NOTE(review): duplicate id 344 (see JobPageSize above); this declaration wins.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    VmPoolMonitorIntervalInMinutes(344),
    // NOTE(review): duplicate id 345 (see MaxCorrelationIdLength above); this declaration wins.
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("5")
    VmPoolMonitorBatchSize(345),
    @TypeConverterAttribute(String.class) @DefaultValueAttribute("0 0/5 * * * ?")
    AutoRecoverySchedule(346),
    @TypeConverterAttribute(Integer.class) @DefaultValueAttribute("3")
    VmPoolMonitorMaxAttempts(347),
    @TypeConverterAttribute(Boolean.class) @DefaultValueAttribute("false")
    LiveSnapshotEnabled(348),

    // Sentinel for unrecognized ids.
    Invalid(65535);

    // Legacy numeric id of this option (the constructor argument above).
    private int intValue;
    // Reverse-lookup table: numeric id -> constant. For duplicated ids the later
    // declaration overwrites the earlier one (plain HashMap.put in declaration order).
    private static Map<Integer, ConfigValues> mappings;

    static {
        mappings = new HashMap<Integer, ConfigValues>();
        for (ConfigValues action : values()) {
            mappings.put(action.getValue(), action);
        }
    }

    private ConfigValues(int value) {
        intValue = value;
    }

    /** Returns the legacy numeric id of this option. */
    public int getValue() {
        return intValue;
    }

    /**
     * Resolves a legacy numeric id back to its constant; returns null for unknown ids.
     * For the duplicated ids flagged above, only the later declaration is reachable.
     */
    public static ConfigValues forValue(int value) {
        return mappings.get(value);
    }
}
package ru.r2cloud.jradio.technosat;

import java.io.DataInputStream;
import java.io.IOException;

/**
 * Standard AOCS (attitude & orbit control system) telemetry record, decoded
 * field-by-field from a {@link DataInputStream}.
 *
 * Wire layout as read by the constructor:
 *   byte 0: node status bit-field (node number, watchdog, boot slot, PPS/UTC flags, upload busy)
 *   byte 1: ACS mode (upper 5 bits) + sensor-received flags (lower 3 bits)
 *   byte 2: more sensor/actuator "data received" flags (bit 5 unused)
 *   4 x signed short: attitude quaternion components, scaled by 1e-4
 *   3 x signed byte: body rates, scaled by 0.2 (units not visible here — presumably deg/s, TODO confirm)
 *   3 x signed byte: position components, scaled by 100 (units not visible here — verify against mission docs)
 */
public class StdTmAOCS {

	private byte nodeNo; // redundant node number
	private boolean rstEn; // the watchdog application is enabled to reset the node
	private byte botSlt; // currently running internal software slot
	private boolean synPps; // shall the node synchronize with the PPS signal
	private boolean disUTC; // shall the node distribute the UTC time at the next PPS signal
	private boolean dulBsy; // Indicates the state of the UploadManagers Flash Controller
	private AcsMode acsMode; //
	private boolean mfsReceived; // True, if data from the magnetic field sensor system was received in the current control cycle
	private boolean sssReceived; // True, if data from the Sun sensor system was received in the current control cycle
	private boolean gyrReceived; // True, if data from the MEMS gyro system was received in the current control cycle
	private boolean forReceived; // True, if data from the fiber optic rate sensor system was received in the current control cycle
	private boolean strReceived; // True, if data from the star tracker system was received in the current control cycle
	private boolean mtsReceived; // True, if data from the magnetic torquer system was received in the current control cycle
	private boolean rw0Received; // True, if data from the reaction wheel 0 was received in the current control cycle
	private boolean rw1Received; // True, if data from the reaction wheel 1 was received in the current control cycle
	private boolean rw2Received; // True, if data from the reaction wheel 2 was received in the current control cycle
	private boolean rw3Received; // True, if data from the reaction wheel 3 was received in the current control cycle
	private float stdQS; // quaternion scalar component (raw short * 1e-4)
	private float stdQX; // quaternion X component (raw short * 1e-4)
	private float stdQY; // quaternion Y component (raw short * 1e-4)
	private float stdQZ; // quaternion Z component (raw short * 1e-4)
	private float stdRateX; // rate X (raw byte * 0.2)
	private float stdRateY; // rate Y (raw byte * 0.2)
	private float stdRateZ; // rate Z (raw byte * 0.2)
	private int stdRX; // position X (raw byte * 100)
	private int stdRY; // position Y (raw byte * 100)
	private int stdRZ; // position Z (raw byte * 100)

	/**
	 * Decodes one record from the stream, consuming 3 + 8 + 3 + 3 = 17 bytes.
	 *
	 * @param dis source stream positioned at the start of the record
	 * @throws IOException if the stream ends early or fails to read
	 */
	public StdTmAOCS(DataInputStream dis) throws IOException {
		// Byte 0, MSB first: nodeNo[7] rstEn[6] botSlt[5:3] synPps[2] disUTC[1] dulBsy[0]
		int raw = dis.readUnsignedByte();
		nodeNo = (byte) (raw >> 7);
		rstEn = ((raw >> 6) & 0x1) > 0;
		botSlt = (byte) ((raw >> 3) & 0x7);
		synPps = ((raw >> 2) & 0x1) > 0;
		disUTC = ((raw >> 1) & 0x1) > 0;
		dulBsy = (raw & 0x1) > 0;
		// Byte 1: acsMode[7:3] mfs[2] sss[1] gyr[0]
		raw = dis.readUnsignedByte();
		acsMode = AcsMode.valueOfCode(raw >> 3);
		mfsReceived = ((raw >> 2) & 0x1) > 0;
		sssReceived = ((raw >> 1) & 0x1) > 0;
		gyrReceived = (raw & 0x1) > 0;
		// Byte 2: for[7] str[6] (bit 5 unused) mts[4] rw0[3] rw1[2] rw2[1] rw3[0]
		raw = dis.readUnsignedByte();
		forReceived = ((raw >> 7) & 0x1) > 0;
		strReceived = ((raw >> 6) & 0x1) > 0;
		// skip 1 bit
		mtsReceived = ((raw >> 4) & 0x1) > 0;
		rw0Received = ((raw >> 3) & 0x1) > 0;
		rw1Received = ((raw >> 2) & 0x1) > 0;
		rw2Received = ((raw >> 1) & 0x1) > 0;
		rw3Received = (raw & 0x1) > 0;
		// Quaternion: 4 big-endian shorts scaled to floats
		stdQS = dis.readShort() * 0.0001f;
		stdQX = dis.readShort() * 0.0001f;
		stdQY = dis.readShort() * 0.0001f;
		stdQZ = dis.readShort() * 0.0001f;
		// Rates: signed bytes, 0.2 per LSB
		stdRateX = dis.readByte() * 0.2f;
		stdRateY = dis.readByte() * 0.2f;
		stdRateZ = dis.readByte() * 0.2f;
		// Position: signed bytes, 100 per LSB
		stdRX = dis.readByte() * 100;
		stdRY = dis.readByte() * 100;
		stdRZ = dis.readByte() * 100;
	}

	public byte getNodeNo() {
		return nodeNo;
	}

	public void setNodeNo(byte nodeNo) {
		this.nodeNo = nodeNo;
	}

	public boolean isRstEn() {
		return rstEn;
	}

	public void setRstEn(boolean rstEn) {
		this.rstEn = rstEn;
	}

	public byte getBotSlt() {
		return botSlt;
	}

	public void setBotSlt(byte botSlt) {
		this.botSlt = botSlt;
	}

	public boolean isSynPps() {
		return synPps;
	}

	public void setSynPps(boolean synPps) {
		this.synPps = synPps;
	}

	public boolean isDisUTC() {
		return disUTC;
	}

	public void setDisUTC(boolean disUTC) {
		this.disUTC = disUTC;
	}

	public boolean isDulBsy() {
		return dulBsy;
	}

	public void setDulBsy(boolean dulBsy) {
		this.dulBsy = dulBsy;
	}

	public AcsMode getAcsMode() {
		return acsMode;
	}

	public void setAcsMode(AcsMode acsMode) {
		this.acsMode = acsMode;
	}

	public boolean isMfsReceived() {
		return mfsReceived;
	}

	public void setMfsReceived(boolean mfsReceived) {
		this.mfsReceived = mfsReceived;
	}

	public boolean isSssReceived() {
		return sssReceived;
	}

	public void setSssReceived(boolean sssReceived) {
		this.sssReceived = sssReceived;
	}

	public boolean isGyrReceived() {
		return gyrReceived;
	}

	public void setGyrReceived(boolean gyrReceived) {
		this.gyrReceived = gyrReceived;
	}

	public boolean isForReceived() {
		return forReceived;
	}

	public void setForReceived(boolean forReceived) {
		this.forReceived = forReceived;
	}

	public boolean isStrReceived() {
		return strReceived;
	}

	public void setStrReceived(boolean strReceived) {
		this.strReceived = strReceived;
	}

	public boolean isMtsReceived() {
		return mtsReceived;
	}

	public void setMtsReceived(boolean mtsReceived) {
		this.mtsReceived = mtsReceived;
	}

	public boolean isRw0Received() {
		return rw0Received;
	}

	public void setRw0Received(boolean rw0Received) {
		this.rw0Received = rw0Received;
	}

	public boolean isRw1Received() {
		return rw1Received;
	}

	public void setRw1Received(boolean rw1Received) {
		this.rw1Received = rw1Received;
	}

	public boolean isRw2Received() {
		return rw2Received;
	}

	public void setRw2Received(boolean rw2Received) {
		this.rw2Received = rw2Received;
	}

	public boolean isRw3Received() {
		return rw3Received;
	}

	public void setRw3Received(boolean rw3Received) {
		this.rw3Received = rw3Received;
	}

	public float getStdQS() {
		return stdQS;
	}

	public void setStdQS(float stdQS) {
		this.stdQS = stdQS;
	}

	public float getStdQX() {
		return stdQX;
	}

	public void setStdQX(float stdQX) {
		this.stdQX = stdQX;
	}

	public float getStdQY() {
		return stdQY;
	}

	public void setStdQY(float stdQY) {
		this.stdQY = stdQY;
	}

	public float getStdQZ() {
		return stdQZ;
	}

	public void setStdQZ(float stdQZ) {
		this.stdQZ = stdQZ;
	}

	public float getStdRateX() {
		return stdRateX;
	}

	public void setStdRateX(float stdRateX) {
		this.stdRateX = stdRateX;
	}

	public float getStdRateY() {
		return stdRateY;
	}

	public void setStdRateY(float stdRateY) {
		this.stdRateY = stdRateY;
	}

	public float getStdRateZ() {
		return stdRateZ;
	}

	public void setStdRateZ(float stdRateZ) {
		this.stdRateZ = stdRateZ;
	}

	public int getStdRX() {
		return stdRX;
	}

	public void setStdRX(int stdRX) {
		this.stdRX = stdRX;
	}

	public int getStdRY() {
		return stdRY;
	}

	public void setStdRY(int stdRY) {
		this.stdRY = stdRY;
	}

	public int getStdRZ() {
		return stdRZ;
	}

	public void setStdRZ(int stdRZ) {
		this.stdRZ = stdRZ;
	}
}
/*
 * Copyright (c) 2014-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;

import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EditPartConstants.BAM_MEDIATOR_ICON_PATH;
import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EditPartConstants.DEFAULT_PROPERTY_VALUE_TEXT;

import org.apache.commons.lang.StringUtils;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator;
import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedSizedAbstractMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ShowPropertyViewEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.BAMMediatorCanonicalEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.BAMMediatorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;

/**
 * GMF edit part for the BAM mediator node in the ESB diagram: wires up edit
 * policies, builds the node figure, and routes the input/output connector
 * border items to fixed west/east positions.
 *
 * Mostly GMF-generated code; methods tagged {@code @generated NOT} have been
 * customized by hand.
 *
 * @generated NOT
 */
public class BAMMediatorEditPart extends FixedSizedAbstractMediator {

    /**
     * Visual id of this node in the generated diagram model.
     *
     * @generated
     */
    public static final int VISUAL_ID = 3680;

    /**
     * Content pane returned by {@link #getContentPane()}; set up in
     * {@link #createMainFigure()}.
     *
     * @generated
     */
    protected IFigure contentPane;

    /**
     * @generated
     */
    public BAMMediatorEditPart(View view) {
        super(view);
    }

    /**
     * Installs creation, semantic, drag-drop, canonical, layout and
     * open (double-click -> property view) edit policies.
     *
     * @generated NOT
     */
    protected void createDefaultEditPolicies() {
        installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicy());
        super.createDefaultEditPolicies();
        installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new BAMMediatorItemSemanticEditPolicy());
        installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new DragDropEditPolicy());
        installEditPolicy(EditPolicyRoles.CANONICAL_ROLE, new BAMMediatorCanonicalEditPolicy());
        installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
        // For handle Double click Event.
        installEditPolicy(EditPolicyRoles.OPEN_ROLE, new ShowPropertyViewEditPolicy());
        // XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies
        // removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
    }

    /**
     * Layout policy: connector children get border-item selection behavior,
     * everything else falls back to the child's own primary-drag policy (or a
     * non-resizable default). Move/create commands are disabled (return null).
     *
     * @generated
     */
    protected LayoutEditPolicy createLayoutEditPolicy() {
        org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {

            protected EditPolicy createChildEditPolicy(EditPart child) {
                View childView = (View) child.getModel();
                switch (EsbVisualIDRegistry.getVisualID(childView)) {
                case BAMMediatorInputConnectorEditPart.VISUAL_ID:
                case BAMMediatorOutputConnectorEditPart.VISUAL_ID:
                    return new BorderItemSelectionEditPolicy();
                }
                EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
                if (result == null) {
                    result = new NonResizableEditPolicy();
                }
                return result;
            }

            protected Command getMoveChildrenCommand(Request request) {
                return null;
            }

            protected Command getCreateCommand(CreateRequest request) {
                return null;
            }
        };
        return lep;
    }

    /**
     * Creates the primary shape; the anonymous subclass re-connects and
     * re-allocates the figure whenever it is given a real (non-origin) bounds.
     *
     * @generated NOT
     */
    protected IFigure createNodeShape() {
        return primaryShape = new BAMMediatorFigure() {
            public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) {
                super.setBounds(rect);
                // (0,0) means the figure has not been placed yet; skip reconnection then.
                if (this.getBounds().getLocation().x != 0 && this.getBounds().getLocation().y != 0) {
                    connectToMostSuitableElement();
                    reAllocate(rect);
                }
            };
        };
    }

    /**
     * @generated
     */
    public BAMMediatorFigure getPrimaryShape() {
        return (BAMMediatorFigure) primaryShape;
    }

    /**
     * Places known fixed children: the description label inside the shape, and
     * the input/output connectors as west/east border items.
     * Returns true when the child was handled here.
     */
    protected boolean addFixedChild(EditPart childEditPart) {
        if (childEditPart instanceof BAMMediatorDescriptionEditPart) {
            ((BAMMediatorDescriptionEditPart) childEditPart).setLabel(getPrimaryShape()
                    .getBAMMediatorDescriptionLabel());
            return true;
        }
        if (childEditPart instanceof BAMMediatorInputConnectorEditPart) {
            IFigure borderItemFigure = ((BAMMediatorInputConnectorEditPart) childEditPart).getFigure();
            BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure,
                    PositionConstants.WEST, 0.5);
            getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
            return true;
        }
        if (childEditPart instanceof BAMMediatorOutputConnectorEditPart) {
            IFigure borderItemFigure = ((BAMMediatorOutputConnectorEditPart) childEditPart).getFigure();
            BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure,
                    PositionConstants.EAST, 0.5);
            getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
            return true;
        }
        return false;
    }

    /**
     * Mirror of {@link #addFixedChild(EditPart)}: removes connector figures from
     * the border item container. The description label needs no figure removal.
     */
    protected boolean removeFixedChild(EditPart childEditPart) {
        if (childEditPart instanceof BAMMediatorDescriptionEditPart) {
            return true;
        }
        if (childEditPart instanceof BAMMediatorInputConnectorEditPart) {
            getBorderedFigure().getBorderItemContainer().remove(
                    ((BAMMediatorInputConnectorEditPart) childEditPart).getFigure());
            return true;
        }
        if (childEditPart instanceof BAMMediatorOutputConnectorEditPart) {
            getBorderedFigure().getBorderItemContainer().remove(
                    ((BAMMediatorOutputConnectorEditPart) childEditPart).getFigure());
            return true;
        }
        return false;
    }

    protected void addChildVisual(EditPart childEditPart, int index) {
        if (addFixedChild(childEditPart)) {
            return;
        }
        // -1 appends; the incoming index is intentionally ignored for non-fixed children.
        super.addChildVisual(childEditPart, -1);
    }

    protected void removeChildVisual(EditPart childEditPart) {
        if (removeFixedChild(childEditPart)) {
            return;
        }
        super.removeChildVisual(childEditPart);
    }

    protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
        if (editPart instanceof IBorderItemEditPart) {
            return getBorderedFigure().getBorderItemContainer();
        }
        return getContentPane();
    }

    /**
     * @generated NOT
     */
    /*protected NodeFigure createNodePlate() {
    DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(40, 40);
    return result;
    }*/

    /**
     * Creates figure for this edit part.
     *
     * Body of this method does not depend on settings in generation model
     * so you may safely remove <i>generated</i> tag and modify it.
     *
     * @generated NOT
     */
    protected NodeFigure createMainFigure() {
        NodeFigure figure = createNodePlate();
        figure.setLayoutManager(new ToolbarLayout(true));
        IFigure shape = createNodeShape();
        figure.add(shape);
        contentPane = setupContentPane(shape);
        return figure;
    }

    /**
     * Default implementation treats passed figure as content pane.
     * Respects layout one may have set for generated figure.
     * @param nodeShape instance of generated figure class
     * @generated
     */
    protected IFigure setupContentPane(IFigure nodeShape) {
        if (nodeShape.getLayoutManager() == null) {
            ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout();
            layout.setSpacing(5);
            nodeShape.setLayoutManager(layout);
        }
        return nodeShape; // use nodeShape itself as contentPane
    }

    /**
     * @generated
     */
    public IFigure getContentPane() {
        if (contentPane != null) {
            return contentPane;
        }
        return super.getContentPane();
    }

    /**
     * @generated
     */
    protected void setForegroundColor(Color color) {
        if (primaryShape != null) {
            primaryShape.setForegroundColor(color);
        }
    }

    /**
     * @generated
     */
    protected void setBackgroundColor(Color color) {
        if (primaryShape != null) {
            primaryShape.setBackgroundColor(color);
        }
    }

    /**
     * @generated
     */
    protected void setLineWidth(int width) {
        if (primaryShape instanceof Shape) {
            ((Shape) primaryShape).setLineWidth(width);
        }
    }

    /**
     * @generated
     */
    protected void setLineType(int style) {
        if (primaryShape instanceof Shape) {
            ((Shape) primaryShape).setLineStyle(style);
        }
    }

    /**
     * The concrete figure of the BAM mediator node: icon, node name, tooltip
     * and a description label supplied by the shared property-name label.
     *
     * @generated
     */
    public class BAMMediatorFigure extends EsbGraphicalShapeWithLabel {

        // Label exposed to BAMMediatorDescriptionEditPart via the getter below.
        private WrappingLabel bamMediatorDescriptionLabel;

        /**
         * @generated
         */
        public BAMMediatorFigure() {
            this.setBackgroundColor(THIS_BACK);
            createContents();
        }

        /**
         * @generated NOT
         */
        private void createContents() {
            // NOTE(review): bAMPropertyLabel0 is configured but never added to the
            // figure tree here — it appears to be leftover generated code.
            WrappingLabel bAMPropertyLabel0 = new WrappingLabel();
            bAMPropertyLabel0.setText(DEFAULT_PROPERTY_VALUE_TEXT);
            bAMPropertyLabel0.setAlignment(SWT.CENTER);
            bamMediatorDescriptionLabel = getPropertyNameLabel();
        }

        public WrappingLabel getBAMMediatorDescriptionLabel() {
            return bamMediatorDescriptionLabel;
        }

        public String getIconPath() {
            return BAM_MEDIATOR_ICON_PATH;
        }

        public String getNodeName() {
            return Messages.BAMMediatorEditPart_NodeName;
        }

        public IFigure getToolTip() {
            if (StringUtils.isEmpty(toolTipMessage)) {
                return new Label(Messages.BAMMediatorEditPart_ToolTipMessage);
            } else {
                return new Label(toolTipMessage);
            }
        }
    }

    /**
     * Default background color of the node figure.
     *
     * @generated
     */
    static final Color THIS_BACK = new Color(null, 230, 230, 230);
}
// Copyright 2010-2015, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package org.mozc.android.inputmethod.japanese.ui;

import org.mozc.android.inputmethod.japanese.protobuf.ProtoCandidates.CandidateList;
import org.mozc.android.inputmethod.japanese.protobuf.ProtoCandidates.CandidateWord;
import org.mozc.android.inputmethod.japanese.ui.CandidateLayout.Row;
import org.mozc.android.inputmethod.japanese.ui.CandidateLayout.Span;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Layouts the conversion candidate words.
 *
 * First, all the rows this layouter creates are split into "chunk"s.
 * The width of each chunk is equal to {@code pageWidth / numChunks} evenly.
 * Next, the candidates are assigned to chunks. The order of the candidates is kept.
 * A candidate may occupy one or more successive chunks which are on the same row.
 *
 * The height of each row is round up to integer, so that the snap-paging
 * should work well.
 */
public class ConversionCandidateLayouter implements CandidateLayouter {

  /**
   * The metrics between chunk and span.
   *
   * The main purpose of this class is to inject the chunk compression
   * heuristics for testing.
   */
  static class ChunkMetrics {

    private final float chunkWidth;
    private final float compressionRatio;
    private final float horizontalPadding;
    private final float minWidth;

    ChunkMetrics(float chunkWidth,
                 float compressionRatio,
                 float horizontalPadding,
                 float minWidth) {
      this.chunkWidth = chunkWidth;
      this.compressionRatio = compressionRatio;
      this.horizontalPadding = horizontalPadding;
      this.minWidth = minWidth;
    }

    /** Returns the number of chunks which the span would consume. */
    int getNumChunks(Span span) {
      Preconditions.checkNotNull(span);
      float compressedValueWidth =
          compressValueWidth(span.getValueWidth(), compressionRatio, horizontalPadding, minWidth);
      // Round up: a span always occupies whole chunks.
      return (int) Math.ceil((compressedValueWidth + span.getDescriptionWidth()) / chunkWidth);
    }

    /**
     * Shrinks a raw value width by a geometric-progression heuristic, then
     * adds padding and clamps to {@code minWidth}.
     */
    static float compressValueWidth(
        float valueWidth, float compressionRatio, float horizontalPadding, float minWidth) {
      // Sum of geometric progression.
      // a == 1.0 (default pixel size)
      // r == candidateWidthCompressionRate (pixel width decay rate)
      // n == defaultWidth
      if (compressionRatio != 1) {
        valueWidth =
            (1f - (float) Math.pow(compressionRatio, valueWidth)) / (1f - compressionRatio);
      }
      return Math.max(valueWidth + horizontalPadding * 2, minWidth);
    }
  }

  // Factory creating a Span per candidate word; layout() bails out until set.
  private Optional<SpanFactory> spanFactory = Optional.absent();

  /** Horizontal common ratio of the value size. */
  private float valueWidthCompressionRate;

  /** Minimum width of the value. */
  private float minValueWidth;

  /** The Minimum width of the chunk. */
  private float minChunkWidth;

  /** Height of the value. */
  private float valueHeight;

  private float valueHorizontalPadding;
  private float valueVerticalPadding;

  /** The current view's width. */
  private int viewWidth;

  // When true, the first row keeps one chunk free for the input-fold button.
  private boolean reserveEmptySpan = false;

  /**
   * @param spanFactory the spanFactory to set
   */
  public void setSpanFactory(SpanFactory spanFactory) {
    this.spanFactory = Optional.of(Preconditions.checkNotNull(spanFactory));
  }

  /**
   * @param valueWidthCompressionRate the valueWidthCompressionRate to set
   */
  public void setValueWidthCompressionRate(float valueWidthCompressionRate) {
    this.valueWidthCompressionRate = valueWidthCompressionRate;
  }

  /**
   * @param minValueWidth the minValueWidth to set
   */
  public void setMinValueWidth(float minValueWidth) {
    this.minValueWidth = minValueWidth;
  }

  /**
   * @param minChunkWidth the minChunkWidth to set
   */
  public void setMinChunkWidth(float minChunkWidth) {
    this.minChunkWidth = minChunkWidth;
  }

  /**
   * @param valueHeight the valueHeight to set
   */
  public void setValueHeight(float valueHeight) {
    this.valueHeight = valueHeight;
  }

  /**
   * @param valueHorizontalPadding the valueHorizontalPadding to set
   */
  public void setValueHorizontalPadding(float valueHorizontalPadding) {
    this.valueHorizontalPadding = valueHorizontalPadding;
  }

  /**
   * @param valueVerticalPadding the valueVerticalPadding to set
   */
  public void setValueVerticalPadding(float valueVerticalPadding) {
    this.valueVerticalPadding = valueVerticalPadding;
  }

  /**
   * Remembers the view width. Height is intentionally ignored — only width
   * affects this layout. Returns true when a re-layout is needed.
   */
  @Override
  public boolean setViewSize(int width, int height) {
    if (viewWidth == width) {
      // Doesn't need to invalidate the layout if the width isn't changed.
      return false;
    }
    viewWidth = width;
    return true;
  }

  // NOTE(review): if viewWidth < minChunkWidth this returns 0 and
  // getChunkWidth() becomes Infinity (float division). layout() only guards
  // against non-positive values — confirm callers guarantee a sane view width.
  private int getNumChunks() {
    return (int) (viewWidth / minChunkWidth);
  }

  public float getChunkWidth() {
    return viewWidth / (float) getNumChunks();
  }

  @Override
  public int getPageWidth() {
    return Math.max(viewWidth, 0);
  }

  public int getRowHeight() {
    // Round up so that page snapping aligns to whole pixels.
    return (int) Math.ceil(valueHeight + valueVerticalPadding * 2);
  }

  @Override
  public int getPageHeight() {
    return getRowHeight();
  }

  /**
   * Lays the given candidates out into rows of chunk-aligned spans.
   * Returns absent when the layouter is not yet configured or there is
   * nothing to lay out.
   */
  @Override
  public Optional<CandidateLayout> layout(CandidateList candidateList) {
    Preconditions.checkNotNull(candidateList);
    if (minChunkWidth <= 0 || viewWidth <= 0
        || candidateList.getCandidatesCount() == 0 || !spanFactory.isPresent()) {
      return Optional.<CandidateLayout>absent();
    }

    int numChunks = getNumChunks();
    float chunkWidth = getChunkWidth();
    ChunkMetrics chunkMetrics = new ChunkMetrics(
        chunkWidth, valueWidthCompressionRate, valueHorizontalPadding, minValueWidth);

    List<Row> rowList =
        buildRowList(candidateList, spanFactory.get(), numChunks, chunkMetrics, reserveEmptySpan);

    int[] numAllocatedChunks = new int[numChunks];
    // The first row is narrower by one chunk when a span is reserved for the fold button.
    boolean isFirst = reserveEmptySpan;
    for (Row row : rowList) {
      layoutSpanList(
          row.getSpanList(),
          (isFirst ? (viewWidth - (int) chunkWidth) : viewWidth),
          (isFirst ? numChunks - 1 : numChunks),
          chunkMetrics, numAllocatedChunks);
      isFirst = false;
    }

    // Push empty span at the end of the first row.
    if (reserveEmptySpan) {
      Span emptySpan =
          new Span(Optional.<CandidateWord>absent(), 0, 0, Collections.<String>emptyList());
      List<Span> spanList = rowList.get(0).getSpanList();
      emptySpan.setLeft(spanList.get(spanList.size() - 1).getRight());
      emptySpan.setRight(viewWidth);
      rowList.get(0).addSpan(emptySpan);
    }

    // In order to snap the scrolling on any row boundary, rounding up the rowHeight
    // to align pixels.
    int rowHeight = getRowHeight();
    layoutRowList(rowList, viewWidth, rowHeight);

    return Optional.of(new CandidateLayout(rowList, viewWidth, rowHeight * rowList.size()));
  }

  /**
   * Builds the row list based on the number of estimated chunks for each span.
   *
   * The order of the candidates will be kept.
   */
  @VisibleForTesting
  static List<Row> buildRowList(
      CandidateList candidateList, SpanFactory spanFactory,
      int numChunks, ChunkMetrics chunkMetrics, boolean enableSpan) {
    Preconditions.checkNotNull(candidateList);
    Preconditions.checkNotNull(spanFactory);
    Preconditions.checkNotNull(chunkMetrics);

    List<Row> rowList = new ArrayList<Row>();
    int numRemainingChunks = 0;
    for (CandidateWord candidateWord : candidateList.getCandidatesList()) {
      Span span = spanFactory.newInstance(candidateWord);
      int numSpanChunks = chunkMetrics.getNumChunks(span);
      if (numRemainingChunks < numSpanChunks) {
        // There is no space on the current row to put the current span.
        // Create a new row.
        numRemainingChunks = numChunks;

        // For the first line, we reserve a chunk at right-top in order to place an icon
        // button for folding/expanding keyboard.
        if (enableSpan && rowList.isEmpty()) {
          numRemainingChunks--;
        }
        rowList.add(new Row());
      }

      // Add the span to the last row.
      rowList.get(rowList.size() - 1).addSpan(span);
      numRemainingChunks -= numSpanChunks;
    }
    return rowList;
  }

  /**
   * Sets left and right of each span. The left and right should be aligned to the chunks.
   * Also, the right of the last span should be equal to {@code pageWidth}.
   *
   * In order to avoid integer array memory allocation (as this method will be invoked
   * many times to layout a {@link CandidateList}), it is necessary to pass an integer
   * array for the calculation buffer, {@code numAllocatedChunks}.
   * The size of the buffer must be equal to or greater than {@code spanList.size()}.
   * Its elements needn't be initialized.
   */
  @VisibleForTesting
  static void layoutSpanList(
      List<Span> spanList, int pageWidth,
      int numChunks, ChunkMetrics chunkMetrics, int[] numAllocatedChunks) {
    Preconditions.checkNotNull(spanList);
    Preconditions.checkNotNull(chunkMetrics);
    Preconditions.checkNotNull(numAllocatedChunks);
    Preconditions.checkArgument(spanList.size() <= numAllocatedChunks.length);
    int numRemainingChunks = numChunks;

    // First, allocate the chunks based on the metrics.
    {
      int index = 0;
      for (Span span : spanList) {
        int numSpanChunks = Math.min(numRemainingChunks, chunkMetrics.getNumChunks(span));
        numAllocatedChunks[index] = numSpanChunks;
        numRemainingChunks -= numSpanChunks;
        ++index;
      }
    }

    // Then assign remaining chunks to each span as even as possible by round-robin.
    for (int index = 0; numRemainingChunks > 0;
         --numRemainingChunks, index = (index + 1) % spanList.size()) {
      ++numAllocatedChunks[index];
    }

    // Set the actual left and right to each span.
    {
      int index = 0;
      float left = 0;
      float spanWidth = pageWidth / (float) numChunks;
      int cumulativeNumAllocatedChunks = 0;
      for (Span span : spanList) {
        cumulativeNumAllocatedChunks += numAllocatedChunks[index++];
        float right = Math.min(spanWidth * cumulativeNumAllocatedChunks, pageWidth);
        span.setLeft(left);
        span.setRight(right);
        left = right;
      }
    }

    // Set the right of the last element to the pageWidth to align the page.
    spanList.get(spanList.size() - 1).setRight(pageWidth);
  }

  /** Sets top, width and height to the each row. */
  @VisibleForTesting
  static void layoutRowList(List<Row> rowList, int pageWidth, int rowHeight) {
    int top = 0;
    for (Row row : Preconditions.checkNotNull(rowList)) {
      row.setTop(top);
      row.setWidth(pageWidth);
      row.setHeight(rowHeight);
      top += rowHeight;
    }
  }

  /** Enables/disables the reserved first-row chunk for the input fold button. */
  public void reserveEmptySpanForInputFoldButton(boolean reserveEmptySpan) {
    this.reserveEmptySpan = reserveEmptySpan;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.utils; import java.io.DataInput; import java.io.DataOutput; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.util.Arrays; import static com.google.common.base.Charsets.UTF_8; import org.apache.cassandra.io.util.FileDataInput; import org.apache.cassandra.io.util.FileUtils; import org.apache.commons.lang.ArrayUtils; /** * Utility methods to make ByteBuffers less painful * The following should illustrate the different ways byte buffers can be used * * public void testArrayOffet() * { * * byte[] b = "test_slice_array".getBytes(); * ByteBuffer bb = ByteBuffer.allocate(1024); * * assert bb.position() == 0; * assert bb.limit() == 1024; * assert bb.capacity() == 1024; * * bb.put(b); * * assert bb.position() == b.length; * assert bb.remaining() == bb.limit() - bb.position(); * * ByteBuffer bb2 = bb.slice(); * * assert bb2.position() == 0; * * //slice should begin at other buffers current position * assert bb2.arrayOffset() == bb.position(); * * //to match the position in the underlying array 
one needs to
 * //track arrayOffset
 * assert bb2.limit()+bb2.arrayOffset() == bb.limit();
 *
 * assert bb2.remaining() == bb.remaining();
 *
 * }
 *
 * }
 *
 */
public class ByteBufferUtil
{
    /** Shared zero-length buffer; safe to share because it is never written to. */
    public static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(ArrayUtils.EMPTY_BYTE_ARRAY);

    /**
     * Lexicographically compares the remaining bytes of two buffers, treating
     * each byte as unsigned. Neither buffer's position is modified.
     *
     * @return negative/zero/positive as {@code o1} is less than/equal to/greater than {@code o2}
     */
    public static int compareUnsigned(ByteBuffer o1, ByteBuffer o2)
    {
        assert o1 != null;
        assert o2 != null;

        int minLength = Math.min(o1.remaining(), o2.remaining());
        for (int x = 0, i = o1.position(), j = o2.position(); x < minLength; x++, i++, j++)
        {
            if (o1.get(i) == o2.get(j))
                continue;
            // compare non-equal bytes as unsigned
            return (o1.get(i) & 0xFF) < (o2.get(j) & 0xFF) ? -1 : 1;
        }

        // All common bytes equal: the shorter buffer sorts first.
        return (o1.remaining() == o2.remaining()) ? 0 : ((o1.remaining() < o2.remaining()) ? -1 : 1);
    }

    /** Unsigned comparison of a byte array against a buffer. */
    public static int compare(byte[] o1, ByteBuffer o2)
    {
        return compareUnsigned(ByteBuffer.wrap(o1), o2);
    }

    /** Unsigned comparison of a buffer against a byte array. */
    public static int compare(ByteBuffer o1, byte[] o2)
    {
        return compareUnsigned(o1, ByteBuffer.wrap(o2));
    }

    /**
     * Decode a String representation.
     * This method assumes that the encoding charset is UTF_8.
     *
     * @param buffer a byte buffer holding the string representation
     * @return the decoded string
     */
    public static String string(ByteBuffer buffer) throws CharacterCodingException
    {
        return string(buffer, UTF_8);
    }

    /**
     * Decode a String representation.
     * This method assumes that the encoding charset is UTF_8.
     *
     * @param buffer a byte buffer holding the string representation
     * @param position the starting position in {@code buffer} to start decoding from
     * @param length the number of bytes from {@code buffer} to use
     * @return the decoded string
     */
    public static String string(ByteBuffer buffer, int position, int length) throws CharacterCodingException
    {
        return string(buffer, position, length, UTF_8);
    }

    /**
     * Decode a String representation.
     *
     * @param buffer a byte buffer holding the string representation
     * @param position the starting position in {@code buffer} to start decoding from
     * @param length the number of bytes from {@code buffer} to use
     * @param charset the String encoding charset
     * @return the decoded string
     */
    public static String string(ByteBuffer buffer, int position, int length, Charset charset) throws CharacterCodingException
    {
        // Work on a duplicate so the caller's position/limit are untouched.
        ByteBuffer copy = buffer.duplicate();
        copy.position(position);
        copy.limit(copy.position() + length);
        return string(copy, charset);
    }

    /**
     * Decode a String representation.
     *
     * @param buffer a byte buffer holding the string representation
     * @param charset the String encoding charset
     * @return the decoded string
     */
    public static String string(ByteBuffer buffer, Charset charset) throws CharacterCodingException
    {
        return charset.newDecoder().decode(buffer.duplicate()).toString();
    }

    /**
     * You should almost never use this.  Instead, use the write* methods to avoid copies.
     *
     * Returns the backing array directly when it exactly spans the buffer's
     * remaining bytes; otherwise returns a fresh copy.
     */
    public static byte[] getArray(ByteBuffer buffer)
    {
        int length = buffer.remaining();

        if (buffer.hasArray())
        {
            int start = buffer.position();
            if (buffer.arrayOffset() == 0 && start == 0 && length == buffer.array().length)
                return buffer.array();
            else
                return Arrays.copyOfRange(buffer.array(), start + buffer.arrayOffset(), start + length
                        + buffer.arrayOffset());
        }
        // else, DirectByteBuffer.get() is the fastest route
        byte[] bytes = new byte[length];
        buffer.duplicate().get(bytes);

        return bytes;
    }

    /**
     * ByteBuffer adaptation of org.apache.commons.lang.ArrayUtils.lastIndexOf method
     *
     * @param buffer the array to traverse for looking for the object, may be <code>null</code>
     * @param valueToFind the value to find
     * @param startIndex the start index (i.e. BB position) to travers backwards from
     * @return the last index (i.e. BB position) of the value within the array
     * [between buffer.position() and buffer.limit()]; <code>-1</code> if not found.
     */
    public static int lastIndexOf(ByteBuffer buffer, byte valueToFind, int startIndex)
    {
        assert buffer != null;

        if (startIndex < buffer.position())
        {
            return -1;
        }
        else if (startIndex >= buffer.limit())
        {
            // Clamp to the last valid index instead of failing.
            startIndex = buffer.limit() - 1;
        }

        for (int i = startIndex; i >= buffer.position(); i--)
        {
            if (valueToFind == buffer.get(i))
                return i;
        }

        return -1;
    }

    /**
     * Encode a String in a ByteBuffer using UTF_8.
     *
     * @param s the string to encode
     * @return the encoded string
     */
    public static ByteBuffer bytes(String s)
    {
        return ByteBuffer.wrap(s.getBytes(UTF_8));
    }

    /**
     * Encode a String in a ByteBuffer using the provided charset.
     *
     * @param s the string to encode
     * @param charset the String encoding charset to use
     * @return the encoded string
     */
    public static ByteBuffer bytes(String s, Charset charset)
    {
        return ByteBuffer.wrap(s.getBytes(charset));
    }

    /**
     * @return a new copy of the data in @param buffer
     * USUALLY YOU SHOULD USE ByteBuffer.duplicate() INSTEAD, which creates a new Buffer
     * (so you can mutate its position without affecting the original) without copying the underlying array.
     */
    public static ByteBuffer clone(ByteBuffer buffer)
    {
        assert buffer != null;

        if (buffer.remaining() == 0)
            return EMPTY_BYTE_BUFFER;

        ByteBuffer clone = ByteBuffer.allocate(buffer.remaining());

        if (buffer.hasArray())
        {
            // Heap buffer: bulk array copy is cheapest.
            System.arraycopy(buffer.array(), buffer.arrayOffset() + buffer.position(), clone.array(), 0, buffer.remaining());
        }
        else
        {
            // Direct buffer: copy through a duplicate so position is preserved.
            clone.put(buffer.duplicate());
            clone.flip();
        }

        return clone;
    }

    /**
     * Copies {@code length} bytes starting at absolute {@code position} of
     * {@code buffer} into {@code bytes[offset..]} without moving the buffer's position.
     */
    public static void arrayCopy(ByteBuffer buffer, int position, byte[] bytes, int offset, int length)
    {
        if (buffer.hasArray())
            System.arraycopy(buffer.array(), buffer.arrayOffset() + position, bytes, offset, length);
        else
            ((ByteBuffer) buffer.duplicate().position(position)).get(bytes, offset, length);
    }

    /**
     * Transfer bytes from one ByteBuffer to another.
     * This function acts as System.arrayCopy() but for ByteBuffers.
* * @param src the source ByteBuffer * @param srcPos starting position in the source ByteBuffer * @param dst the destination ByteBuffer * @param dstPos starting position in the destination ByteBuffer * @param length the number of bytes to copy */ public static void arrayCopy(ByteBuffer src, int srcPos, ByteBuffer dst, int dstPos, int length) { if (src.hasArray() && dst.hasArray()) { System.arraycopy(src.array(), src.arrayOffset() + srcPos, dst.array(), dst.arrayOffset() + dstPos, length); } else { if (src.limit() - srcPos < length || dst.limit() - dstPos < length) throw new IndexOutOfBoundsException(); for (int i = 0; i < length; i++) { dst.put(dstPos++, src.get(srcPos++)); } } } public static void writeWithLength(ByteBuffer bytes, DataOutput out) throws IOException { out.writeInt(bytes.remaining()); write(bytes, out); // writing data bytes to output source } public static void write(ByteBuffer buffer, DataOutput out) throws IOException { if (buffer.hasArray()) { out.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); } else { for (int i = buffer.position(); i < buffer.limit(); i++) { out.writeByte(buffer.get(i)); } } } public static void writeWithShortLength(ByteBuffer buffer, DataOutput out) { int length = buffer.remaining(); assert 0 <= length && length <= FBUtilities.MAX_UNSIGNED_SHORT : length; try { out.writeByte((length >> 8) & 0xFF); out.writeByte(length & 0xFF); write(buffer, out); // writing data bytes to output source } catch (IOException e) { throw new RuntimeException(e); } } public static ByteBuffer readWithLength(DataInput in) throws IOException { int length = in.readInt(); if (length < 0) { throw new IOException("Corrupt (negative) value length encountered"); } return ByteBufferUtil.read(in, length); } /* @return An unsigned short in an integer. 
*/ private static int readShortLength(DataInput in) throws IOException { int length = (in.readByte() & 0xFF) << 8; return length | (in.readByte() & 0xFF); } /** * @param in data input * @return An unsigned short in an integer. * @throws IOException if an I/O error occurs. */ public static ByteBuffer readWithShortLength(DataInput in) throws IOException { return ByteBufferUtil.read(in, readShortLength(in)); } /** * @param in data input * @return null * @throws IOException if an I/O error occurs. */ public static ByteBuffer skipShortLength(DataInput in) throws IOException { int skip = readShortLength(in); FileUtils.skipBytesFully(in, skip); return null; } private static ByteBuffer read(DataInput in, int length) throws IOException { ByteBuffer array; if (in instanceof FileDataInput) { array = ((FileDataInput) in).readBytes(length); } else { byte[] buff = new byte[length]; in.readFully(buff); array = ByteBuffer.wrap(buff); } return array; } /** * Convert a byte buffer to an integer. * Does not change the byte buffer position. 
* * @param bytes byte buffer to convert to integer * @return int representation of the byte buffer */ public static int toInt(ByteBuffer bytes) { return bytes.getInt(bytes.position()); } public static long toLong(ByteBuffer bytes) { return bytes.getLong(bytes.position()); } public static float toFloat(ByteBuffer bytes) { return bytes.getFloat(bytes.position()); } public static double toDouble(ByteBuffer bytes) { return bytes.getDouble(bytes.position()); } public static ByteBuffer bytes(int i) { return ByteBuffer.allocate(4).putInt(0, i); } public static ByteBuffer bytes(long n) { return ByteBuffer.allocate(8).putLong(0, n); } public static ByteBuffer bytes(float f) { return ByteBuffer.allocate(4).putFloat(0, f); } public static ByteBuffer bytes(double d) { return ByteBuffer.allocate(8).putDouble(0, d); } public static InputStream inputStream(ByteBuffer bytes) { final ByteBuffer copy = bytes.duplicate(); return new InputStream() { public int read() throws IOException { if (!copy.hasRemaining()) return -1; return copy.get() & 0xFF; } @Override public int read(byte[] bytes, int off, int len) throws IOException { if (!copy.hasRemaining()) return -1; len = Math.min(len, copy.remaining()); copy.get(bytes, off, len); return len; } @Override public int available() throws IOException { return copy.remaining(); } }; } public static String bytesToHex(ByteBuffer bytes) { StringBuilder sb = new StringBuilder(); for (int i = bytes.position(); i < bytes.limit(); i++) { int bint = bytes.get(i) & 0xff; if (bint <= 0xF) // toHexString does not 0 pad its results. sb.append("0"); sb.append(Integer.toHexString(bint)); } return sb.toString(); } public static ByteBuffer hexToBytes(String str) { return ByteBuffer.wrap(FBUtilities.hexToBytes(str)); } /** * Compare two ByteBuffer at specified offsets for length. * Compares the non equal bytes as unsigned. * @param bytes1 First byte buffer to compare. * @param offset1 Position to start the comparison at in the first array. 
* @param bytes2 Second byte buffer to compare. * @param offset2 Position to start the comparison at in the second array. * @param length How many bytes to compare? * @return -1 if byte1 is less than byte2, 1 if byte2 is less than byte1 or 0 if equal. */ public static int compareSubArrays(ByteBuffer bytes1, int offset1, ByteBuffer bytes2, int offset2, int length) { if ( null == bytes1 ) { if ( null == bytes2) return 0; else return -1; } if (null == bytes2 ) return 1; assert bytes1.limit() >= offset1 + length : "The first byte array isn't long enough for the specified offset and length."; assert bytes2.limit() >= offset2 + length : "The second byte array isn't long enough for the specified offset and length."; for ( int i = 0; i < length; i++ ) { byte byte1 = bytes1.get(offset1 + i); byte byte2 = bytes2.get(offset2 + i); if ( byte1 == byte2 ) continue; // compare non-equal bytes as unsigned return (byte1 & 0xFF) < (byte2 & 0xFF) ? -1 : 1; } return 0; } }
/**********************************************************************************
 * $URL: https://source.sakaiproject.org/svn/calendar/tags/sakai-10.6/calendar-summary-tool/tool/src/java/org/sakaiproject/tool/summarycalendar/ui/PrefsBean.java $
 * $Id: PrefsBean.java 311795 2014-08-11 13:54:01Z enietzel@anisakai.com $
 ***********************************************************************************
 *
 * Copyright (c) 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/
package org.sakaiproject.tool.summarycalendar.ui;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Map.Entry;
import java.util.regex.Pattern;

import javax.faces.application.FacesMessage;
import javax.faces.application.FacesMessage.Severity;
import javax.faces.context.FacesContext;
import javax.faces.el.ValueBinding;
import javax.faces.model.SelectItem;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.PreferencesEdit;
import org.sakaiproject.user.api.PreferencesService;
import org.sakaiproject.util.ResourceLoader;

/**
 * JSF backing bean for the Calendar Summary tool's preferences page.
 *
 * Reads and writes per-user settings (view mode, priority colors, and the
 * event-type-to-priority assignments) stored under the {@link #PREFS_KEY}
 * Preferences node, falling back to sakai.properties defaults
 * (keys prefixed with {@link #SAKPROP_BASE}) when the user has no value set.
 */
public class PrefsBean {

	/** Preferences properties */
	public static String PREFS_KEY = "sakai:calendar:calendar-summary";
	public static String PREFS_LAST_MODIFIED = "lastModified";
	public static String PREFS_VIEW_MODE = "viewMode";
	public static String PREFS_HIGHPRIORITY_COLOR = "highPriorityColor";
	public static String PREFS_MEDIUMPRIORITY_COLOR = "mediumPriorityColor";
	public static String PREFS_LOWPRIORITY_COLOR = "lowPriorityColor";
	public static String PREFS_HIGHPRIORITY_EVENTS = "highPriorityEvents";
	public static String PREFS_MEDIUMPRIORITY_EVENTS = "mediumPriorityEvents";
	public static String PREFS_LOWPRIORITY_EVENTS = "lowPriorityEvents";

	/** sakai.properties default values */
	public static String SAKPROP_BASE = "calendarSummary.";

	/** Our log (commons). */
	private static Log LOG = LogFactory.getLog(PrefsBean.class);

	/** Resource bundle */
	private transient ResourceLoader msgs = new ResourceLoader("calendar");

	/** Bean members */
	private List viewModes = null;
	private String selectedViewMode = null;
	private String selectedHighPrColor = null;
	private String selectedMediumPrColor = null;
	private String selectedLowPrColor = null;
	//private static List eventTypes = null;
	private Collection highPriorityEvents = null;
	private Collection mediumPriorityEvents = null;
	private Collection lowPriorityEvents = null;

	/** Private members */
	// one-shot feedback message; consumed (and cleared) by isMessageToBeDisplayed()
	private String message = null;
	private Severity messageSeverity = null;
	// lazy caches; null means "re-read from Preferences on next access"
	private Map priorityColorsMap = null;
	private Map priorityEventsMap = null;

	/** Sakai Services */
	private static transient PreferencesService M_ps = (PreferencesService) ComponentManager.get(PreferencesService.class.getName());
	private static transient SessionManager M_sm = (SessionManager) ComponentManager.get(SessionManager.class.getName());
	private static transient ServerConfigurationService M_cfg = (ServerConfigurationService) ComponentManager.get(ServerConfigurationService.class.getName());

	// matches "#rgb" or "#rrggbb" hex color codes
	private static final Pattern COLOR_HEX_PATTERN = Pattern.compile("^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$");

	// ######################################################################################
	// Main methods
	// ######################################################################################
	public PrefsBean(){
	}

	/** Page-load hook: refreshes the localized event-type names. Always returns "". */
	public String getInitValues() {
		// reload localized event types
		EventTypes.reloadLocalization();
		return "";
	}

	// ######################################################################################
	// Action/ActionListener methods
	// ######################################################################################

	/**
	 * If a pending feedback message exists, pushes it onto the FacesContext
	 * (client id "msg"), clears it, and returns true; otherwise returns false.
	 */
	public boolean isMessageToBeDisplayed() {
		if(message != null){
			FacesContext fc = FacesContext.getCurrentInstance();
			fc.addMessage("msg", new FacesMessage(messageSeverity, message, null));
			message = null;
			return true;
		}
		return false;
	}

	/**
	 * Reads the request parameter {@code componentId}, trims it, and returns it
	 * if empty or a valid hex color code ("#rgb"/"#rrggbb").
	 *
	 * @throws Exception if the value is non-empty and not a valid hex color
	 */
	private String getValidatedColorValue(String componentId) throws Exception {
		String value = getValueFromFacesContext(componentId).trim();
		if ("".equals(value)) {
			return value;
		}
		if (!COLOR_HEX_PATTERN.matcher(value).matches()) {
			throw new Exception("Invalid hex color code.");
		}
		return value;
	}

	/**
	 * "Update" action: reads the submitted form values, validates the colors,
	 * and persists everything to the user's Preferences.
	 *
	 * @return the navigation outcome: "calendar" on success, "prefs" on failure
	 *         (with a fatal feedback message queued)
	 */
	public String update() {
		try{
			// read from FacesContext
			setSelectedViewMode(getValueFromFacesContext("prefsForm:selectViewMode"));
			setSelectedHighPriorityColor(getValidatedColorValue("prefsForm:highPriorityColor"));
			setSelectedMediumPriorityColor(getValidatedColorValue("prefsForm:mediumPriorityColor"));
			setSelectedLowPriorityColor(getValidatedColorValue("prefsForm:lowPriorityColor"));
			setSelectedHighPriorityEvents(getValuesFromFacesContext("prefsForm:highPriorityEvents"));
			setSelectedMediumPriorityEvents(getValuesFromFacesContext("prefsForm:mediumPriorityEvents"));
			setSelectedLowPriorityEvents(getValuesFromFacesContext("prefsForm:lowPriorityEvents"));

			// update User Preferences
			setPreferenceString(PREFS_VIEW_MODE, selectedViewMode);
			setPreferenceString(PREFS_HIGHPRIORITY_COLOR, selectedHighPrColor);
			setPreferenceString(PREFS_MEDIUMPRIORITY_COLOR, selectedMediumPrColor);
			setPreferenceString(PREFS_LOWPRIORITY_COLOR, selectedLowPrColor);
			// clear first so removed assignments do not linger in the stored lists
			clearPreferenceList(PREFS_HIGHPRIORITY_EVENTS);
			clearPreferenceList(PREFS_MEDIUMPRIORITY_EVENTS);
			clearPreferenceList(PREFS_LOWPRIORITY_EVENTS);
			setPreferenceList(PREFS_HIGHPRIORITY_EVENTS, highPriorityEvents);
			setPreferenceList(PREFS_MEDIUMPRIORITY_EVENTS, mediumPriorityEvents);
			setPreferenceList(PREFS_LOWPRIORITY_EVENTS, lowPriorityEvents);
			setPreferenceString(PREFS_LAST_MODIFIED, Long.toString(System.currentTimeMillis()));

			// invalidate caches so the next read reflects the stored values
			priorityColorsMap = null;
			priorityEventsMap = null;
		}catch(Exception e){
			// error occurred
			message = msgs.getString("prefs_not_updated");
			messageSeverity = FacesMessage.SEVERITY_FATAL;
			LOG.error("Calendar Summary: "+message, e);
			return "prefs";
		}
		// all ok
		return "calendar";
	}

	/** "Cancel" action: drops any pending message and cached maps. */
	public String cancel() {
		message = null;
		priorityColorsMap = null;
		priorityEventsMap = null;
		return "calendar";
	}

	// ######################################################################################
	// Generic get/set methods
	// ######################################################################################

	/** Loads the priority colors map and populates the selected*PrColor fields. */
	private void readPriorityColorsMap() {
		// priority colors (CSS properties)
		priorityColorsMap = getPreferencePriorityColors();
		selectedHighPrColor = (String) priorityColorsMap.get(PREFS_HIGHPRIORITY_COLOR);
		selectedMediumPrColor = (String) priorityColorsMap.get(PREFS_MEDIUMPRIORITY_COLOR);
		selectedLowPrColor = (String) priorityColorsMap.get(PREFS_LOWPRIORITY_COLOR);
	}

	/** Loads the priority events map and populates the *PriorityEvents collections. */
	private void readPriorityEventsMap() {
		// priority events
		priorityEventsMap = getPreferencePriorityEvents();
		highPriorityEvents = (List) priorityEventsMap.get(PREFS_HIGHPRIORITY_EVENTS);
		mediumPriorityEvents = (List) priorityEventsMap.get(PREFS_MEDIUMPRIORITY_EVENTS);
		lowPriorityEvents = (List) priorityEventsMap.get(PREFS_LOWPRIORITY_EVENTS);
	}

	public String getSelectedViewMode() {
		selectedViewMode = getPreferenceViewMode();
		return selectedViewMode;
	}

	public void setSelectedViewMode(String selectedViewMode) {
		this.selectedViewMode = selectedViewMode;
	}

	/** @return the selectable view modes (month/week) as localized SelectItems */
	public List getViewModes() {
		viewModes = new ArrayList();
		viewModes.add(new SelectItem(CalendarBean.MODE_MONTHVIEW, msgs.getString("month_view")));
		viewModes.add(new SelectItem(CalendarBean.MODE_WEEKVIEW, msgs.getString("week_view")));
		return viewModes;
	}

	public String getSelectedHighPriorityColor() {
		if(priorityColorsMap == null)
			readPriorityColorsMap();
		return selectedHighPrColor;
	}

	public void setSelectedHighPriorityColor(String color) {
		this.selectedHighPrColor = color;
	}

	public String getSelectedMediumPriorityColor() {
		if(priorityColorsMap == null)
			readPriorityColorsMap();
		return selectedMediumPrColor;
	}

	public void setSelectedMediumPriorityColor(String color) {
		this.selectedMediumPrColor = color;
	}

	public String getSelectedLowPriorityColor() {
		if(priorityColorsMap == null)
			readPriorityColorsMap();
		return selectedLowPrColor;
	}

	public void setSelectedLowPriorityColor(String color) {
		this.selectedLowPrColor = color;
	}

	public List<SelectItem> getHighPriorityEvents(){
		if(priorityEventsMap == null)
			readPriorityEventsMap();
		return listOfEventTypesToLocalizedList(highPriorityEvents);
	}

	public void setHighPriorityEvents(List<SelectItem> events){
		this.highPriorityEvents = new ArrayList<String>();
		Iterator<SelectItem> i = events.iterator();
		while(i.hasNext()){
			SelectItem e = i.next();
			highPriorityEvents.add(e.getValue());
		}
	}

	public List<String> getSelectedHighPriorityEvents(){
		return new ArrayList<String>();
	}

	public void setSelectedHighPriorityEvents(List<String> events){
		this.highPriorityEvents = events;
	}

	public void setSelectedHighPriorityEvents(Collection<String> events){
		this.highPriorityEvents = events;
	}

	public List<SelectItem> getMediumPriorityEvents(){
		if(priorityEventsMap == null)
			readPriorityEventsMap();
		return listOfEventTypesToLocalizedList(mediumPriorityEvents);
	}

	public void setMediumPriorityEvents(List<SelectItem> events){
		this.mediumPriorityEvents = new ArrayList<String>();
		Iterator<SelectItem> i = events.iterator();
		while(i.hasNext()){
			SelectItem e = i.next();
			mediumPriorityEvents.add(e.getValue());
		}
	}

	public List<String> getSelectedMediumPriorityEvents(){
		return new ArrayList<String>();
	}

	public void setSelectedMediumPriorityEvents(List<String> events){
		this.mediumPriorityEvents = events;
	}

	public void setSelectedMediumPriorityEvents(Collection<String> events){
		this.mediumPriorityEvents = events;
	}

	public List<SelectItem> getLowPriorityEvents(){
		if(priorityEventsMap == null)
			readPriorityEventsMap();
		return listOfEventTypesToLocalizedList(lowPriorityEvents);
	}

	public void setLowPriorityEvents(List<SelectItem> events){
		this.lowPriorityEvents = new ArrayList<String>();
		Iterator<SelectItem> i = events.iterator();
		while(i.hasNext()){
			SelectItem e = i.next();
			lowPriorityEvents.add(e.getValue());
		}
	}

	public List<String> getSelectedLowPriorityEvents(){
		return new ArrayList<String>();
	}

	public void setSelectedLowPriorityEvents(List<String> events){
		this.lowPriorityEvents = events;
	}

	public void setSelectedLowPriorityEvents(Collection<String> events){
		this.lowPriorityEvents = events;
	}

	// ######################################################################################
	// Preferences methods
	// ######################################################################################

	/** @return the stored last-modified timestamp (millis), or 0 if unset/unparseable */
	public static long getPreferenceLastModified() {
		Long lastModified = 0l;
		String value = getPreferenceString(PREFS_LAST_MODIFIED);
		if(value != null){
			try{
				lastModified = Long.parseLong(value);
			}catch(NumberFormatException e){
				lastModified = 0l;
			}
		}
		return lastModified;
	}

	/**
	 * @return the user's view mode, falling back to the sakai.properties
	 *         default and finally to month view
	 */
	public static String getPreferenceViewMode() {
		String value = getPreferenceString(PREFS_VIEW_MODE);
		// preferences not set, read from sakai.properties
		if(value == null){
			value = getDefaultStringFromSakaiProperties(PREFS_VIEW_MODE);
		}
		// sakai.properties default not set, using 'month'
		if(value == null){
			return CalendarBean.MODE_MONTHVIEW;
		}else
			return value;
	}

	/**
	 * @return a map keyed by PREFS_*PRIORITY_COLOR with the user's colors, or
	 *         the sakai.properties defaults when none of the three is set
	 */
	public static Map getPreferencePriorityColors() {
		HashMap map = new HashMap();
		String h = getPreferenceString(PREFS_HIGHPRIORITY_COLOR);
		String m = getPreferenceString(PREFS_MEDIUMPRIORITY_COLOR);
		String l = getPreferenceString(PREFS_LOWPRIORITY_COLOR);

		// preferences not set, read from sakai.properties
		if(h == null && m == null && l == null){
			h = getDefaultStringFromSakaiProperties(PREFS_HIGHPRIORITY_COLOR);
			m = getDefaultStringFromSakaiProperties(PREFS_MEDIUMPRIORITY_COLOR);
			l = getDefaultStringFromSakaiProperties(PREFS_LOWPRIORITY_COLOR);
		}

		map.put(PREFS_HIGHPRIORITY_COLOR, h);
		map.put(PREFS_MEDIUMPRIORITY_COLOR, m);
		map.put(PREFS_LOWPRIORITY_COLOR, l);
		return map;
	}

	/**
	 * @return a map keyed by PREFS_*PRIORITY_EVENTS with the event-type lists,
	 *         validated against the known event types; any event type not
	 *         explicitly assigned is appended to the low-priority list
	 */
	public static Map getPreferencePriorityEvents() {
		HashMap map = new HashMap();
		List h = getPreferenceList(PREFS_HIGHPRIORITY_EVENTS);
		List m = getPreferenceList(PREFS_MEDIUMPRIORITY_EVENTS);
		List l = getPreferenceList(PREFS_LOWPRIORITY_EVENTS);

		// preferences not set, read from sakai.properties
		if(h == null && m == null && l == null){
			h = getDefaultListFromSakaiProperties(PREFS_HIGHPRIORITY_EVENTS);
			m = getDefaultListFromSakaiProperties(PREFS_MEDIUMPRIORITY_EVENTS);
			l = getDefaultListFromSakaiProperties(PREFS_LOWPRIORITY_EVENTS);
		}
		if(h == null)
			h = new ArrayList();
		if(m == null)
			m = new ArrayList();
		if(l == null)
			l = new ArrayList();

		// make sure all available events are listed
		// no pass-by-reference in java, must use a work-around...
		List temp = new ArrayList();
		temp.addAll(EventTypes.getEventTypes());
		PairList lists = new PairList(h, temp);
		lists = validateEventList(lists);
		h = lists.dataList;
		lists.dataList = m;
		lists = validateEventList(lists);
		m = lists.dataList;
		lists.dataList = l;
		lists = validateEventList(lists);
		l = lists.dataList;
		// add all non-specified events to low priority list
		l.addAll(lists.tempList);

		// sort lists
		//Collections.sort(h);
		//Collections.sort(m);
		//Collections.sort(l);

		map.put(PREFS_HIGHPRIORITY_EVENTS, h);
		map.put(PREFS_MEDIUMPRIORITY_EVENTS, m);
		map.put(PREFS_LOWPRIORITY_EVENTS, l);
		return map;
	}

	/**
	 * Get the current user preference value. First attempt Preferences, then defaults from sakai.properties.
	 * @param name The property name.
	 * @return The preference value or null if not set.
	 */
	private static String getPreferenceString(String name) {
		Preferences prefs = M_ps.getPreferences(M_sm.getCurrentSessionUserId());
		ResourceProperties rp = prefs.getProperties(PREFS_KEY);
		String value = rp.getProperty(name);
		return value;
	}

	/**
	 * Get the current user preference list value. First attempt Preferences, then defaults from sakai.properties.
	 * @param name The property name.
	 * @return The preference list value or null if not set.
	 */
	private static List getPreferenceList(String name) {
		Preferences prefs = M_ps.getPreferences(M_sm.getCurrentSessionUserId());
		ResourceProperties rp = prefs.getProperties(PREFS_KEY);
		List l = rp.getPropertyList(name);
		return l;
	}

	/**
	 * Stores a single-valued preference; a null value removes the property.
	 * Cancels the edit and rethrows on failure.
	 */
	private static void setPreferenceString(String name, String value) throws Exception {
		PreferencesEdit prefsEdit = null;
		String userId = M_sm.getCurrentSessionUserId();
		try{
			prefsEdit = M_ps.edit(userId);
		}catch(IdUnusedException e){
			// no preferences record yet for this user: create one
			prefsEdit = M_ps.add(userId);
		}
		try{
			ResourcePropertiesEdit props = prefsEdit.getPropertiesEdit(PREFS_KEY);

			if(value == null){
				props.removeProperty(name);
			}else{
				props.addProperty(name, value.toString());
			}
		}catch(Exception e){
			if(prefsEdit != null)
				M_ps.cancel(prefsEdit);
			throw e;
		}
		M_ps.commit(prefsEdit);
	}

	/**
	 * Appends the given values to a list-valued preference (skipping values
	 * already present); a null collection removes the property entirely.
	 * Cancels the edit and rethrows on failure.
	 */
	private static void setPreferenceList(String name, Collection values) throws Exception {
		PreferencesEdit prefsEdit = null;
		String userId = M_sm.getCurrentSessionUserId();
		try{
			prefsEdit = M_ps.edit(userId);
		}catch(IdUnusedException e){
			// no preferences record yet for this user: create one
			prefsEdit = M_ps.add(userId);
		}
		try{
			ResourcePropertiesEdit props = prefsEdit.getPropertiesEdit(PREFS_KEY);

			if(values == null){
				props.removeProperty(name);
			}else{
				List existing = props.getPropertyList(name);
				Iterator it = values.iterator();
				while(it.hasNext()){
					String value = (String) it.next();
					if(existing == null || !existing.contains(value))
						props.addPropertyToList(name, value.toString());
				}
			}
		}catch(Exception e){
			// FIX: the edit was previously cancelled twice when non-null (and the
			// second, unguarded cancel NPE'd when null, masking the real error)
			if(prefsEdit != null)
				M_ps.cancel(prefsEdit);
			throw e;
		}
		M_ps.commit(prefsEdit);
	}

	/**
	 * Removes a list-valued preference property.
	 * Cancels the edit and rethrows on failure.
	 */
	private static void clearPreferenceList(String name) throws Exception {
		PreferencesEdit prefsEdit = null;
		try{
			prefsEdit = M_ps.edit(M_sm.getCurrentSessionUserId());
			ResourcePropertiesEdit props = prefsEdit.getPropertiesEdit(PREFS_KEY);
			props.removeProperty(name);
		}catch(Exception e){
			// FIX: guard against prefsEdit being null when edit() itself threw,
			// which previously caused an NPE that masked the original exception
			if(prefsEdit != null)
				M_ps.cancel(prefsEdit);
			throw e;
		}
		M_ps.commit(prefsEdit);
	}

	/** @return the sakai.properties default for {@code SAKPROP_BASE + name}, or null */
	private static String getDefaultStringFromSakaiProperties(String name) {
		String value = M_cfg.getString(SAKPROP_BASE + name);
		return value;
	}

	/** @return the sakai.properties list default for {@code SAKPROP_BASE + name}; never null */
	private static List getDefaultListFromSakaiProperties(String name) {
		List l = new ArrayList();
		String[] valuesStr = M_cfg.getStrings(SAKPROP_BASE + name);
		if(valuesStr == null)
			return l;
		else{
			for(int i=0; i<valuesStr.length; i++){
				l.add(valuesStr[i]);
			}
		}
		return l;
	}

	// ######################################################################################
	// Util methods
	// ######################################################################################

	/** @return the (possibly empty) set of request parameter values for {@code componentId} */
	protected Set getValuesFromFacesContext(String componentId) {
		Set values = new HashSet();
		String[] str = (String[]) FacesContext.getCurrentInstance().getExternalContext().getRequestParameterValuesMap().get(componentId);
		if(str != null){
			for(int i = 0; i < str.length; i++){
				values.add(str[i]);
			}
		}
		return values;
	}

	/**
	 * @return the first request parameter value for {@code componentId}
	 * NOTE(review): throws NPE if the parameter is absent — callers appear to
	 * rely on the enclosing try/catch in update(); confirm before changing.
	 */
	protected String getValueFromFacesContext(String componentId) {
		String[] str = (String[]) FacesContext.getCurrentInstance().getExternalContext().getRequestParameterValuesMap().get(componentId);
		return str[0];
	}

	/** Wraps each event type in a SelectItem labeled with its localized name. */
	private List<SelectItem> listOfEventTypesToLocalizedList(Collection<String> l) {
		List<SelectItem> list = new ArrayList<SelectItem>();
		if(l == null)
			return list;
		Iterator<String> lI = l.iterator();
		while(lI.hasNext()){
			String eventType = lI.next();
			SelectItem item = new SelectItem(eventType, EventTypes.getLocalizedEventType(eventType));
			list.add(item);
		}
		return list;
	}

	/**
	 * Foreach 'list' entry A, remove it from 'temp'. If A doesn't exist in 'temp', remove it from 'list'.
	 * On return, lists.dataList holds only known event types and lists.tempList
	 * holds the event types not claimed by any list processed so far.
	 * @param lists pair of (priority list, remaining-event-types list)
	 */
	private static PairList validateEventList(PairList lists) {
		List temp = lists.tempList;
		List list = lists.dataList;
		if(list == null){
			return lists;
		}
		// collect removals first to avoid mutating 'list' while iterating it
		List toRemoveFromList = new ArrayList();
		Iterator iL = list.iterator();
		while(iL.hasNext()){
			Object e = iL.next();
			if(temp.contains(e))
				temp.remove(e);
			else
				toRemoveFromList.add(e);
		}
		Iterator iR = toRemoveFromList.iterator();
		while(iR.hasNext()){
			Object e = iR.next();
			list.remove(e);
		}
		lists.dataList = list;
		lists.tempList = temp;
		return lists;
	}
}

/** Pair of lists */
class PairList {
	public List dataList;
	public List tempList;

	public PairList(List dataList, List tempList) {
		this.dataList = dataList;
		this.tempList = tempList;
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.metron.elasticsearch.integration; import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.adrianwalker.multilinestring.Multiline; import org.apache.metron.common.Constants; import org.apache.metron.common.utils.JSONUtils; import org.apache.metron.elasticsearch.dao.ElasticsearchDao; import org.apache.metron.elasticsearch.integration.components.ElasticSearchComponent; import org.apache.metron.indexing.dao.AccessConfig; import org.apache.metron.indexing.dao.IndexDao; import org.apache.metron.indexing.dao.SearchIntegrationTest; import org.apache.metron.indexing.dao.search.FieldType; import org.apache.metron.indexing.dao.search.GroupRequest; import org.apache.metron.indexing.dao.search.InvalidSearchException; import org.apache.metron.indexing.dao.search.SearchRequest; import org.apache.metron.indexing.dao.search.SearchResponse; import org.apache.metron.indexing.dao.search.SearchResult; import org.apache.metron.integration.InMemoryComponent; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import 
org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.support.WriteRequest;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Runs the shared {@link SearchIntegrationTest} suite against an {@link ElasticsearchDao}
 * backed by an in-memory Elasticsearch node, plus a few Elasticsearch-specific cases
 * (facet/group error handling, cross-type filtering, column metadata).
 */
public class ElasticsearchSearchIntegrationTest extends SearchIntegrationTest {

  /** Directory where the in-memory Elasticsearch node keeps its index data. */
  private static String indexDir = "target/elasticsearch_search";
  /** Date pattern handed to the DAO through the "es.date.format" global config key. */
  private static String dateFormat = "yyyy.MM.dd.HH";
  // NOTE(review): MAX_RETRIES and SLEEP_MS are not referenced anywhere in this class;
  // they look like leftovers from a removed retry loop and are candidates for deletion.
  private static final int MAX_RETRIES = 10;
  private static final int SLEEP_MS = 500;
  /** DAO under test; created once for the whole suite in {@link #setup()}. */
  protected static IndexDao dao;

  // The javadoc below is consumed by the @Multiline annotation processor: its text becomes
  // the runtime value of the annotated field, so the JSON content must not be altered.
  /**
   * {
   *   "bro_doc": {
   *     "properties": {
   *       "source:type": {
   *         "type": "text",
   *         "fielddata" : "true"
   *       },
   *       "guid" : {
   *         "type" : "keyword"
   *       },
   *       "ip_src_addr": {
   *         "type": "ip"
   *       },
   *       "ip_src_port": {
   *         "type": "integer"
   *       },
   *       "long_field": {
   *         "type": "long"
   *       },
   *       "timestamp": {
   *         "type": "date",
   *         "format": "epoch_millis"
   *       },
   *       "latitude" : {
   *         "type": "float"
   *       },
   *       "score": {
   *         "type": "double"
   *       },
   *       "is_alert": {
   *         "type": "boolean"
   *       },
   *       "location_point": {
   *         "type": "geo_point"
   *       },
   *       "bro_field": {
   *         "type": "text",
   *         "fielddata" : "true"
   *       },
   *       "ttl": {
   *         "type": "text",
   *         "fielddata" : "true"
   *       },
   *       "alert": {
   *         "type": "nested"
   *       }
   *     }
   *   }
   * }
   */
  @Multiline
  private static String broTypeMappings;

  // @Multiline source — the JSON below is runtime data; do not edit its content.
  /**
   * {
   *   "snort_doc": {
   *     "properties": {
   *       "source:type": {
   *         "type": "text",
   *         "fielddata" : "true"
   *       },
   *       "guid" : {
   *         "type" : "keyword"
   *       },
   *       "ip_src_addr": {
   *         "type": "ip"
   *       },
   *       "ip_src_port": {
   *         "type": "integer"
   *       },
   *       "long_field": {
   *         "type": "long"
   *       },
   *       "timestamp": {
   *         "type": "date",
   *         "format": "epoch_millis"
   *       },
   *       "latitude" : {
   *         "type": "float"
   *       },
   *       "score": {
   *         "type": "double"
   *       },
   *       "is_alert": {
   *         "type": "boolean"
   *       },
   *       "location_point": {
   *         "type": "geo_point"
   *       },
   *       "snort_field": {
   *         "type": "integer"
   *       },
   *       "ttl": {
   *         "type": "integer"
   *       },
   *       "alert": {
   *         "type": "nested"
   *       },
   *       "threat:triage:score": {
   *         "type": "float"
   *       }
   *     }
   *   }
   * }
   */
  @Multiline
  private static String snortTypeMappings;

  // @Multiline source — the JSON below is runtime data; do not edit its content.
  /**
   * {
   *   "bro_doc_default": {
   *     "dynamic_templates": [{
   *       "strings": {
   *         "match_mapping_type": "string",
   *         "mapping": {
   *           "type": "text"
   *         }
   *       }
   *     }]
   *   }
   * }
   */
  @Multiline
  private static String broDefaultStringMappings;

  /** Spins up Elasticsearch, creates the DAO, and loads the static fixtures once per suite. */
  @BeforeClass
  public static void setup() throws Exception {
    indexComponent = startIndex();
    dao = createDao();
    // The data is all static for searches, so we can set it up beforehand, and it's faster
    loadTestData();
  }

  /**
   * Builds an {@link ElasticsearchDao} pointed at the in-memory node started by
   * {@link #startIndex()}.
   */
  protected static IndexDao createDao() {
    AccessConfig config = new AccessConfig();
    config.setMaxSearchResults(100);
    config.setMaxSearchGroups(100);
    // Plain map construction instead of double-brace initialization (which creates a
    // needless anonymous subclass of HashMap).
    config.setGlobalConfigSupplier(() -> {
      Map<String, Object> globalConfig = new HashMap<>();
      globalConfig.put("es.clustername", "metron");
      globalConfig.put("es.port", "9300");
      globalConfig.put("es.ip", "localhost");
      globalConfig.put("es.date.format", dateFormat);
      return globalConfig;
    });
    IndexDao dao = new ElasticsearchDao();
    dao.init(config);
    return dao;
  }

  /** Starts the in-memory Elasticsearch node used by the whole suite. */
  protected static InMemoryComponent startIndex() throws Exception {
    InMemoryComponent es = new ElasticSearchComponent.Builder()
        .withHttpPort(9211)
        .withIndexDir(new File(indexDir))
        .build();
    es.start();
    return es;
  }

  /**
   * Creates the bro/snort indices with their mappings and bulk-indexes the static test
   * documents supplied by the base class ({@code broData}, {@code snortData}).
   *
   * @throws ParseException if the fixture JSON cannot be parsed
   */
  protected static void loadTestData() throws ParseException {
    ElasticSearchComponent es = (ElasticSearchComponent) indexComponent;
    es.getClient().admin().indices().prepareCreate("bro_index_2017.01.01.01")
        .addMapping("bro_doc", broTypeMappings)
        .addMapping("bro_doc_default", broDefaultStringMappings).get();
    es.getClient().admin().indices().prepareCreate("snort_index_2017.01.01.02")
        .addMapping("snort_doc", snortTypeMappings).get();

    BulkRequestBuilder bulkRequest = es.getClient().prepareBulk()
        .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
    // The two sensors are indexed identically; the shared loop lives in addDocuments().
    addDocuments(es, bulkRequest, broData, "bro_index_2017.01.01.01", "bro_doc");
    addDocuments(es, bulkRequest, snortData, "snort_index_2017.01.01.02", "snort_doc");
    BulkResponse bulkResponse = bulkRequest.execute().actionGet();
    if (bulkResponse.hasFailures()) {
      // Include the per-item failure details instead of swallowing them.
      throw new RuntimeException(
          "Failed to index test data: " + bulkResponse.buildFailureMessage());
    }
  }

  /**
   * Parses {@code json} as a JSON array of documents and queues each one on
   * {@code bulkRequest}, using the document's "guid" as id and "timestamp" as the
   * index-time timestamp.
   */
  private static void addDocuments(ElasticSearchComponent es, BulkRequestBuilder bulkRequest,
      String json, String index, String docType) throws ParseException {
    JSONArray array = (JSONArray) new JSONParser().parse(json);
    for (Object o : array) {
      JSONObject jsonObject = (JSONObject) o;
      IndexRequestBuilder indexRequestBuilder = es.getClient()
          .prepareIndex(index, docType)
          .setId((String) jsonObject.get("guid"))
          .setSource(jsonObject.toJSONString())
          .setTimestamp(jsonObject.get("timestamp").toString());
      bulkRequest.add(indexRequestBuilder);
    }
  }

  /** A malformed facet field must surface as InvalidSearchException, not a raw ES error. */
  @Test
  public void bad_facet_query_throws_exception() throws Exception {
    thrown.expect(InvalidSearchException.class);
    thrown.expectMessage("Failed to execute search");
    SearchRequest request = JSONUtils.INSTANCE.load(badFacetQuery, SearchRequest.class);
    dao.search(request);
  }

  // Inherits @Test from the overridden base-class method.
  @Override
  public void returns_column_metadata_for_specified_indices() throws Exception {
    // getColumnMetadata with only bro
    {
      Map<String, FieldType> fieldTypes = dao.getColumnMetadata(Collections.singletonList("bro"));
      Assert.assertEquals(13, fieldTypes.size());
      Assert.assertEquals(FieldType.TEXT, fieldTypes.get("bro_field"));
      Assert.assertEquals(FieldType.TEXT, fieldTypes.get("ttl"));
      Assert.assertEquals(FieldType.KEYWORD, fieldTypes.get("guid"));
      Assert.assertEquals(FieldType.TEXT, fieldTypes.get("source:type"));
      Assert.assertEquals(FieldType.IP, fieldTypes.get("ip_src_addr"));
      Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("ip_src_port"));
      Assert.assertEquals(FieldType.LONG, fieldTypes.get("long_field"));
      Assert.assertEquals(FieldType.DATE, fieldTypes.get("timestamp"));
      Assert.assertEquals(FieldType.FLOAT, fieldTypes.get("latitude"));
      Assert.assertEquals(FieldType.DOUBLE, fieldTypes.get("score"));
      Assert.assertEquals(FieldType.BOOLEAN, fieldTypes.get("is_alert"));
      Assert.assertEquals(FieldType.OTHER, fieldTypes.get("location_point"));
      Assert.assertEquals(FieldType.OTHER, fieldTypes.get("alert"));
    }
    // getColumnMetadata with only snort
    {
      Map<String, FieldType> fieldTypes = dao.getColumnMetadata(Collections.singletonList("snort"));
      Assert.assertEquals(14, fieldTypes.size());
      Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("snort_field"));
      Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("ttl"));
      Assert.assertEquals(FieldType.KEYWORD, fieldTypes.get("guid"));
      Assert.assertEquals(FieldType.TEXT, fieldTypes.get("source:type"));
      Assert.assertEquals(FieldType.IP, fieldTypes.get("ip_src_addr"));
      Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("ip_src_port"));
      Assert.assertEquals(FieldType.LONG, fieldTypes.get("long_field"));
      Assert.assertEquals(FieldType.DATE, fieldTypes.get("timestamp"));
      Assert.assertEquals(FieldType.FLOAT, fieldTypes.get("latitude"));
      Assert.assertEquals(FieldType.DOUBLE, fieldTypes.get("score"));
      Assert.assertEquals(FieldType.BOOLEAN, fieldTypes.get("is_alert"));
      Assert.assertEquals(FieldType.OTHER, fieldTypes.get("location_point"));
      Assert.assertEquals(FieldType.OTHER, fieldTypes.get("alert"));
    }
  }

  // Inherits @Test from the overridden base-class method.
  @Override
  public void returns_column_data_for_multiple_indices() throws Exception {
    Map<String, FieldType> fieldTypes = dao.getColumnMetadata(Arrays.asList("bro", "snort"));
    Assert.assertEquals(15, fieldTypes.size());
    Assert.assertEquals(FieldType.KEYWORD, fieldTypes.get("guid"));
    Assert.assertEquals(FieldType.TEXT, fieldTypes.get("source:type"));
    Assert.assertEquals(FieldType.IP, fieldTypes.get("ip_src_addr"));
    Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("ip_src_port"));
    Assert.assertEquals(FieldType.LONG, fieldTypes.get("long_field"));
    Assert.assertEquals(FieldType.DATE, fieldTypes.get("timestamp"));
    Assert.assertEquals(FieldType.FLOAT, fieldTypes.get("latitude"));
    Assert.assertEquals(FieldType.DOUBLE, fieldTypes.get("score"));
    Assert.assertEquals(FieldType.BOOLEAN, fieldTypes.get("is_alert"));
    Assert.assertEquals(FieldType.OTHER, fieldTypes.get("location_point"));
    Assert.assertEquals(FieldType.TEXT, fieldTypes.get("bro_field"));
    Assert.assertEquals(FieldType.INTEGER, fieldTypes.get("snort_field"));
    //NOTE: This is because the field is in both bro and snort and they have different types.
    Assert.assertEquals(FieldType.OTHER, fieldTypes.get("ttl"));
    Assert.assertEquals(FieldType.FLOAT, fieldTypes.get("threat:triage:score"));
    Assert.assertEquals(FieldType.OTHER, fieldTypes.get("alert"));
  }

  /** Aggregating on a non-string, non-numeric field (e.g. geo_point) must fail cleanly. */
  @Test
  public void throws_exception_on_aggregation_queries_on_non_string_non_numeric_fields()
      throws Exception {
    thrown.expect(InvalidSearchException.class);
    thrown.expectMessage("Failed to execute search");
    GroupRequest request = JSONUtils.INSTANCE.load(badGroupQuery, GroupRequest.class);
    dao.group(request);
  }

  /** Filtering on "ttl" must match only bro, where the field is text ("data 1"). */
  @Test
  public void different_type_filter_query() throws Exception {
    SearchRequest request = JSONUtils.INSTANCE.load(differentTypeFilterQuery, SearchRequest.class);
    SearchResponse response = dao.search(request);
    Assert.assertEquals(1, response.getTotal());
    List<SearchResult> results = response.getResults();
    Assert.assertEquals("bro", results.get(0).getSource().get("source:type"));
    Assert.assertEquals("data 1", results.get(0).getSource().get("ttl"));
  }

  /** Elasticsearch stores the sensor-type field with ':' instead of '.'. */
  @Override
  protected String getSourceTypeField() {
    return Constants.SENSOR_TYPE.replace('.', ':');
  }

  @Override
  protected IndexDao getIndexDao() {
    return dao;
  }

  /** Maps a sensor type onto the concrete dated index name created in loadTestData(). */
  @Override
  protected String getIndexName(String sensorType) {
    if ("bro".equals(sensorType)) {
      return "bro_index_2017.01.01.01";
    } else {
      return "snort_index_2017.01.01.02";
    }
  }
}
/* * LauncherGui.java -TurtleKit - A 'star logo' in MadKit * Copyright (C) 2000-2007 Fabien Michel, Gregory Beurier * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package trashier; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.GridLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.BorderFactory; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSlider; import javax.swing.JTextField; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import edu.turtlekit2.Tk2Launcher; import edu.turtlekit2.kernel.agents.SimulationRunner; import edu.turtlekit2.ui.utils.GUIMessage; import madkit.kernel.OPanel; import madkit.system.property.PropertyAgent; import madkit.utils.graphics.GraphicUtils; /** * this class defines the Graphics object to execute the Launcher method. It is included in tabbledLauncherPanel. 
 *
 * @author Fabien MICHEL, Gregory BEURIER
 */
public class LauncherGui implements ActionListener {

    // NOTE(review): unused — this class implements only ActionListener, not Serializable.
    private static final long serialVersionUID = 1L;

    // Text-entry fields (original comment: "zone de saisie" = input area).
    JTextField p, dD, pause, displayDelay;
    // Toolbar buttons (original comment: "les boutons" = the buttons).
    public JButton startStop, wrapOnOff, addViewer, reset, step, python;
    // The simulation runner this GUI drives.
    SimulationRunner ll;
    public JPanel buttons, allbuttons, cycle;
    // Console output panel.
    // NOTE(review): never assigned (initConsole's body is commented out) yet dereferenced
    // in the Reset action below — likely NullPointerException; confirm before enabling Reset.
    public OPanel textDisplay;
    public JButton bProp;
    public JPanel contentPane;
    // Properties-window agent; lazily created on first click of bProp in actionPerformed.
    PropertyAgent prop;
    // Icons for the toolbar buttons (iView/iProps are currently not loaded).
    ImageIcon iStart, iStep, iReset, iStop, iPythonEd, iView, iProps;

    /**
     * Builds the GUI controller for the given simulation runner and loads the
     * toolbar icons from the classpath.
     */
    public LauncherGui(SimulationRunner l) {
        ll = l;
        iStart = createImageIcon("images/Play16.gif", "Play");
        iStop = createImageIcon("images/Pause16.gif", "Pause");
        iStep = createImageIcon("images/StepForward16.gif", "Step");
        iReset = createImageIcon("images/Refresh16.gif","Reset");
        iPythonEd = createImageIcon("images/Edit16.gif","Python");
        // iView = createImageIcon("images/Zoom16.gif","New Viewer");
        // iProps = createImageIcon("images/Help16.gif","Properties");
    }

    /** Adds the button to the panel and registers this object as its listener. */
    private void makebutton(JButton b, JPanel p) {
        p.add(b);
        b.addActionListener(this);
    }

    /**
     * Creates a toolbar button with the given action command, tooltip and icon
     * (falls back to a text label when the icon is null), registers this listener,
     * and adds the button to {@code p} when {@code p} is non-null.
     */
    JButton createButton(JPanel p, String action, String descr, ImageIcon i) {
        JButton b;
        if (i != null)
            b = new JButton(i);
        else
            b = new JButton(action);
        b.setToolTipText(descr);
        b.setMargin(new Insets(0, 0, 0, 0));
        b.setActionCommand(action);
        b.addActionListener(this);
        if (p != null)
            p.add(b);
        return b;
    }

    /** Loads an icon from the classpath; logs and returns null when the resource is missing. */
    protected ImageIcon createImageIcon(String path, String description) {
        java.net.URL imgURL = getClass().getResource(path);
        if (imgURL != null) {
            return new ImageIcon(imgURL, description);
        } else {
            System.err.println("Couldn't find file: " + path);
            return null;
        }
    }

    /** Loads an icon from the classpath without a description. */
    ImageIcon makeIcon(String path) {
        java.net.URL url = this.getClass().getResource(path);
        // NOTE(review): unlike createImageIcon, this does not guard against a missing
        // resource — getResource may return null here.
        return new ImageIcon(url);
        // if (path != null) {
        // ImageIcon i = null;
        // java.net.URL u = this.getClass().getResource(path);
        // if (u != null)
        // i = new ImageIcon(u);
        //
        // if ((i != null) && (i.getImage() != null))
        // return i;
        // }
        // return null;
    }

    /** Switches a button to the given action command and, when non-null, the given icon. */
    void setButtonState(JButton b, String action, ImageIcon icon) {
        b.setActionCommand(action);
        if (icon != null)
            b.setIcon(icon);
    }

    /**
     * Creates the toolbar (start/step/reset/wrap/python) and sends it to the
     * simulation's UIManager agent for display in the button zone.
     */
    public void initButtons(){
        // Label reflects the environment's current wrap setting.
        if (ll.environment.wrap)
            wrapOnOff = new JButton("Wrap On");
        else
            wrapOnOff = new JButton("Wrap Off");
        addViewer = new JButton("Add Viewer");
        allbuttons = new JPanel(new GridLayout(1, 6));
        startStop = createButton(allbuttons, "start", "Run and stop the simulation", iStart);
        step = createButton(allbuttons, "Step", "Step the simulation", iStep);
        reset = createButton(allbuttons, "Reset", "Reset the simulation", iReset);
        makebutton(wrapOnOff, allbuttons);
        // addViewer = createButton(allbuttons, "Add Viewer", "Add a viewer", iView);
        python = createButton(allbuttons, "Python", "Launch a python editor", iPythonEd);
        ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
            new GUIMessage<JComponent>(allbuttons, SimulationUI.BUTTON_ZONE, ""));
    }

    /**
     * Creates the simulation-speed slider (0..500, higher = faster) and sends it
     * to the UIManager agent; see SliderListener for the delay mapping.
     */
    public void initSliders(){
        //Create the slider and its label
        JLabel sliderLabel = new JLabel("Simulation speed", JLabel.CENTER);
        sliderLabel.setAlignmentX(Component.CENTER_ALIGNMENT);
        JSlider simulationSpeed = new JSlider(JSlider.HORIZONTAL, 0, 500, 490);
        simulationSpeed.addChangeListener(new SliderListener());
        simulationSpeed.setMajorTickSpacing(250);
        simulationSpeed.setMinorTickSpacing(10);
        simulationSpeed.setPaintTicks(true);
        simulationSpeed.setPaintLabels(false);
        simulationSpeed.setBorder(BorderFactory.createEmptyBorder(0, 0, 15, 0));
        contentPane = new JPanel(new BorderLayout());
        contentPane.add(sliderLabel, BorderLayout.WEST);
        contentPane.add(simulationSpeed, BorderLayout.CENTER);
        // bProp = createButton(null, "Properties", "Shows the simulation parameters", iProps);
        // contentPane.add(bProp, BorderLayout.EAST);
        ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
            new GUIMessage<JComponent>(contentPane, SimulationUI.BUTTON_ZONE, ""));
    }

    /** Console setup is currently disabled — the whole body is commented out. */
    public void initConsole(){
        // textDisplay = new OPanel();//JTextArea();
        // textDisplay.jscrollpane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
        // textDisplay.jscrollpane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED);
        // ll.setOutputWriter(textDisplay.getOut());
        //
        // ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
        // new GUIMessage<JComponent>(textDisplay, SimulationUI.CONSOLE_ZONE, "Console"));
    }

    /** Builds all UI pieces: toolbar, (disabled) console, and speed slider. */
    public void initialisation() {
        initButtons();
        initConsole();
        initSliders();
    }

    /**
     * Central dispatcher for every toolbar button. Drives the runner's start/run
     * flags and keeps the startStop button's command, icon and color in sync:
     * "start" -> first launch, "Stop" -> running, "Run" -> paused.
     */
    public void actionPerformed(ActionEvent e) {
        Object s = e.getSource();
        if (s == startStop) {
            // First click ever: flip the runner's start flag and show the Stop state.
            if (startStop.getActionCommand().equalsIgnoreCase("Start")) {
                startStop.setBackground(Color.green);
                setButtonState(startStop, "Stop", iStop);
                //startStop.setText("Stop");
                ll.start = true;
                return;
            }
            // Running -> pause.
            if (ll.run && ll.start) {
                startStop.setBackground(Color.red);
                //startStop.setText("Run");
                setButtonState(startStop, "Run", iStart);
                ll.setStop();
                return;
            }
            // Paused -> resume (setStop toggles the runner; presumably acts as a
            // toggle rather than a pure stop — TODO confirm against SimulationRunner).
            else if (ll.start) {
                startStop.setBackground(Color.green);
                setButtonState(startStop, "Stop", iStop);
                //startStop.setText("Stop");
                ll.setStop();
            }
        }
        else if (s == addViewer && ll.start)
            // ll.addViewer();
            // NOTE(review): debug print left in place of the disabled addViewer call.
            System.err.println("TEST");
        else if (s == reset && ll.start) {
            // NOTE(review): textDisplay is never initialized (see initConsole) — this
            // dereference will NPE; confirm.
            textDisplay.clearOutput();
            ll.setReset();
            ll.run = true;
            startStop.setBackground(Color.green);
            //startStop.setText("Stop");
            setButtonState(startStop, "Stop", iStop);
        }
        else if (s == wrapOnOff) {
            // Toggle the torus/wrap mode of the environment and relabel the button.
            if (wrapOnOff.getText().equalsIgnoreCase("Wrap On")) {
                ll.setWrapModeOn(false);
                wrapOnOff.setText("Wrap Off");
            }
            else {
                ll.setWrapModeOn(true);
                wrapOnOff.setText("Wrap On");
            }
        }
        //if (s==p) ll.setCyclePause(Integer.parseInt(p.getText()));
        //if (s==dD) ll.setCycleDisplayEvery(Integer.parseInt(dD.getText()));
        else if (s == step) {
            // If running: pause first, then advance exactly one cycle.
            if (ll.start && ll.run) {
                startStop.setBackground(Color.red);
                // startStop.setText("Run");
                setButtonState(startStop, "Run", iStart);
                ll.setStop();
                ll.stepByStep();
                return;
            }
            // Already paused: just advance one cycle.
            if (ll.start) {
                ll.stepByStep();
                return;
            }
        }
        else if (s == bProp) {
            // Lazily create the properties window, or re-show it if it already exists.
            if (prop == null) {
                prop = new PropertyAgent(ll);
                ll.launchAgent(prop,"Properties of " + ll.simulationGroup, true);
            }
            else {
                // check
                GraphicUtils.getFrameParent((JComponent) prop.getGUIObject()).setVisible(true);
            }
        }
        else if (s == python) {
            try {
                ll.println("launching python. Please wait...");
                ll.launchPython();
                // Pause the simulation while the editor is up.
                if (ll.run) {
                    startStop.setBackground(Color.red);
                    //startStop.setText("Run");
                    setButtonState(startStop, "Run", iStart);
                    ll.setStop();
                    ll.stepByStep();
                    return;
                }
            }
            // NOTE(review): catching NoClassDefFoundError covers the case where the
            // python (Jython) classes are absent; both handlers print the same message.
            catch (NoClassDefFoundError ex) {
                ll.println("can't launch python in applet mode");
            }
            catch (Exception ex) {
                ll.println("can't launch python in applet mode");
            }
        }
    }

    /** Kills the properties-window agent, if one was created. */
    void removePropertyWindows() {
        if (prop != null) {
            ll.killAgent(prop);
        }
    }

    /**
     * Maps the speed slider (0..500) onto the scheduler delay: delay = 500 - value,
     * so a higher slider value means a faster simulation.
     */
    class SliderListener implements ChangeListener {
        public void stateChanged(ChangeEvent e) {
            JSlider source = (JSlider) e.getSource();
            if (!source.getValueIsAdjusting()) {
                ll.sch.delay = (500 - (int) source.getValue());
            }
        }
    }
}
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spanner; import static com.google.common.base.Preconditions.checkState; import com.google.cloud.ByteArray; import java.util.List; /** * Base class for assisting {@link StructReader} implementations. * * <p>This class implements the majority of the {@code StructReader} interface, leaving subclasses * to implement core data access via the {@code getTypeNameInternal()} methods. {@code * AbstractStructReader} guarantees that these will only be called for non-{@code NULL} columns of a * type appropriate for the method. 
*/ public abstract class AbstractStructReader implements StructReader { protected abstract boolean getBooleanInternal(int columnIndex); protected abstract long getLongInternal(int columnIndex); protected abstract double getDoubleInternal(int columnIndex); protected abstract String getStringInternal(int columnIndex); protected abstract ByteArray getBytesInternal(int columnIndex); protected abstract Timestamp getTimestampInternal(int columnIndex); protected abstract Date getDateInternal(int columnIndex); protected abstract boolean[] getBooleanArrayInternal(int columnIndex); protected abstract List<Boolean> getBooleanListInternal(int columnIndex); protected abstract long[] getLongArrayInternal(int columnIndex); protected abstract List<Long> getLongListInternal(int columnIndex); protected abstract double[] getDoubleArrayInternal(int columnIndex); protected abstract List<Double> getDoubleListInternal(int columnIndex); protected abstract List<String> getStringListInternal(int columnIndex); protected abstract List<ByteArray> getBytesListInternal(int columnIndex); protected abstract List<Timestamp> getTimestampListInternal(int columnIndex); protected abstract List<Date> getDateListInternal(int columnIndex); protected abstract List<Struct> getStructListInternal(int columnIndex); @Override public int getColumnCount() { return getType().getStructFields().size(); } @Override public Type getColumnType(int columnIndex) { return getType().getStructFields().get(columnIndex).getType(); } @Override public Type getColumnType(String columnName) { return getType().getStructFields().get(getColumnIndex(columnName)).getType(); } @Override public boolean isNull(String columnName) { return isNull(getColumnIndex(columnName)); } @Override public boolean getBoolean(int columnIndex) { checkNonNullOfType(columnIndex, Type.bool(), columnIndex); return getBooleanInternal(columnIndex); } @Override public boolean getBoolean(String columnName) { int columnIndex = getColumnIndex(columnName); 
checkNonNullOfType(columnIndex, Type.bool(), columnName); return getBooleanInternal(columnIndex); } @Override public long getLong(int columnIndex) { checkNonNullOfType(columnIndex, Type.int64(), columnIndex); return getLongInternal(columnIndex); } @Override public long getLong(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.int64(), columnName); return getLongInternal(columnIndex); } @Override public double getDouble(int columnIndex) { checkNonNullOfType(columnIndex, Type.float64(), columnIndex); return getDoubleInternal(columnIndex); } @Override public double getDouble(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.float64(), columnName); return getDoubleInternal(columnIndex); } @Override public String getString(int columnIndex) { checkNonNullOfType(columnIndex, Type.string(), columnIndex); return getStringInternal(columnIndex); } @Override public String getString(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.string(), columnName); return getStringInternal(columnIndex); } @Override public ByteArray getBytes(int columnIndex) { checkNonNullOfType(columnIndex, Type.bytes(), columnIndex); return getBytesInternal(columnIndex); } @Override public ByteArray getBytes(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.bytes(), columnName); return getBytesInternal(columnIndex); } @Override public Timestamp getTimestamp(int columnIndex) { checkNonNullOfType(columnIndex, Type.timestamp(), columnIndex); return getTimestampInternal(columnIndex); } @Override public Timestamp getTimestamp(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.timestamp(), columnName); return getTimestampInternal(columnIndex); } @Override public Date getDate(int columnIndex) { checkNonNullOfType(columnIndex, Type.date(), 
columnIndex); return getDateInternal(columnIndex); } @Override public Date getDate(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.date(), columnName); return getDateInternal(columnIndex); } @Override public boolean[] getBooleanArray(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.bool()), columnIndex); return getBooleanArrayInternal(columnIndex); } @Override public boolean[] getBooleanArray(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.bool()), columnName); return getBooleanArrayInternal(columnIndex); } @Override public List<Boolean> getBooleanList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.bool()), columnIndex); return getBooleanListInternal(columnIndex); } @Override public List<Boolean> getBooleanList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.bool()), columnName); return getBooleanListInternal(columnIndex); } @Override public long[] getLongArray(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.int64()), columnIndex); return getLongArrayInternal(columnIndex); } @Override public long[] getLongArray(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.int64()), columnName); return getLongArrayInternal(columnIndex); } @Override public List<Long> getLongList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.int64()), columnIndex); return getLongListInternal(columnIndex); } @Override public List<Long> getLongList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.int64()), columnName); return getLongListInternal(columnIndex); } @Override public double[] getDoubleArray(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.float64()), columnIndex); return 
getDoubleArrayInternal(columnIndex); } @Override public double[] getDoubleArray(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.float64()), columnName); return getDoubleArrayInternal(columnIndex); } @Override public List<Double> getDoubleList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.float64()), columnIndex); return getDoubleListInternal(columnIndex); } @Override public List<Double> getDoubleList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.float64()), columnName); return getDoubleListInternal(columnIndex); } @Override public List<String> getStringList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.string()), columnIndex); return getStringListInternal(columnIndex); } @Override public List<String> getStringList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.string()), columnName); return getStringListInternal(columnIndex); } @Override public List<ByteArray> getBytesList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.bytes()), columnIndex); return getBytesListInternal(columnIndex); } @Override public List<ByteArray> getBytesList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.bytes()), columnName); return getBytesListInternal(columnIndex); } @Override public List<Timestamp> getTimestampList(int columnIndex) { checkNonNullOfType(columnIndex, Type.array(Type.timestamp()), columnIndex); return getTimestampListInternal(columnIndex); } @Override public List<Timestamp> getTimestampList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.timestamp()), columnName); return getTimestampListInternal(columnIndex); } @Override public List<Date> getDateList(int columnIndex) { 
checkNonNullOfType(columnIndex, Type.array(Type.date()), columnIndex); return getDateListInternal(columnIndex); } @Override public List<Date> getDateList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullOfType(columnIndex, Type.array(Type.date()), columnName); return getDateListInternal(columnIndex); } @Override public List<Struct> getStructList(int columnIndex) { checkNonNullArrayOfStruct(columnIndex, columnIndex); return getStructListInternal(columnIndex); } @Override public List<Struct> getStructList(String columnName) { int columnIndex = getColumnIndex(columnName); checkNonNullArrayOfStruct(columnIndex, columnName); return getStructListInternal(columnIndex); } @Override public int getColumnIndex(String columnName) { // Use the Type instance for field name lookup. Type instances are naturally shared by the // ResultSet, all Structs corresponding to rows in the read, and all Structs corresponding to // the values of ARRAY<STRUCT<...>> columns in the read, so this is the best location to // amortize lookup costs. 
return getType().getFieldIndex(columnName); } private void checkNonNullOfType(int columnIndex, Type expectedType, Object columnNameForError) { Type actualType = getColumnType(columnIndex); checkState( expectedType.equals(actualType), "Column %s is not of correct type: expected %s but was %s", columnNameForError, expectedType, actualType); checkNonNull(columnIndex, columnNameForError); } private void checkNonNullArrayOfStruct(int columnIndex, Object columnNameForError) { Type actualType = getColumnType(columnIndex); checkState( actualType.getCode() == Type.Code.ARRAY && actualType.getArrayElementType().getCode() == Type.Code.STRUCT, "Column %s is not of correct type: expected ARRAY<STRUCT<...>> but was %s", columnNameForError, actualType); checkNonNull(columnIndex, columnNameForError); } private void checkNonNull(int columnIndex, Object columnNameForError) { if (isNull(columnIndex)) { throw new NullPointerException("Column " + columnNameForError + " contains NULL value"); } } }
package com.josiahgaskin.opticon2015demo;

import android.support.v7.app.AppCompatActivity;
import android.app.Activity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;

/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 *
 * <p>The hosting Activity must implement {@link NavigationDrawerCallbacks} (enforced in
 * {@link #onAttach}) and must call {@link #setUp} after its layout is inflated so the drawer
 * toggle and shadow can be wired to the activity's {@link DrawerLayout}.
 */
public class NavigationDrawerFragment extends Fragment {

    /**
     * Remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";

    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mCallbacks;

    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;

    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    // The view in the activity's layout that hosts this fragment; used as the drawer pane.
    private View mFragmentContainerView;

    private int mCurrentSelectedPosition = 0;
    // True when state was restored from a saved instance (suppresses the first-run auto-open).
    private boolean mFromSavedInstanceState;
    // Mirrors the PREF_USER_LEARNED_DRAWER shared preference.
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        // Note: at this point the list view does not exist yet; selectItem guards for that and
        // only notifies the callbacks / records the position.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        // Static two-entry list backed by string resources; the activated item style reflects
        // the current selection.
        mDrawerListView.setAdapter(new ArrayAdapter<String>(
                getActionBar().getThemedContext(),
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_section1),
                        getString(R.string.title_section2),
                }));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    /** Returns true when the drawer pane hosting this fragment is currently open. */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener

        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                // The fragment may have been detached while the drawer animated; bail out then.
                if (!isAdded()) {
                    return;
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }

                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    // Records the selection, updates the checked row, closes the drawer, and notifies the host
    // activity. All collaborators are null-checked because this is also called from onCreate(),
    // before the view hierarchy and drawer wiring exist.
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Give the drawer toggle first chance to consume the event (handles the home/up button).
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }

        if (item.getItemId() == R.id.action_example) {
            Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return ((AppCompatActivity) getActivity()).getSupportActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public static interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.ads.googleads.v8.services;

import com.google.ads.googleads.v8.resources.AdGroupBidModifier;
import com.google.ads.googleads.v8.resources.AdGroupBidModifierName;
import com.google.ads.googleads.v8.services.stub.AdGroupBidModifierServiceStub;
import com.google.ads.googleads.v8.services.stub.AdGroupBidModifierServiceStubSettings;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Service Description: Service to manage ad group bid modifiers.
 *
 * <p>This class provides the ability to make remote calls to the backing service through method
 * calls that map to API methods. Sample code to get started:
 *
 * <pre>{@code
 * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
 *     AdGroupBidModifierServiceClient.create()) {
 *   AdGroupBidModifierName resourceName =
 *       AdGroupBidModifierName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]");
 *   AdGroupBidModifier response =
 *       adGroupBidModifierServiceClient.getAdGroupBidModifier(resourceName);
 * }
 * }</pre>
 *
 * <p>Note: close() needs to be called on the AdGroupBidModifierServiceClient object to clean up
 * resources such as threads. In the example above, try-with-resources is used, which automatically
 * calls close().
 *
 * <p>The surface of this class includes several types of Java methods for each of the API's
 * methods:
 *
 * <ol>
 *   <li> A "flattened" method. With this type of method, the fields of the request type have been
 *       converted into function parameters. It may be the case that not all fields are available as
 *       parameters, and not every API method will have a flattened method entry point.
 *   <li> A "request object" method. This type of method only takes one parameter, a request object,
 *       which must be constructed before the call. Not every API method will have a request object
 *       method.
 *   <li> A "callable" method. This type of method takes no parameters and returns an immutable API
 *       callable object, which can be used to initiate calls to the service.
 * </ol>
 *
 * <p>See the individual methods for example code.
 *
 * <p>Many parameters require resource names to be formatted in a particular way. To assist with
 * these names, this class includes a format method for each type of name, and additionally a parse
 * method to extract the individual identifiers contained within names that are returned.
 *
 * <p>This class can be customized by passing in a custom instance of
 * AdGroupBidModifierServiceSettings to create(). For example:
 *
 * <p>To customize credentials:
 *
 * <pre>{@code
 * AdGroupBidModifierServiceSettings adGroupBidModifierServiceSettings =
 *     AdGroupBidModifierServiceSettings.newBuilder()
 *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
 *         .build();
 * AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
 *     AdGroupBidModifierServiceClient.create(adGroupBidModifierServiceSettings);
 * }</pre>
 *
 * <p>To customize the endpoint:
 *
 * <pre>{@code
 * AdGroupBidModifierServiceSettings adGroupBidModifierServiceSettings =
 *     AdGroupBidModifierServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
 * AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
 *     AdGroupBidModifierServiceClient.create(adGroupBidModifierServiceSettings);
 * }</pre>
 *
 * <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
 */
@Generated("by gapic-generator-java")
public class AdGroupBidModifierServiceClient implements BackgroundResource {
  // Settings used to build the stub; null when the client was constructed directly from a stub.
  private final AdGroupBidModifierServiceSettings settings;
  // Transport-level stub that all public methods delegate to.
  private final AdGroupBidModifierServiceStub stub;

  /** Constructs an instance of AdGroupBidModifierServiceClient with default settings. */
  public static final AdGroupBidModifierServiceClient create() throws IOException {
    return create(AdGroupBidModifierServiceSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of AdGroupBidModifierServiceClient, using the given settings. The
   * channels are created based on the settings passed in, or defaults for any settings that are not
   * set.
   */
  public static final AdGroupBidModifierServiceClient create(
      AdGroupBidModifierServiceSettings settings) throws IOException {
    return new AdGroupBidModifierServiceClient(settings);
  }

  /**
   * Constructs an instance of AdGroupBidModifierServiceClient, using the given stub for making
   * calls. This is for advanced usage - prefer using create(AdGroupBidModifierServiceSettings).
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final AdGroupBidModifierServiceClient create(AdGroupBidModifierServiceStub stub) {
    return new AdGroupBidModifierServiceClient(stub);
  }

  /**
   * Constructs an instance of AdGroupBidModifierServiceClient, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected AdGroupBidModifierServiceClient(AdGroupBidModifierServiceSettings settings)
      throws IOException {
    this.settings = settings;
    this.stub = ((AdGroupBidModifierServiceStubSettings) settings.getStubSettings()).createStub();
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected AdGroupBidModifierServiceClient(AdGroupBidModifierServiceStub stub) {
    // No settings are available on this path; getSettings() will return null for such clients.
    this.settings = null;
    this.stub = stub;
  }

  public final AdGroupBidModifierServiceSettings getSettings() {
    return settings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public AdGroupBidModifierServiceStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested ad group bid modifier in full detail.
   *
   * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
   * [InternalError]() [QuotaError]() [RequestError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   AdGroupBidModifierName resourceName =
   *       AdGroupBidModifierName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]");
   *   AdGroupBidModifier response =
   *       adGroupBidModifierServiceClient.getAdGroupBidModifier(resourceName);
   * }
   * }</pre>
   *
   * @param resourceName Required. The resource name of the ad group bid modifier to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AdGroupBidModifier getAdGroupBidModifier(AdGroupBidModifierName resourceName) {
    GetAdGroupBidModifierRequest request =
        GetAdGroupBidModifierRequest.newBuilder()
            .setResourceName(resourceName == null ? null : resourceName.toString())
            .build();
    return getAdGroupBidModifier(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested ad group bid modifier in full detail.
   *
   * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
   * [InternalError]() [QuotaError]() [RequestError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   String resourceName =
   *       AdGroupBidModifierName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]").toString();
   *   AdGroupBidModifier response =
   *       adGroupBidModifierServiceClient.getAdGroupBidModifier(resourceName);
   * }
   * }</pre>
   *
   * @param resourceName Required. The resource name of the ad group bid modifier to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AdGroupBidModifier getAdGroupBidModifier(String resourceName) {
    GetAdGroupBidModifierRequest request =
        GetAdGroupBidModifierRequest.newBuilder().setResourceName(resourceName).build();
    return getAdGroupBidModifier(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested ad group bid modifier in full detail.
   *
   * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
   * [InternalError]() [QuotaError]() [RequestError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   GetAdGroupBidModifierRequest request =
   *       GetAdGroupBidModifierRequest.newBuilder()
   *           .setResourceName(
   *               AdGroupBidModifierName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]")
   *                   .toString())
   *           .build();
   *   AdGroupBidModifier response = adGroupBidModifierServiceClient.getAdGroupBidModifier(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AdGroupBidModifier getAdGroupBidModifier(GetAdGroupBidModifierRequest request) {
    // Synchronous call; delegates to the callable form below.
    return getAdGroupBidModifierCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the requested ad group bid modifier in full detail.
   *
   * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
   * [InternalError]() [QuotaError]() [RequestError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   GetAdGroupBidModifierRequest request =
   *       GetAdGroupBidModifierRequest.newBuilder()
   *           .setResourceName(
   *               AdGroupBidModifierName.of("[CUSTOMER_ID]", "[AD_GROUP_ID]", "[CRITERION_ID]")
   *                   .toString())
   *           .build();
   *   ApiFuture<AdGroupBidModifier> future =
   *       adGroupBidModifierServiceClient.getAdGroupBidModifierCallable().futureCall(request);
   *   // Do something.
   *   AdGroupBidModifier response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetAdGroupBidModifierRequest, AdGroupBidModifier>
      getAdGroupBidModifierCallable() {
    return stub.getAdGroupBidModifierCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates, updates, or removes ad group bid modifiers. Operation statuses are returned.
   *
   * <p>List of thrown errors: [AdGroupBidModifierError]() [AuthenticationError]()
   * [AuthorizationError]() [ContextError]() [CriterionError]() [DatabaseError]() [DistinctError]()
   * [FieldError]() [FieldMaskError]() [HeaderError]() [IdError]() [InternalError]() [MutateError]()
   * [NewResourceCreationError]() [NotEmptyError]() [OperatorError]() [QuotaError]() [RangeError]()
   * [RequestError]() [ResourceCountLimitExceededError]() [SizeLimitError]() [StringFormatError]()
   * [StringLengthError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   String customerId = "customerId-1581184615";
   *   List<AdGroupBidModifierOperation> operations = new ArrayList<>();
   *   MutateAdGroupBidModifiersResponse response =
   *       adGroupBidModifierServiceClient.mutateAdGroupBidModifiers(customerId, operations);
   * }
   * }</pre>
   *
   * @param customerId Required. ID of the customer whose ad group bid modifiers are being modified.
   * @param operations Required. The list of operations to perform on individual ad group bid
   *     modifiers.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final MutateAdGroupBidModifiersResponse mutateAdGroupBidModifiers(
      String customerId, List<AdGroupBidModifierOperation> operations) {
    MutateAdGroupBidModifiersRequest request =
        MutateAdGroupBidModifiersRequest.newBuilder()
            .setCustomerId(customerId)
            .addAllOperations(operations)
            .build();
    return mutateAdGroupBidModifiers(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates, updates, or removes ad group bid modifiers. Operation statuses are returned.
   *
   * <p>List of thrown errors: [AdGroupBidModifierError]() [AuthenticationError]()
   * [AuthorizationError]() [ContextError]() [CriterionError]() [DatabaseError]() [DistinctError]()
   * [FieldError]() [FieldMaskError]() [HeaderError]() [IdError]() [InternalError]() [MutateError]()
   * [NewResourceCreationError]() [NotEmptyError]() [OperatorError]() [QuotaError]() [RangeError]()
   * [RequestError]() [ResourceCountLimitExceededError]() [SizeLimitError]() [StringFormatError]()
   * [StringLengthError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   MutateAdGroupBidModifiersRequest request =
   *       MutateAdGroupBidModifiersRequest.newBuilder()
   *           .setCustomerId("customerId-1581184615")
   *           .addAllOperations(new ArrayList<AdGroupBidModifierOperation>())
   *           .setPartialFailure(true)
   *           .setValidateOnly(true)
   *           .build();
   *   MutateAdGroupBidModifiersResponse response =
   *       adGroupBidModifierServiceClient.mutateAdGroupBidModifiers(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final MutateAdGroupBidModifiersResponse mutateAdGroupBidModifiers(
      MutateAdGroupBidModifiersRequest request) {
    return mutateAdGroupBidModifiersCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates, updates, or removes ad group bid modifiers. Operation statuses are returned.
   *
   * <p>List of thrown errors: [AdGroupBidModifierError]() [AuthenticationError]()
   * [AuthorizationError]() [ContextError]() [CriterionError]() [DatabaseError]() [DistinctError]()
   * [FieldError]() [FieldMaskError]() [HeaderError]() [IdError]() [InternalError]() [MutateError]()
   * [NewResourceCreationError]() [NotEmptyError]() [OperatorError]() [QuotaError]() [RangeError]()
   * [RequestError]() [ResourceCountLimitExceededError]() [SizeLimitError]() [StringFormatError]()
   * [StringLengthError]()
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (AdGroupBidModifierServiceClient adGroupBidModifierServiceClient =
   *     AdGroupBidModifierServiceClient.create()) {
   *   MutateAdGroupBidModifiersRequest request =
   *       MutateAdGroupBidModifiersRequest.newBuilder()
   *           .setCustomerId("customerId-1581184615")
   *           .addAllOperations(new ArrayList<AdGroupBidModifierOperation>())
   *           .setPartialFailure(true)
   *           .setValidateOnly(true)
   *           .build();
   *   ApiFuture<MutateAdGroupBidModifiersResponse> future =
   *       adGroupBidModifierServiceClient.mutateAdGroupBidModifiersCallable().futureCall(request);
   *   // Do something.
   *   MutateAdGroupBidModifiersResponse response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<MutateAdGroupBidModifiersRequest, MutateAdGroupBidModifiersResponse>
      mutateAdGroupBidModifiersCallable() {
    return stub.mutateAdGroupBidModifiersCallable();
  }

  // BackgroundResource lifecycle methods below all delegate straight to the stub.

  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
}
// Generated from QL.g4 by ANTLR 4.1
// NOTE(review): ANTLR-generated parser — regenerate from QL.g4 rather than hand-editing logic;
// the setState(...) numbers and decision indices below are ATN state references and must not drift.
package antlr4;

import ast.form.*;
import ast.type.*;
import ast.literals.*;
import expr.conditional.*;
import expr.operation.*;
import expr.relational.*;
import expr.unary.*;
import expr.Expr;
import expr.Ident;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;

@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class QLParser extends Parser {
	// Per-decision DFA cache and shared prediction-context cache used by the ATN simulator.
	protected static final DFA[] _decisionToDFA;
	protected static final PredictionContextCache _sharedContextCache =
		new PredictionContextCache();
	// Token type constants; T__n are the grammar's anonymous literal tokens.
	public static final int
		T__23=1, T__22=2, T__21=3, T__20=4, T__19=5, T__18=6, T__17=7, T__16=8, T__15=9,
		T__14=10, T__13=11, T__12=12, T__11=13, T__10=14, T__9=15, T__8=16, T__7=17, T__6=18,
		T__5=19, T__4=20, T__3=21, T__2=22, T__1=23, T__0=24, WS=25, COMMENT=26, Bool=27,
		Ident=28, Int=29, Str=30;
	public static final String[] tokenNames = {
		"<INVALID>", "'form'", "')'", "'+'", "'*'", "'-'", "'('", "':'", "'if'",
		"'<'", "'!='", "'<='", "'&&'", "'||'", "'{'", "'>'", "'integer'", "'string'",
		"'else'", "'/'", "'=='", "'}'", "'>='", "'boolean'", "'!'", "WS", "COMMENT",
		"Bool", "Ident", "Int", "Str"
	};
	// Rule indices, parallel to ruleNames.
	public static final int
		RULE_forms = 0, RULE_statements = 1, RULE_statement = 2, RULE_question = 3,
		RULE_ifstatement = 4, RULE_ifelsestate = 5, RULE_unExpr = 6, RULE_mulExpr = 7,
		RULE_addExpr = 8, RULE_relExpr = 9, RULE_andExpr = 10, RULE_orExpr = 11,
		RULE_primary = 12, RULE_valuetype = 13;
	public static final String[] ruleNames = {
		"forms", "statements", "statement", "question", "ifstatement", "ifelsestate",
		"unExpr", "mulExpr", "addExpr", "relExpr", "andExpr", "orExpr", "primary",
		"valuetype"
	};

	@Override
	public String getGrammarFileName() { return "QL.g4"; }

	@Override
	public String[] getTokenNames() { return tokenNames; }

	@Override
	public String[] getRuleNames() { return ruleNames; }

	@Override
	public ATN getATN() { return _ATN; }

	public QLParser(TokenStream input) {
		super(input);
		_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
	}

	// Context for rule `forms`; `result` carries the synthesized Form AST node.
	public static class FormsContext extends ParserRuleContext {
		public Form result;
		public Token Ident;
		public StatementsContext s;
		public TerminalNode Ident() { return getToken(QLParser.Ident, 0); }
		public StatementsContext statements() {
			return getRuleContext(StatementsContext.class,0);
		}
		public FormsContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_forms; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).enterForms(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).exitForms(this);
		}
		@Override
		public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
			if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitForms(this);
			else return visitor.visitChildren(this);
		}
	}

	// Parses: 'form' Ident '{' statements '}' and builds the Form AST node.
	public final FormsContext forms() throws RecognitionException {
		FormsContext _localctx = new FormsContext(_ctx, getState());
		enterRule(_localctx, 0, RULE_forms);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(28); match(1);
			setState(29); ((FormsContext)_localctx).Ident = match(Ident);
			setState(30); match(14);
			setState(31); ((FormsContext)_localctx).s = statements(0);
			setState(32); match(21);
			((FormsContext)_localctx).result = new Form(new Ident((((FormsContext)_localctx).Ident!=null?((FormsContext)_localctx).Ident.getText():null)), (StatementList)((FormsContext)_localctx).s.result);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Context for the left-recursive rule `statements`; `result` accumulates a StatementList.
	public static class StatementsContext extends ParserRuleContext {
		public int _p;
		public StatementList result;
		public StatementsContext ss;
		public StatementContext s1;
		public StatementContext s2;
		public StatementContext statement() {
			return getRuleContext(StatementContext.class,0);
		}
		public StatementsContext statements() {
			return getRuleContext(StatementsContext.class,0);
		}
		public StatementsContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		public StatementsContext(ParserRuleContext parent, int invokingState, int _p) {
			super(parent, invokingState);
			this._p = _p;
		}
		@Override public int getRuleIndex() { return RULE_statements; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).enterStatements(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).exitStatements(this);
		}
		@Override
		public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
			if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitStatements(this);
			else return visitor.visitChildren(this);
		}
	}

	// Left-recursion-eliminated loop: parses one statement, then repeatedly extends the
	// StatementList while adaptive prediction keeps choosing the recursive alternative.
	public final StatementsContext statements(int _p) throws RecognitionException {
		ParserRuleContext _parentctx = _ctx;
		int _parentState = getState();
		StatementsContext _localctx = new StatementsContext(_ctx, _parentState, _p);
		StatementsContext _prevctx = _localctx;
		int _startState = 2;
		enterRecursionRule(_localctx, RULE_statements);
		try {
			int _alt;
			enterOuterAlt(_localctx, 1);
			{
			{
			setState(36); ((StatementsContext)_localctx).s1 = statement();
			((StatementsContext)_localctx).result = new StatementList();
			_localctx.result.addStatementtoList(((StatementsContext)_localctx).s1.result);
			}
			_ctx.stop = _input.LT(-1);
			setState(45);
			_errHandler.sync(this);
			_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
			while ( _alt!=2 && _alt!=-1 ) {
				if ( _alt==1 ) {
					if ( _parseListeners!=null ) triggerExitRuleEvent();
					_prevctx = _localctx;
					{
					{
					_localctx = new StatementsContext(_parentctx, _parentState, _p);
					_localctx.ss = _prevctx;
					pushNewRecursionContext(_localctx, _startState, RULE_statements);
					setState(39);
					if (!(1 >= _localctx._p)) throw new FailedPredicateException(this, "1 >= $_p");
					setState(40); ((StatementsContext)_localctx).s2 = statement();
					((StatementsContext)_localctx).result = ((StatementsContext)_localctx).ss.result;
					((StatementsContext)_localctx).ss.result.addStatementtoList(((StatementsContext)_localctx).s2.result);
					}
					}
				}
				setState(47);
				_errHandler.sync(this);
				_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			unrollRecursionContexts(_parentctx);
		}
		return _localctx;
	}

	// Context for rule `statement`; exactly one of q/x/y is populated per alternative.
	public static class StatementContext extends ParserRuleContext {
		public Statement result;
		public QuestionContext q;
		public IfstatementContext x;
		public IfelsestateContext y;
		public IfstatementContext ifstatement() {
			return getRuleContext(IfstatementContext.class,0);
		}
		public QuestionContext question() {
			return getRuleContext(QuestionContext.class,0);
		}
		public IfelsestateContext ifelsestate() {
			return getRuleContext(IfelsestateContext.class,0);
		}
		public StatementContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_statement; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).enterStatement(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).exitStatement(this);
		}
		@Override
		public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
			if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitStatement(this);
			else return visitor.visitChildren(this);
		}
	}

	// Dispatches on decision 1: a statement is a question, an if-statement, or an if-else.
	public final StatementContext statement() throws RecognitionException {
		StatementContext _localctx = new StatementContext(_ctx, getState());
		enterRule(_localctx, 4, RULE_statement);
		try {
			setState(57);
			switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(48); ((StatementContext)_localctx).q = question();
				((StatementContext)_localctx).result = ((StatementContext)_localctx).q.result;
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(51); ((StatementContext)_localctx).x = ifstatement();
				((StatementContext)_localctx).result = ((StatementContext)_localctx).x.result;
				}
				break;
			case 3:
				enterOuterAlt(_localctx, 3);
				{
				setState(54); ((StatementContext)_localctx).y = ifelsestate();
				((StatementContext)_localctx).result = ((StatementContext)_localctx).y.result;
				}
				break;
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Context for rule `question`.
	public static class QuestionContext extends ParserRuleContext {
		public Statement result;
		public Token Ident;
		public Token Str;
		public ValuetypeContext v1;
		public ValuetypeContext v2;
		public OrExprContext e1;
		public TerminalNode Ident() { return getToken(QLParser.Ident, 0); }
		public OrExprContext orExpr() {
			return getRuleContext(OrExprContext.class,0);
		}
		public ValuetypeContext valuetype() {
			return getRuleContext(ValuetypeContext.class,0);
		}
		public TerminalNode Str() { return getToken(QLParser.Str, 0); }
		public QuestionContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_question; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).enterQuestion(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof QLListener ) ((QLListener)listener).exitQuestion(this);
		}
		@Override
		public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
			if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitQuestion(this);
			else return visitor.visitChildren(this);
		}
	}

	// Parses a question declaration (Ident ':' Str valuetype [...]) via decision 2.
	public final QuestionContext question() throws RecognitionException {
		QuestionContext _localctx = new QuestionContext(_ctx, getState());
		enterRule(_localctx, 6, RULE_question);
		try {
			setState(74);
			switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(59); ((QuestionContext)_localctx).Ident = match(Ident);
				setState(60); match(7);
				setState(61); ((QuestionContext)_localctx).Str = match(Str);
				setState(62); ((QuestionContext)_localctx).v1 = valuetype();
				((QuestionContext)_localctx).result = new Question(new Ident((((QuestionContext)_localctx).Ident!=null?((QuestionContext)_localctx).Ident.getText():null)), new StrLiteral((((QuestionContext)_localctx).Str!=null?((QuestionContext)_localctx).Str.getText():null)), ((QuestionContext)_localctx).v1.result);
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(65); ((QuestionContext)_localctx).Ident = match(Ident);
				setState(66); match(7);
setState(67); ((QuestionContext)_localctx).Str = match(Str); setState(68); ((QuestionContext)_localctx).v2 = valuetype(); setState(69); match(6); setState(70); ((QuestionContext)_localctx).e1 = orExpr(); setState(71); match(2); ((QuestionContext)_localctx).result = new Computedquest (new Ident((((QuestionContext)_localctx).Ident!=null?((QuestionContext)_localctx).Ident.getText():null)), new StrLiteral((((QuestionContext)_localctx).Str!=null?((QuestionContext)_localctx).Str.getText():null)), ((QuestionContext)_localctx).v2.result, ((QuestionContext)_localctx).e1.result); } break; } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class IfstatementContext extends ParserRuleContext { public Statement result; public OrExprContext expr; public StatementsContext s; public StatementsContext statements() { return getRuleContext(StatementsContext.class,0); } public OrExprContext orExpr() { return getRuleContext(OrExprContext.class,0); } public IfstatementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_ifstatement; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterIfstatement(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitIfstatement(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? 
extends T>)visitor).visitIfstatement(this); else return visitor.visitChildren(this); } } public final IfstatementContext ifstatement() throws RecognitionException { IfstatementContext _localctx = new IfstatementContext(_ctx, getState()); enterRule(_localctx, 8, RULE_ifstatement); try { enterOuterAlt(_localctx, 1); { setState(76); match(8); setState(77); match(6); setState(78); ((IfstatementContext)_localctx).expr = orExpr(); setState(79); match(2); setState(80); match(14); setState(81); ((IfstatementContext)_localctx).s = statements(0); setState(82); match(21); ((IfstatementContext)_localctx).result = new Ifstate(((IfstatementContext)_localctx).s.result, ((IfstatementContext)_localctx).expr.result); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class IfelsestateContext extends ParserRuleContext { public Statement result; public OrExprContext expr; public StatementsContext s1; public StatementsContext s2; public List<StatementsContext> statements() { return getRuleContexts(StatementsContext.class); } public OrExprContext orExpr() { return getRuleContext(OrExprContext.class,0); } public StatementsContext statements(int i) { return getRuleContext(StatementsContext.class,i); } public IfelsestateContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_ifelsestate; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterIfelsestate(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitIfelsestate(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? 
extends T>)visitor).visitIfelsestate(this); else return visitor.visitChildren(this); } } public final IfelsestateContext ifelsestate() throws RecognitionException { IfelsestateContext _localctx = new IfelsestateContext(_ctx, getState()); enterRule(_localctx, 10, RULE_ifelsestate); try { enterOuterAlt(_localctx, 1); { setState(85); match(8); setState(86); match(6); setState(87); ((IfelsestateContext)_localctx).expr = orExpr(); setState(88); match(2); setState(89); match(14); setState(90); ((IfelsestateContext)_localctx).s1 = statements(0); setState(91); match(21); setState(92); match(18); setState(93); match(14); setState(94); ((IfelsestateContext)_localctx).s2 = statements(0); setState(95); match(21); ((IfelsestateContext)_localctx).result = new IfElse(((IfelsestateContext)_localctx).s1.result, ((IfelsestateContext)_localctx).expr.result, ((IfelsestateContext)_localctx).s2.result); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class UnExprContext extends ParserRuleContext { public Expr result; public UnExprContext x; public PrimaryContext r; public PrimaryContext primary() { return getRuleContext(PrimaryContext.class,0); } public UnExprContext unExpr() { return getRuleContext(UnExprContext.class,0); } public UnExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_unExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterUnExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitUnExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? 
// --- ANTLR-GENERATED CODE (do not hand-edit; regenerate from the QL grammar instead) ---
// Rule method unExpr(): unary prefix operators dispatched on lookahead — token 3 builds Pos,
// token 5 builds Neg, token 24 builds Not (each recursing into unExpr), otherwise falls
// through to primary(). Also declares MulExprContext (lhs/op/rhs for * and /).
extends T>)visitor).visitUnExpr(this); else return visitor.visitChildren(this); } } public final UnExprContext unExpr() throws RecognitionException { UnExprContext _localctx = new UnExprContext(_ctx, getState()); enterRule(_localctx, 12, RULE_unExpr); try { setState(113); switch (_input.LA(1)) { case 3: enterOuterAlt(_localctx, 1); { setState(98); match(3); setState(99); ((UnExprContext)_localctx).x = unExpr(); ((UnExprContext)_localctx).result = new Pos(((UnExprContext)_localctx).x.result); } break; case 5: enterOuterAlt(_localctx, 2); { setState(102); match(5); setState(103); ((UnExprContext)_localctx).x = unExpr(); ((UnExprContext)_localctx).result = new Neg(((UnExprContext)_localctx).x.result); } break; case 24: enterOuterAlt(_localctx, 3); { setState(106); match(24); setState(107); ((UnExprContext)_localctx).x = unExpr(); ((UnExprContext)_localctx).result = new Not(((UnExprContext)_localctx).x.result); } break; case 6: case Bool: case Ident: case Int: case Str: enterOuterAlt(_localctx, 4); { setState(110); ((UnExprContext)_localctx).r = primary(); ((UnExprContext)_localctx).result = ((UnExprContext)_localctx).r.result; } break; default: throw new NoViableAltException(this); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class MulExprContext extends ParserRuleContext { public Expr result; public UnExprContext lhs; public Token op; public UnExprContext rhs; public UnExprContext unExpr(int i) { return getRuleContext(UnExprContext.class,i); } public List<UnExprContext> unExpr() { return getRuleContexts(UnExprContext.class); } public MulExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_mulExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener )
// Rule method mulExpr(): left-associative loop over tokens 4/19; the operator's text ("*" or
// "/") selects whether a Mul or Div node wraps the running result. Also declares AddExprContext.
((QLListener)listener).enterMulExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitMulExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitMulExpr(this); else return visitor.visitChildren(this); } } public final MulExprContext mulExpr() throws RecognitionException { MulExprContext _localctx = new MulExprContext(_ctx, getState()); enterRule(_localctx, 14, RULE_mulExpr); int _la; try { enterOuterAlt(_localctx, 1); { setState(115); ((MulExprContext)_localctx).lhs = unExpr(); ((MulExprContext)_localctx).result = ((MulExprContext)_localctx).lhs.result; setState(123); _errHandler.sync(this); _la = _input.LA(1); while (_la==4 || _la==19) { { { setState(117); ((MulExprContext)_localctx).op = _input.LT(1); _la = _input.LA(1); if ( !(_la==4 || _la==19) ) { ((MulExprContext)_localctx).op = (Token)_errHandler.recoverInline(this); } consume(); setState(118); ((MulExprContext)_localctx).rhs = unExpr(); if ((((MulExprContext)_localctx).op!=null?((MulExprContext)_localctx).op.getText():null).equals("*")) { ((MulExprContext)_localctx).result = new Mul(_localctx.result, ((MulExprContext)_localctx).rhs.result); } if ((((MulExprContext)_localctx).op!=null?((MulExprContext)_localctx).op.getText():null).equals("/")) { ((MulExprContext)_localctx).result = new Div(_localctx.result, ((MulExprContext)_localctx).rhs.result); } } } setState(125); _errHandler.sync(this); _la = _input.LA(1); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class AddExprContext extends ParserRuleContext { public Expr result; public MulExprContext lhs; public Token op; public MulExprContext rhs; public List<MulExprContext> mulExpr() { return
// Rule method addExpr(): same left-associative loop shape over tokens 3/5; "+" builds Add,
// "-" builds Sub around the running result.
getRuleContexts(MulExprContext.class); } public MulExprContext mulExpr(int i) { return getRuleContext(MulExprContext.class,i); } public AddExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_addExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterAddExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitAddExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitAddExpr(this); else return visitor.visitChildren(this); } } public final AddExprContext addExpr() throws RecognitionException { AddExprContext _localctx = new AddExprContext(_ctx, getState()); enterRule(_localctx, 16, RULE_addExpr); int _la; try { enterOuterAlt(_localctx, 1); { setState(126); ((AddExprContext)_localctx).lhs = mulExpr(); ((AddExprContext)_localctx).result = ((AddExprContext)_localctx).lhs.result; setState(134); _errHandler.sync(this); _la = _input.LA(1); while (_la==3 || _la==5) { { { setState(128); ((AddExprContext)_localctx).op = _input.LT(1); _la = _input.LA(1); if ( !(_la==3 || _la==5) ) { ((AddExprContext)_localctx).op = (Token)_errHandler.recoverInline(this); } consume(); setState(129); ((AddExprContext)_localctx).rhs = mulExpr(); if ((((AddExprContext)_localctx).op!=null?((AddExprContext)_localctx).op.getText():null).equals("+")) { ((AddExprContext)_localctx).result = new Add(_localctx.result, ((AddExprContext)_localctx).rhs.result); } if ((((AddExprContext)_localctx).op!=null?((AddExprContext)_localctx).op.getText():null).equals("-")) { ((AddExprContext)_localctx).result = new Sub(_localctx.result, ((AddExprContext)_localctx).rhs.result); } } } setState(136); _errHandler.sync(this); _la = _input.LA(1); } } } catch
// RelExprContext declaration: relational expression with lhs/op/rhs over addExpr operands.
(RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class RelExprContext extends ParserRuleContext { public Expr result; public AddExprContext lhs; public Token op; public AddExprContext rhs; public List<AddExprContext> addExpr() { return getRuleContexts(AddExprContext.class); } public AddExprContext addExpr(int i) { return getRuleContext(AddExprContext.class,i); } public RelExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_relExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterRelExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitRelExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<?
// Rule method relExpr(): loop over the relational-operator token set (bitmask over tokens
// 9,10,11,15,20,22); the operator text selects LT, LEq, GT, GEq, Eq or NEq node construction.
extends T>)visitor).visitRelExpr(this); else return visitor.visitChildren(this); } } public final RelExprContext relExpr() throws RecognitionException { RelExprContext _localctx = new RelExprContext(_ctx, getState()); enterRule(_localctx, 18, RULE_relExpr); int _la; try { enterOuterAlt(_localctx, 1); { setState(137); ((RelExprContext)_localctx).lhs = addExpr(); ((RelExprContext)_localctx).result = ((RelExprContext)_localctx).lhs.result; setState(145); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << 9) | (1L << 10) | (1L << 11) | (1L << 15) | (1L << 20) | (1L << 22))) != 0)) { { { setState(139); ((RelExprContext)_localctx).op = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << 9) | (1L << 10) | (1L << 11) | (1L << 15) | (1L << 20) | (1L << 22))) != 0)) ) { ((RelExprContext)_localctx).op = (Token)_errHandler.recoverInline(this); } consume(); setState(140); ((RelExprContext)_localctx).rhs = addExpr(); if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals("<")) { ((RelExprContext)_localctx).result = new LT(_localctx.result, ((RelExprContext)_localctx).rhs.result); } if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals("<=")) { ((RelExprContext)_localctx).result = new LEq(_localctx.result, ((RelExprContext)_localctx).rhs.result); } if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals(">")) { ((RelExprContext)_localctx).result = new GT(_localctx.result, ((RelExprContext)_localctx).rhs.result); } if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals(">=")) { ((RelExprContext)_localctx).result = new GEq(_localctx.result, ((RelExprContext)_localctx).rhs.result); } if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals("==")) { ((RelExprContext)_localctx).result = new
// Continuation of relExpr() (Eq/NEq cases) and AndExprContext declaration.
Eq(_localctx.result, ((RelExprContext)_localctx).rhs.result); } if ((((RelExprContext)_localctx).op!=null?((RelExprContext)_localctx).op.getText():null).equals("!=")) { ((RelExprContext)_localctx).result = new NEq(_localctx.result, ((RelExprContext)_localctx).rhs.result); } } } setState(147); _errHandler.sync(this); _la = _input.LA(1); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class AndExprContext extends ParserRuleContext { public Expr result; public RelExprContext lhs; public RelExprContext rhs; public List<RelExprContext> relExpr() { return getRuleContexts(RelExprContext.class); } public RelExprContext relExpr(int i) { return getRuleContext(RelExprContext.class,i); } public AndExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_andExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterAndExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitAndExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<?
// Rule method andExpr(): left-associative chain of relExpr joined by token 12, folding
// into nested And nodes. Also declares OrExprContext.
extends T>)visitor).visitAndExpr(this); else return visitor.visitChildren(this); } } public final AndExprContext andExpr() throws RecognitionException { AndExprContext _localctx = new AndExprContext(_ctx, getState()); enterRule(_localctx, 20, RULE_andExpr); int _la; try { enterOuterAlt(_localctx, 1); { setState(148); ((AndExprContext)_localctx).lhs = relExpr(); ((AndExprContext)_localctx).result = ((AndExprContext)_localctx).lhs.result; setState(156); _errHandler.sync(this); _la = _input.LA(1); while (_la==12) { { { setState(150); match(12); setState(151); ((AndExprContext)_localctx).rhs = relExpr(); ((AndExprContext)_localctx).result = new And(_localctx.result, ((AndExprContext)_localctx).rhs.result); } } setState(158); _errHandler.sync(this); _la = _input.LA(1); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class OrExprContext extends ParserRuleContext { public Expr result; public AndExprContext lhs; public AndExprContext rhs; public AndExprContext andExpr(int i) { return getRuleContext(AndExprContext.class,i); } public List<AndExprContext> andExpr() { return getRuleContexts(AndExprContext.class); } public OrExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_orExpr; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterOrExpr(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitOrExpr(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<?
// Rule method orExpr(): left-associative chain of andExpr joined by token 13, folding into
// nested Or nodes. Also declares PrimaryContext (literal tokens and parenthesized orExpr).
extends T>)visitor).visitOrExpr(this); else return visitor.visitChildren(this); } } public final OrExprContext orExpr() throws RecognitionException { OrExprContext _localctx = new OrExprContext(_ctx, getState()); enterRule(_localctx, 22, RULE_orExpr); int _la; try { enterOuterAlt(_localctx, 1); { setState(159); ((OrExprContext)_localctx).lhs = andExpr(); ((OrExprContext)_localctx).result = ((OrExprContext)_localctx).lhs.result; setState(167); _errHandler.sync(this); _la = _input.LA(1); while (_la==13) { { { setState(161); match(13); setState(162); ((OrExprContext)_localctx).rhs = andExpr(); ((OrExprContext)_localctx).result = new Or(_localctx.result, ((OrExprContext)_localctx).rhs.result); } } setState(169); _errHandler.sync(this); _la = _input.LA(1); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class PrimaryContext extends ParserRuleContext { public Expr result; public Token Bool; public Token Int; public Token Ident; public Token Str; public OrExprContext x; public TerminalNode Ident() { return getToken(QLParser.Ident, 0); } public TerminalNode Bool() { return getToken(QLParser.Bool, 0); } public OrExprContext orExpr() { return getRuleContext(OrExprContext.class,0); } public TerminalNode Str() { return getToken(QLParser.Str, 0); } public TerminalNode Int() { return getToken(QLParser.Int, 0); } public PrimaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primary; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterPrimary(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitPrimary(this); } @Override public <T> T accept(ParseTreeVisitor<?
// Rule method primary(): literals dispatched on lookahead — Bool -> BoolLiteral (via
// Boolean.parseBoolean), Int -> IntLiteral (via Integer.parseInt), Ident -> Ident node,
// Str -> StrLiteral, token 6 -> parenthesized "( orExpr )" forwarding the inner result.
// Also declares ValuetypeContext.
extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitPrimary(this); else return visitor.visitChildren(this); } } public final PrimaryContext primary() throws RecognitionException { PrimaryContext _localctx = new PrimaryContext(_ctx, getState()); enterRule(_localctx, 24, RULE_primary); try { setState(183); switch (_input.LA(1)) { case Bool: enterOuterAlt(_localctx, 1); { setState(170); ((PrimaryContext)_localctx).Bool = match(Bool); ((PrimaryContext)_localctx).result = new BoolLiteral(Boolean.parseBoolean((((PrimaryContext)_localctx).Bool!=null?((PrimaryContext)_localctx).Bool.getText():null))); } break; case Int: enterOuterAlt(_localctx, 2); { setState(172); ((PrimaryContext)_localctx).Int = match(Int); ((PrimaryContext)_localctx).result = new IntLiteral(Integer.parseInt((((PrimaryContext)_localctx).Int!=null?((PrimaryContext)_localctx).Int.getText():null))); } break; case Ident: enterOuterAlt(_localctx, 3); { setState(174); ((PrimaryContext)_localctx).Ident = match(Ident); ((PrimaryContext)_localctx).result = new Ident((((PrimaryContext)_localctx).Ident!=null?((PrimaryContext)_localctx).Ident.getText():null)); } break; case Str: enterOuterAlt(_localctx, 4); { setState(176); ((PrimaryContext)_localctx).Str = match(Str); ((PrimaryContext)_localctx).result = new StrLiteral((((PrimaryContext)_localctx).Str!=null?((PrimaryContext)_localctx).Str.getText():null)); } break; case 6: enterOuterAlt(_localctx, 5); { setState(178); match(6); setState(179); ((PrimaryContext)_localctx).x = orExpr(); setState(180); match(2); ((PrimaryContext)_localctx).result = ((PrimaryContext)_localctx).x.result; } break; default: throw new NoViableAltException(this); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class ValuetypeContext extends ParserRuleContext { public Type result; public
// --- ANTLR-GENERATED CODE (do not hand-edit; regenerate from the QL grammar instead) ---
// Rule method valuetype(): token 23 -> Booltype, 16 -> Inttype, 17 -> Strtype.
// sempred()/statements_sempred() evaluate the grammar's single semantic predicate
// ("1 >= $_p") for the left-recursive statements rule. The tail begins the serialized
// ATN string — opaque, machine-written transition-network data.
ValuetypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valuetype; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).enterValuetype(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof QLListener ) ((QLListener)listener).exitValuetype(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof QLVisitor ) return ((QLVisitor<? extends T>)visitor).visitValuetype(this); else return visitor.visitChildren(this); } } public final ValuetypeContext valuetype() throws RecognitionException { ValuetypeContext _localctx = new ValuetypeContext(_ctx, getState()); enterRule(_localctx, 26, RULE_valuetype); try { setState(191); switch (_input.LA(1)) { case 23: enterOuterAlt(_localctx, 1); { setState(185); match(23); ((ValuetypeContext)_localctx).result = new Booltype(); } break; case 16: enterOuterAlt(_localctx, 2); { setState(187); match(16); ((ValuetypeContext)_localctx).result = new Inttype(); } break; case 17: enterOuterAlt(_localctx, 3); { setState(189); match(17); ((ValuetypeContext)_localctx).result = new Strtype(); } break; default: throw new NoViableAltException(this); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: return statements_sempred((StatementsContext)_localctx, predIndex); } return true; } private boolean statements_sempred(StatementsContext _localctx, int predIndex) { switch (predIndex) { case 0: return 1 >= _localctx._p; } return true; } public static final String _serializedATN = "\3\uacf5\uee8c\u4f5d\u8b0d\u4a45\u78bd\u1b2f\u3378\3 \u00c4\4\2\t\2\4"+
// Serialized ATN continuation — generated data; every escape is significant, never edit by hand.
"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3"+ "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3.\n\3\f\3\16\3\61\13\3\3\4\3\4\3\4\3"+ "\4\3\4\3\4\3\4\3\4\3\4\5\4<\n\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+ "\5\3\5\3\5\3\5\3\5\3\5\5\5M\n\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3"+ "\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b"+ "\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\5\bt\n\b\3\t\3\t\3\t\3\t\3\t"+ "\3\t\7\t|\n\t\f\t\16\t\177\13\t\3\n\3\n\3\n\3\n\3\n\3\n\7\n\u0087\n\n"+ "\f\n\16\n\u008a\13\n\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u0092\n\13\f\13"+ "\16\13\u0095\13\13\3\f\3\f\3\f\3\f\3\f\3\f\7\f\u009d\n\f\f\f\16\f\u00a0"+ "\13\f\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u00a8\n\r\f\r\16\r\u00ab\13\r\3\16\3"+ "\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u00ba"+ "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\5\17\u00c2\n\17\3\17\2\20\2\4\6\b"+ "\n\f\16\20\22\24\26\30\32\34\2\5\4\2\6\6\25\25\4\2\5\5\7\7\6\2\13\r\21"+ "\21\26\26\30\30\u00c7\2\36\3\2\2\2\4%\3\2\2\2\6;\3\2\2\2\bL\3\2\2\2\n"+ "N\3\2\2\2\fW\3\2\2\2\16s\3\2\2\2\20u\3\2\2\2\22\u0080\3\2\2\2\24\u008b"+ "\3\2\2\2\26\u0096\3\2\2\2\30\u00a1\3\2\2\2\32\u00b9\3\2\2\2\34\u00c1\3"+ "\2\2\2\36\37\7\3\2\2\37 \7\36\2\2 !\7\20\2\2!\"\5\4\3\2\"#\7\27\2\2#$"+ "\b\2\1\2$\3\3\2\2\2%&\b\3\1\2&\'\5\6\4\2\'(\b\3\1\2(/\3\2\2\2)*\6\3\2"+ "\3*+\5\6\4\2+,\b\3\1\2,.\3\2\2\2-)\3\2\2\2.\61\3\2\2\2/-\3\2\2\2/\60\3"+ "\2\2\2\60\5\3\2\2\2\61/\3\2\2\2\62\63\5\b\5\2\63\64\b\4\1\2\64<\3\2\2"+ "\2\65\66\5\n\6\2\66\67\b\4\1\2\67<\3\2\2\289\5\f\7\29:\b\4\1\2:<\3\2\2"+ "\2;\62\3\2\2\2;\65\3\2\2\2;8\3\2\2\2<\7\3\2\2\2=>\7\36\2\2>?\7\t\2\2?"+ "@\7 \2\2@A\5\34\17\2AB\b\5\1\2BM\3\2\2\2CD\7\36\2\2DE\7\t\2\2EF\7 \2\2"+ "FG\5\34\17\2GH\7\b\2\2HI\5\30\r\2IJ\7\4\2\2JK\b\5\1\2KM\3\2\2\2L=\3\2"+ "\2\2LC\3\2\2\2M\t\3\2\2\2NO\7\n\2\2OP\7\b\2\2PQ\5\30\r\2QR\7\4\2\2RS\7"+ "\20\2\2ST\5\4\3\2TU\7\27\2\2UV\b\6\1\2V\13\3\2\2\2WX\7\n\2\2XY\7\b\2\2"+
// Serialized ATN continuation — generated data.
"YZ\5\30\r\2Z[\7\4\2\2[\\\7\20\2\2\\]\5\4\3\2]^\7\27\2\2^_\7\24\2\2_`\7"+ "\20\2\2`a\5\4\3\2ab\7\27\2\2bc\b\7\1\2c\r\3\2\2\2de\7\5\2\2ef\5\16\b\2"+ "fg\b\b\1\2gt\3\2\2\2hi\7\7\2\2ij\5\16\b\2jk\b\b\1\2kt\3\2\2\2lm\7\32\2"+ "\2mn\5\16\b\2no\b\b\1\2ot\3\2\2\2pq\5\32\16\2qr\b\b\1\2rt\3\2\2\2sd\3"+ "\2\2\2sh\3\2\2\2sl\3\2\2\2sp\3\2\2\2t\17\3\2\2\2uv\5\16\b\2v}\b\t\1\2"+ "wx\t\2\2\2xy\5\16\b\2yz\b\t\1\2z|\3\2\2\2{w\3\2\2\2|\177\3\2\2\2}{\3\2"+ "\2\2}~\3\2\2\2~\21\3\2\2\2\177}\3\2\2\2\u0080\u0081\5\20\t\2\u0081\u0088"+ "\b\n\1\2\u0082\u0083\t\3\2\2\u0083\u0084\5\20\t\2\u0084\u0085\b\n\1\2"+ "\u0085\u0087\3\2\2\2\u0086\u0082\3\2\2\2\u0087\u008a\3\2\2\2\u0088\u0086"+ "\3\2\2\2\u0088\u0089\3\2\2\2\u0089\23\3\2\2\2\u008a\u0088\3\2\2\2\u008b"+ "\u008c\5\22\n\2\u008c\u0093\b\13\1\2\u008d\u008e\t\4\2\2\u008e\u008f\5"+ "\22\n\2\u008f\u0090\b\13\1\2\u0090\u0092\3\2\2\2\u0091\u008d\3\2\2\2\u0092"+ "\u0095\3\2\2\2\u0093\u0091\3\2\2\2\u0093\u0094\3\2\2\2\u0094\25\3\2\2"+ "\2\u0095\u0093\3\2\2\2\u0096\u0097\5\24\13\2\u0097\u009e\b\f\1\2\u0098"+ "\u0099\7\16\2\2\u0099\u009a\5\24\13\2\u009a\u009b\b\f\1\2\u009b\u009d"+ "\3\2\2\2\u009c\u0098\3\2\2\2\u009d\u00a0\3\2\2\2\u009e\u009c\3\2\2\2\u009e"+ "\u009f\3\2\2\2\u009f\27\3\2\2\2\u00a0\u009e\3\2\2\2\u00a1\u00a2\5\26\f"+ "\2\u00a2\u00a9\b\r\1\2\u00a3\u00a4\7\17\2\2\u00a4\u00a5\5\26\f\2\u00a5"+ "\u00a6\b\r\1\2\u00a6\u00a8\3\2\2\2\u00a7\u00a3\3\2\2\2\u00a8\u00ab\3\2"+ "\2\2\u00a9\u00a7\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa\31\3\2\2\2\u00ab\u00a9"+ "\3\2\2\2\u00ac\u00ad\7\35\2\2\u00ad\u00ba\b\16\1\2\u00ae\u00af\7\37\2"+ "\2\u00af\u00ba\b\16\1\2\u00b0\u00b1\7\36\2\2\u00b1\u00ba\b\16\1\2\u00b2"+ "\u00b3\7 \2\2\u00b3\u00ba\b\16\1\2\u00b4\u00b5\7\b\2\2\u00b5\u00b6\5\30"+ "\r\2\u00b6\u00b7\7\4\2\2\u00b7\u00b8\b\16\1\2\u00b8\u00ba\3\2\2\2\u00b9"+ "\u00ac\3\2\2\2\u00b9\u00ae\3\2\2\2\u00b9\u00b0\3\2\2\2\u00b9\u00b2\3\2"+ "\2\2\u00b9\u00b4\3\2\2\2\u00ba\33\3\2\2\2\u00bb\u00bc\7\31\2\2\u00bc\u00c2"+
// Final serialized-ATN fragment, ATN deserialization into the shared _ATN instance, and the
// static initializer that allocates one DFA per ATN decision for the adaptive predictor.
"\b\17\1\2\u00bd\u00be\7\22\2\2\u00be\u00c2\b\17\1\2\u00bf\u00c0\7\23\2"+ "\2\u00c0\u00c2\b\17\1\2\u00c1\u00bb\3\2\2\2\u00c1\u00bd\3\2\2\2\u00c1"+ "\u00bf\3\2\2\2\u00c2\35\3\2\2\2\r/;Ls}\u0088\u0093\u009e\u00a9\u00b9\u00c1"; public static final ATN _ATN = ATNSimulator.deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
package xal.schemas;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import javax.xml.XMLConstants;
import javax.xml.transform.dom.DOMSource;
import javax.xml.validation.Schema;
import javax.xml.validation.Validator;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Unit test case for <code>ModelConfig.xsd</code> XML schema using <code>ModelConfig.xml</code> structure.
 * <p>
 * The test builds a DOM document element by element and re-validates it against the schema after
 * every step, asserting that validation fails with the expected error message until the required
 * content has been added. Several sections (hardware, static/synchronized maps) are currently
 * disabled via block comments.
 * @author <a href='jakob.battelino@cosylab.com'>Jakob Battelino Prelog</a>
 */
public class ModelConfigTest extends AbstractXMLValidation {

    /**
     * Progressively builds a ModelConfig document and validates it at each stage.
     * A blank document must validate; each partially built stage must fail with a schema error
     * naming the incomplete element, until the document is structurally complete.
     */
    @Override
    public void progressiveSchemaValidation() {
        //Get XML schema.
        Schema schema = null;
        try {
            schema = getSchema();
        } catch(Exception e) {
            fail(e.getMessage());
        }
        assertNotNull(schema);
        //Create a new DOM document.
        Document document = null;
        try {
            document = getDocumentBuilder().newDocument();
        } catch(Exception e) {
            fail(e.getMessage());
        }
        assertNotNull(document);
        Validator validator = schema.newValidator();
        //Blank document should be valid.
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Blank document should be valid!");
        }
        //Add and test the root 'configuration' element.
        Element root = testRoot(document, validator);
        //Add and test elements element.
        testElements(document, root, validator);
        //Add and test hardware element (currently disabled).
        /*testHardware(document, root, validator);*/
        //Add and test associations element.
        testAssociations(document, root, validator);
    }

    /**
     * @return the test XML document read from <code>modelconfig_test.xml</code>.
     * @throws Exception if the document cannot be read.
     */
    @Override
    protected Document getTestDocument() throws Exception {
        return readDocument(DIR_TEST_XMLS+"modelconfig_test.xml");
    }

    /**
     * @return the W3C XML schema read from <code>model-impl.xsd</code>.
     * @throws Exception if the schema cannot be read or parsed.
     */
    @Override
    protected Schema getSchema() throws Exception {
        return readSchema(DIR_SCHEMAS+"model-impl.xsd", XMLConstants.W3C_XML_SCHEMA_NS_URI);
    }

    /**
     * @return the external XML document; same file as {@link #getTestDocument()}.
     * @throws Exception if the document cannot be read.
     */
    @Override
    protected Document getExternalDocument() throws Exception {
        return readDocument(DIR_TEST_XMLS+"modelconfig_test.xml");
    }

    /**
     * Appends the root <code>configuration</code> element to the document and verifies root-level
     * validation: a bogus root element is rejected, an empty root is reported incomplete, and a
     * bogus child of the root is rejected.
     * NOTE(review): validation checks on clones use a copied document so the document under
     * construction is not polluted with fake elements.
     * @param document document under construction (mutated: root element is appended).
     * @param validator validator for the ModelConfig schema.
     * @return the appended root <code>configuration</code> element.
     */
    private static Element testRoot(Document document, Validator validator) {
        //Fake root element
        try {
            Document testDoc = (Document)document.cloneNode(true);
            Element fakeElement = testDoc.createElement("fake1");
            testDoc.appendChild(fakeElement);
            validator.validate(new DOMSource(testDoc));
            fail("Validation with incorrect root element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Cannot find the declaration of element 'fake1'."));
        }
        //Correct root element.
        Element root = document.createElement("configuration");
        root.setAttributeNS("http://www.w3.org/2001/XMLSchema-instance", "xsi:noNamespaceSchemaLocation", "http://sourceforge.net/p/xaldev/openxal/ci/master/tree/core/resources/xal/schemas/ModelConfig.xsd?format=raw");
        document.appendChild(root);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete root element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'configuration' is not complete."));
        }
        //Fake root child
        try {
            Document testDoc = (Document)document.cloneNode(true);
            Element testRoot = (Element)testDoc.getElementsByTagName("configuration").item(0);
            Element fakeElement = testDoc.createElement("fake1");
            testRoot.appendChild(fakeElement);
            validator.validate(new DOMSource(testDoc));
            fail("Validation with incorrect root child element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Invalid content was found starting with element 'fake1'."));
        }
        return root;
    }

    /**
     * Appends the <code>elements</code> element and its required children
     * (<code>default</code>, <code>drift</code>, <code>rfcavdrift</code>, <code>sequence</code>),
     * asserting after each step that validation fails for the expected reason and that the
     * already-completed child no longer appears in the error message.
     * @param document document under construction (mutated).
     * @param root the root <code>configuration</code> element.
     * @param validator validator for the ModelConfig schema.
     * @return the appended <code>elements</code> element.
     */
    private static Element testElements(Document document, Element root, Validator validator) {
        //Add elements element.
        Element elements = document.createElement("elements");
        root.appendChild(elements);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete elements element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'elements' is not complete."));
        }
        //Add default element.
        Element defaultElement = document.createElement("default");
        elements.appendChild(defaultElement);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete default element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        defaultElement.setAttribute("type", "fake.package.model.Default");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete elements element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'elements' is not complete."));
            assertFalse(e.getMessage().contains("default"));
        }
        //Add drift element.
        Element driftElement = document.createElement("drift");
        elements.appendChild(driftElement);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete drift element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        driftElement.setAttribute("type", "fake.package.model.Drift");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete configuration element should not be successful!");
        } catch(Exception e) {
//            assertTrue(e.getMessage().contains("The content of element 'elements' is not complete."));
            assertFalse(e.getMessage().contains("{drift}"));
        }
        //Add RF cavity drift element.
        Element rfcavdriftElement = document.createElement("rfcavdrift");
        elements.appendChild(rfcavdriftElement);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete rfcavdrift element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        rfcavdriftElement.setAttribute("type", "fake.package.model.RfCavDrift");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete configuration element should not be successful!");
        } catch(Exception e) {
//            assertTrue(e.getMessage().contains("The content of element 'elements' is not complete."));
            assertFalse(e.getMessage().contains("rfcavdrift"));
        }
        //Add sequence element.
        Element seqElement = document.createElement("sequence");
        elements.appendChild(seqElement);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete sequence element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        seqElement.setAttribute("type", "fake.package.model.Sector");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete configuration element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'configuration' is not complete."));
            assertFalse(e.getMessage().contains("sequence"));
        }
        return elements;
    }

    /*
    private static Element testHardware(Document document, Element root, Validator validator) {
        //Add hardware element.
        Element hardware = document.createElement("hardware");
        root.appendChild(hardware);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete hardware element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'hardware' is not complete."));
        }
        //Add thin elements.
        Element thin1 = document.createElement("thin");
        hardware.appendChild(thin1);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete thin element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        thin1.setAttribute("type", "fake.package.model.Thin1");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete hardware element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'hardware' is not complete."));
        }
        Element thin2 = document.createElement("thin");
        hardware.appendChild(thin2);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete thin element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        thin2.setAttribute("type", "fake.package.model.Thin2");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete hardware element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'hardware' is not complete."));
        }
        //Add thick elements.
        Element thick1 = document.createElement("thick");
        hardware.appendChild(thick1);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete thick element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        thick1.setAttribute("type", "fake.package.model.Thick1");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete hardware element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'hardware' is not complete."));
        }
        Element thick2 = document.createElement("thick");
        hardware.appendChild(thick2);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete thick element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'type' attribute.
        thick2.setAttribute("type", "fake.package.model.Thick2");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete hardware element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'hardware' is not complete."));
        }
        //Add split element.
        Element split = document.createElement("split");
        hardware.appendChild(split);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete configuration element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'configuration' is not complete."));
            assertFalse(e.getMessage().contains("thin"));
            assertFalse(e.getMessage().contains("thick"));
        }
        return hardware;
    }*/

    /**
     * Appends the <code>associations</code> element and exercises its map children.
     * Only the basic map test is active; static and synchronized map tests are disabled below.
     * @param document document under construction (mutated).
     * @param root the root <code>configuration</code> element.
     * @param validator validator for the ModelConfig schema.
     * @return the appended <code>associations</code> element.
     */
    private static Element testAssociations(Document document, Element root, Validator validator) {
        //Add associations element.
        Element associations = document.createElement("associations");
        root.appendChild(associations);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete associations element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'associations' is not complete."));
        }
        //Add and test basic map element.
        testBasicMap(document, associations, validator);
        //Add and test static/synchronized map elements (currently disabled).
        /*testStaticMap(document, associations, validator);
        //Add and test basic map element.
        testSynchronizedMap(document, associations, validator);
        */
        return associations;
    }

    /**
     * Appends a basic <code>map</code> element: without attributes it must be rejected, and after
     * setting the required 'smf' and 'model' attributes the whole document must validate.
     * @param document document under construction (mutated).
     * @param associations the <code>associations</code> element to append to.
     * @param validator validator for the ModelConfig schema.
     * @return the appended <code>map</code> element.
     */
    private static Element testBasicMap(Document document, Element associations, Validator validator) {
        //Add basic map element.
        Element map = document.createElement("map");
        associations.appendChild(map);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete map element should not be successful!");
        } catch(Exception e) {
            assertTrue(
                    e.getMessage().contains("Attribute 'smf' must appear on element") ||
                    e.getMessage().contains("Attribute 'model' must appear on element"));
        }
        //Add 'smf' and 'model' attributes.
        map.setAttribute("smf", "fake.package.BasicMap");
        map.setAttribute("model", "fake.package.model.BasicMap");
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        return map;
    }

    /*
    private static Element testStaticMap(Document document, Element associations, Validator validator) {
        //Add static map element.
        Element map = document.createElement("map");
        associations.appendChild(map);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete map element should not be successful!");
        } catch(Exception e) {
            assertTrue(
                    e.getMessage().contains("Attribute 'smf' must appear on element") ||
                    e.getMessage().contains("Attribute 'model' must appear on element"));
        }
        //Add 'smf' and 'model' attributes.
        map.setAttribute("smf", "fake.package.BasicMap");
        map.setAttribute("model", "fake.package.model.BasicMap");
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        //Add static element.
        Element staticElement = document.createElement("static");
        map.appendChild(staticElement);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete static element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'static' is not complete."));
        }
        //Add first property element.
        Element property1 = document.createElement("property");
        staticElement.appendChild(property1);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete property element should not be successful!");
        } catch(Exception e) {
            assertTrue(
                    e.getMessage().contains("Attribute 'value' must appear on element") ||
                    e.getMessage().contains("Attribute 'name' must appear on element"));
        }
        //Add 'name' and 'value' attributes.
        property1.setAttribute("name", "test1");
        property1.setAttribute("value", "1");
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        //Add second property element.
        Element property2 = document.createElement("property");
        property2.setAttribute("name", "test2");
        property2.setAttribute("value", "2");
        staticElement.appendChild(property2);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with more than one property element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("No child element is expected at this point."));
        }
        staticElement.removeChild(property2);
        return map;
    }

    private static Element testSynchronizedMap(Document document, Element associations, Validator validator) {
        //Add synchronized map element.
        Element map = document.createElement("map");
        associations.appendChild(map);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete map element should not be successful!");
        } catch(Exception e) {
            assertTrue(
                    e.getMessage().contains("Attribute 'smf' must appear on element") ||
                    e.getMessage().contains("Attribute 'model' must appear on element"));
        }
        //Add 'smf' and 'model' attributes.
        map.setAttribute("smf", "fake.package.BasicMap");
        map.setAttribute("model", "fake.package.model.BasicMap");
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        //Add first synchronize element.
        Element synchronize1 = document.createElement("synchronize");
        map.appendChild(synchronize1);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete synchronize element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("Attribute 'mode' must appear on element"));
        }
        //Add 'mode' attribute.
        synchronize1.setAttribute("mode", "INIT");
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete synchronize element should not be successful!");
        } catch(Exception e) {
            assertTrue(e.getMessage().contains("The content of element 'synchronize' is not complete."));
            assertTrue(e.getMessage().contains("parameter"));
        }
        //Add first parameter element.
        Element parameter11 = document.createElement("parameter");
        synchronize1.appendChild(parameter11);
        try {
            validator.validate(new DOMSource(document));
            fail("Validation with incomplete parameter element should not be successful!");
        } catch(Exception e) {
            assertTrue(
                    e.getMessage().contains("Attribute 'name' must appear on element") ||
                    e.getMessage().contains("Attribute 'hget' must appear on element") ||
                    e.getMessage().contains("Attribute 'mset' must appear on element") ||
                    e.getMessage().contains("Attribute 'type' must appear on element"));
        }
        //Add 'name', 'hget', 'mset' and 'type' attributes.
        parameter11.setAttribute("name", "A");
        parameter11.setAttribute("hget", "getField");
        parameter11.setAttribute("mset", "setField");
        parameter11.setAttribute("type", "java.lang.Double");
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        //Add second parameter element.
        Element parameter12 = document.createElement("parameter");
        parameter12.setAttribute("name", "B");
        parameter12.setAttribute("hget", "getField");
        parameter12.setAttribute("mset", "setField");
        parameter12.setAttribute("type", "java.lang.Double");
        synchronize1.appendChild(parameter12);
        try {
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            fail("Document should now be valid!");
        }
        //Add second synchronize element.
        Element synchronize2 = document.createElement("synchronize");
        Element parameter21 = document.createElement("parameter");
        parameter21.setAttribute("name", "A");
        parameter21.setAttribute("hget", "getField");
        parameter21.setAttribute("mset", "setField");
        parameter21.setAttribute("type", "java.lang.Double");
        synchronize2.appendChild(parameter21);
        map.appendChild(synchronize2);
        //Test different synchronize modes.
        try {
            synchronize2.setAttribute("mode", "INIT");
            validator.validate(new DOMSource(document));
            synchronize2.setAttribute("mode", "LIVE");
            validator.validate(new DOMSource(document));
            synchronize2.setAttribute("mode", "DESIGN");
            validator.validate(new DOMSource(document));
            synchronize2.setAttribute("mode", "RF_DESIGN");
            validator.validate(new DOMSource(document));
        } catch(Exception e) {
            System.out.println(e.getMessage());
            fail("Schema should support modes: INIT, LIVE, DESIGN, RF_DESIGN!");
        }
        return map;
    }*/
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.lambda.model; import java.io.Serializable; /** * <p> * Provides configuration information about a Lambda function version alias. * </p> */ public class AliasConfiguration implements Serializable, Cloneable { /** * <p> * Lambda function ARN that is qualified using the alias name as the suffix. * For example, if you create an alias called <code>BETA</code> that points * to a helloworld function version, the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code>. * </p> */ private String aliasArn; /** * <p> * Alias name. * </p> */ private String name; /** * <p> * Function version to which the alias points. * </p> */ private String functionVersion; /** * <p> * Alias description. * </p> */ private String description; /** * <p> * Lambda function ARN that is qualified using the alias name as the suffix. * For example, if you create an alias called <code>BETA</code> that points * to a helloworld function version, the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code>. * </p> * * @param aliasArn * Lambda function ARN that is qualified using the alias name as the * suffix. For example, if you create an alias called * <code>BETA</code> that points to a helloworld function version, * the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code> * . 
*/ public void setAliasArn(String aliasArn) { this.aliasArn = aliasArn; } /** * <p> * Lambda function ARN that is qualified using the alias name as the suffix. * For example, if you create an alias called <code>BETA</code> that points * to a helloworld function version, the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code>. * </p> * * @return Lambda function ARN that is qualified using the alias name as the * suffix. For example, if you create an alias called * <code>BETA</code> that points to a helloworld function version, * the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code> * . */ public String getAliasArn() { return this.aliasArn; } /** * <p> * Lambda function ARN that is qualified using the alias name as the suffix. * For example, if you create an alias called <code>BETA</code> that points * to a helloworld function version, the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code>. * </p> * * @param aliasArn * Lambda function ARN that is qualified using the alias name as the * suffix. For example, if you create an alias called * <code>BETA</code> that points to a helloworld function version, * the ARN is * <code>arn:aws:lambda:aws-regions:acct-id:function:helloworld:BETA</code> * . * @return Returns a reference to this object so that method calls can be * chained together. */ public AliasConfiguration withAliasArn(String aliasArn) { setAliasArn(aliasArn); return this; } /** * <p> * Alias name. * </p> * * @param name * Alias name. */ public void setName(String name) { this.name = name; } /** * <p> * Alias name. * </p> * * @return Alias name. */ public String getName() { return this.name; } /** * <p> * Alias name. * </p> * * @param name * Alias name. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public AliasConfiguration withName(String name) { setName(name); return this; } /** * <p> * Function version to which the alias points. * </p> * * @param functionVersion * Function version to which the alias points. */ public void setFunctionVersion(String functionVersion) { this.functionVersion = functionVersion; } /** * <p> * Function version to which the alias points. * </p> * * @return Function version to which the alias points. */ public String getFunctionVersion() { return this.functionVersion; } /** * <p> * Function version to which the alias points. * </p> * * @param functionVersion * Function version to which the alias points. * @return Returns a reference to this object so that method calls can be * chained together. */ public AliasConfiguration withFunctionVersion(String functionVersion) { setFunctionVersion(functionVersion); return this; } /** * <p> * Alias description. * </p> * * @param description * Alias description. */ public void setDescription(String description) { this.description = description; } /** * <p> * Alias description. * </p> * * @return Alias description. */ public String getDescription() { return this.description; } /** * <p> * Alias description. * </p> * * @param description * Alias description. * @return Returns a reference to this object so that method calls can be * chained together. */ public AliasConfiguration withDescription(String description) { setDescription(description); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getAliasArn() != null) sb.append("AliasArn: " + getAliasArn() + ","); if (getName() != null) sb.append("Name: " + getName() + ","); if (getFunctionVersion() != null) sb.append("FunctionVersion: " + getFunctionVersion() + ","); if (getDescription() != null) sb.append("Description: " + getDescription()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof AliasConfiguration == false) return false; AliasConfiguration other = (AliasConfiguration) obj; if (other.getAliasArn() == null ^ this.getAliasArn() == null) return false; if (other.getAliasArn() != null && other.getAliasArn().equals(this.getAliasArn()) == false) return false; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getFunctionVersion() == null ^ this.getFunctionVersion() == null) return false; if (other.getFunctionVersion() != null && other.getFunctionVersion().equals(this.getFunctionVersion()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getAliasArn() == null) ? 0 : getAliasArn().hashCode()); hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getFunctionVersion() == null) ? 0 : getFunctionVersion() .hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 
0 : getDescription().hashCode()); return hashCode; } @Override public AliasConfiguration clone() { try { return (AliasConfiguration) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/** * * Copyright (c) Microsoft and contributors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. package com.microsoft.azure.management.sql; import com.microsoft.azure.management.sql.models.ServiceTierAdvisor; import com.microsoft.azure.management.sql.models.ServiceTierAdvisorGetResponse; import com.microsoft.azure.management.sql.models.ServiceTierAdvisorListResponse; import com.microsoft.azure.management.sql.models.ServiceTierAdvisorProperties; import com.microsoft.azure.management.sql.models.SloUsageMetric; import com.microsoft.windowsazure.core.ServiceOperations; import com.microsoft.windowsazure.core.utils.CollectionStringBuilder; import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.tracing.CloudTracing; import java.io.IOException; import java.io.InputStream; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Calendar; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.Future; import javax.xml.bind.DatatypeConverter; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpGet; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import 
org.codehaus.jackson.node.ArrayNode; import org.codehaus.jackson.node.NullNode; /** * Represents all the operations for operating on service tier advisors. * Contains operations to: Retrieve. */ public class ServiceTierAdvisorOperationsImpl implements ServiceOperations<SqlManagementClientImpl>, ServiceTierAdvisorOperations { /** * Initializes a new instance of the ServiceTierAdvisorOperationsImpl class. * * @param client Reference to the service client. */ ServiceTierAdvisorOperationsImpl(SqlManagementClientImpl client) { this.client = client; } private SqlManagementClientImpl client; /** * Gets a reference to the * microsoft.azure.management.sql.SqlManagementClientImpl. * @return The Client value. */ public SqlManagementClientImpl getClient() { return this.client; } /** * Returns information about a service tier advisor. * * @param resourceGroupName Required. The name of the Resource Group. * @param serverName Required. The name of server. * @param databaseName Required. The name of database. * @param serviceTierAdvisorName Required. The name of service tier advisor. * @return Represents the response to a get service tier advisor request. */ @Override public Future<ServiceTierAdvisorGetResponse> getAsync(final String resourceGroupName, final String serverName, final String databaseName, final String serviceTierAdvisorName) { return this.getClient().getExecutorService().submit(new Callable<ServiceTierAdvisorGetResponse>() { @Override public ServiceTierAdvisorGetResponse call() throws Exception { return get(resourceGroupName, serverName, databaseName, serviceTierAdvisorName); } }); } /** * Returns information about a service tier advisor. * * @param resourceGroupName Required. The name of the Resource Group. * @param serverName Required. The name of server. * @param databaseName Required. The name of database. * @param serviceTierAdvisorName Required. The name of service tier advisor. * @throws IOException Signals that an I/O exception of some sort has * occurred. 
 * This class is the general class of exceptions produced by
 * failed or interrupted I/O operations.
 * @throws ServiceException Thrown if an unexpected response is found.
 * @return Represents the response to a get service tier advisor request.
 */
@Override
public ServiceTierAdvisorGetResponse get(String resourceGroupName, String serverName, String databaseName, String serviceTierAdvisorName) throws IOException, ServiceException {
    // Validate: every argument becomes a URL path segment, so null is never acceptable.
    if (resourceGroupName == null) {
        throw new NullPointerException("resourceGroupName");
    }
    if (serverName == null) {
        throw new NullPointerException("serverName");
    }
    if (databaseName == null) {
        throw new NullPointerException("databaseName");
    }
    if (serviceTierAdvisorName == null) {
        throw new NullPointerException("serviceTierAdvisorName");
    }

    // Tracing
    boolean shouldTrace = CloudTracing.getIsEnabled();
    String invocationId = null;
    if (shouldTrace) {
        invocationId = Long.toString(CloudTracing.getNextInvocationId());
        HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
        tracingParameters.put("resourceGroupName", resourceGroupName);
        tracingParameters.put("serverName", serverName);
        tracingParameters.put("databaseName", databaseName);
        tracingParameters.put("serviceTierAdvisorName", serviceTierAdvisorName);
        CloudTracing.enter(invocationId, this, "getAsync", tracingParameters);
    }

    // Construct URL:
    // /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.Sql/servers/{server}
    //   /databases/{db}/serviceTierAdvisors/{advisor}?api-version=2014-04-01
    String url = "";
    url = url + "/subscriptions/";
    if (this.getClient().getCredentials().getSubscriptionId() != null) {
        url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8");
    }
    url = url + "/resourceGroups/";
    url = url + URLEncoder.encode(resourceGroupName, "UTF-8");
    url = url + "/providers/";
    url = url + "Microsoft.Sql";
    url = url + "/servers/";
    url = url + URLEncoder.encode(serverName, "UTF-8");
    url = url + "/databases/";
    url = url + URLEncoder.encode(databaseName, "UTF-8");
    url = url + "/serviceTierAdvisors/";
    url = url + URLEncoder.encode(serviceTierAdvisorName, "UTF-8");
    ArrayList<String> queryParameters = new ArrayList<String>();
    queryParameters.add("api-version=" + "2014-04-01");
    if (queryParameters.size() > 0) {
        url = url + "?" + CollectionStringBuilder.join(queryParameters, "&");
    }
    String baseUrl = this.getClient().getBaseUri().toString();
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl.charAt(baseUrl.length() - 1) == '/') {
        baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0);
    }
    if (url.charAt(0) == '/') {
        url = url.substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.replace(" ", "%20");

    // Create HTTP transport objects
    HttpGet httpRequest = new HttpGet(url);

    // Set Headers

    // Send Request
    HttpResponse httpResponse = null;
    try {
        if (shouldTrace) {
            CloudTracing.sendRequest(invocationId, httpRequest);
        }
        httpResponse = this.getClient().getHttpClient().execute(httpRequest);
        if (shouldTrace) {
            CloudTracing.receiveResponse(invocationId, httpResponse);
        }
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        // Anything other than 200 OK is surfaced as a ServiceException built from the error body.
        if (statusCode != HttpStatus.SC_OK) {
            ServiceException ex = ServiceException.createFromJson(httpRequest, null, httpResponse, httpResponse.getEntity());
            if (shouldTrace) {
                CloudTracing.error(invocationId, ex);
            }
            throw ex;
        }

        // Create Result
        ServiceTierAdvisorGetResponse result = null;
        // Deserialize Response
        // NOTE (generated code): the repeated "x instanceof NullNode == false" guards skip
        // fields that are present in the JSON but explicitly null.
        if (statusCode == HttpStatus.SC_OK) {
            InputStream responseContent = httpResponse.getEntity().getContent();
            result = new ServiceTierAdvisorGetResponse();
            ObjectMapper objectMapper = new ObjectMapper();
            JsonNode responseDoc = null;
            if (responseContent == null == false) {
                responseDoc = objectMapper.readTree(responseContent);
            }
            if (responseDoc != null && responseDoc instanceof NullNode == false) {
                ServiceTierAdvisor serviceTierAdvisorInstance = new ServiceTierAdvisor();
                result.setServiceTierAdvisor(serviceTierAdvisorInstance);

                // "properties" sub-object carries the advisor metrics and recommendations.
                JsonNode propertiesValue = responseDoc.get("properties");
                if (propertiesValue != null && propertiesValue instanceof NullNode == false) {
                    ServiceTierAdvisorProperties propertiesInstance = new ServiceTierAdvisorProperties();
                    serviceTierAdvisorInstance.setProperties(propertiesInstance);

                    JsonNode observationPeriodStartValue = propertiesValue.get("observationPeriodStart");
                    if (observationPeriodStartValue != null && observationPeriodStartValue instanceof NullNode == false) {
                        Calendar observationPeriodStartInstance;
                        observationPeriodStartInstance = DatatypeConverter.parseDateTime(observationPeriodStartValue.getTextValue());
                        propertiesInstance.setObservationPeriodStart(observationPeriodStartInstance);
                    }
                    JsonNode observationPeriodEndValue = propertiesValue.get("observationPeriodEnd");
                    if (observationPeriodEndValue != null && observationPeriodEndValue instanceof NullNode == false) {
                        Calendar observationPeriodEndInstance;
                        observationPeriodEndInstance = DatatypeConverter.parseDateTime(observationPeriodEndValue.getTextValue());
                        propertiesInstance.setObservationPeriodEnd(observationPeriodEndInstance);
                    }
                    JsonNode activeTimeRatioValue = propertiesValue.get("activeTimeRatio");
                    if (activeTimeRatioValue != null && activeTimeRatioValue instanceof NullNode == false) {
                        double activeTimeRatioInstance;
                        activeTimeRatioInstance = activeTimeRatioValue.getDoubleValue();
                        propertiesInstance.setActiveTimeRatio(activeTimeRatioInstance);
                    }
                    JsonNode minDtuValue = propertiesValue.get("minDtu");
                    if (minDtuValue != null && minDtuValue instanceof NullNode == false) {
                        double minDtuInstance;
                        minDtuInstance = minDtuValue.getDoubleValue();
                        propertiesInstance.setMinDtu(minDtuInstance);
                    }
                    JsonNode avgDtuValue = propertiesValue.get("avgDtu");
                    if (avgDtuValue != null && avgDtuValue instanceof NullNode == false) {
                        double avgDtuInstance;
                        avgDtuInstance = avgDtuValue.getDoubleValue();
                        propertiesInstance.setAvgDtu(avgDtuInstance);
                    }
                    JsonNode maxDtuValue = propertiesValue.get("maxDtu");
                    if (maxDtuValue != null && maxDtuValue instanceof NullNode == false) {
                        double maxDtuInstance;
                        maxDtuInstance = maxDtuValue.getDoubleValue();
                        propertiesInstance.setMaxDtu(maxDtuInstance);
                    }
                    JsonNode maxSizeInGBValue = propertiesValue.get("maxSizeInGB");
                    if (maxSizeInGBValue != null && maxSizeInGBValue instanceof NullNode == false) {
                        double maxSizeInGBInstance;
                        maxSizeInGBInstance = maxSizeInGBValue.getDoubleValue();
                        propertiesInstance.setMaxSizeInGB(maxSizeInGBInstance);
                    }

                    // Per-service-level-objective usage metrics array.
                    JsonNode serviceLevelObjectiveUsageMetricsArray = propertiesValue.get("serviceLevelObjectiveUsageMetrics");
                    if (serviceLevelObjectiveUsageMetricsArray != null && serviceLevelObjectiveUsageMetricsArray instanceof NullNode == false) {
                        for (JsonNode serviceLevelObjectiveUsageMetricsValue : ((ArrayNode) serviceLevelObjectiveUsageMetricsArray)) {
                            SloUsageMetric sloUsageMetricInstance = new SloUsageMetric();
                            propertiesInstance.getServiceLevelObjectiveUsageMetrics().add(sloUsageMetricInstance);
                            JsonNode serviceLevelObjectiveValue = serviceLevelObjectiveUsageMetricsValue.get("serviceLevelObjective");
                            if (serviceLevelObjectiveValue != null && serviceLevelObjectiveValue instanceof NullNode == false) {
                                String serviceLevelObjectiveInstance;
                                serviceLevelObjectiveInstance = serviceLevelObjectiveValue.getTextValue();
                                sloUsageMetricInstance.setServiceLevelObjective(serviceLevelObjectiveInstance);
                            }
                            JsonNode serviceLevelObjectiveIdValue = serviceLevelObjectiveUsageMetricsValue.get("serviceLevelObjectiveId");
                            if (serviceLevelObjectiveIdValue != null && serviceLevelObjectiveIdValue instanceof NullNode == false) {
                                String serviceLevelObjectiveIdInstance;
                                serviceLevelObjectiveIdInstance = serviceLevelObjectiveIdValue.getTextValue();
                                sloUsageMetricInstance.setServiceLevelObjectiveId(serviceLevelObjectiveIdInstance);
                            }
                            JsonNode inRangeTimeRatioValue = serviceLevelObjectiveUsageMetricsValue.get("inRangeTimeRatio");
                            if (inRangeTimeRatioValue != null && inRangeTimeRatioValue instanceof NullNode == false) {
                                double inRangeTimeRatioInstance;
                                inRangeTimeRatioInstance = inRangeTimeRatioValue.getDoubleValue();
                                sloUsageMetricInstance.setInRangeTimeRatio(inRangeTimeRatioInstance);
                            }
                            JsonNode idValue = serviceLevelObjectiveUsageMetricsValue.get("id");
                            if (idValue != null && idValue instanceof NullNode == false) {
                                String idInstance;
                                idInstance = idValue.getTextValue();
                                sloUsageMetricInstance.setId(idInstance);
                            }
                            JsonNode nameValue = serviceLevelObjectiveUsageMetricsValue.get("name");
                            if (nameValue != null && nameValue instanceof NullNode == false) {
                                String nameInstance;
                                nameInstance = nameValue.getTextValue();
                                sloUsageMetricInstance.setName(nameInstance);
                            }
                            JsonNode typeValue = serviceLevelObjectiveUsageMetricsValue.get("type");
                            if (typeValue != null && typeValue instanceof NullNode == false) {
                                String typeInstance;
                                typeInstance = typeValue.getTextValue();
                                sloUsageMetricInstance.setType(typeInstance);
                            }
                            JsonNode locationValue = serviceLevelObjectiveUsageMetricsValue.get("location");
                            if (locationValue != null && locationValue instanceof NullNode == false) {
                                String locationInstance;
                                locationInstance = locationValue.getTextValue();
                                sloUsageMetricInstance.setLocation(locationInstance);
                            }
                            // "tags" is a free-form string-to-string map.
                            JsonNode tagsSequenceElement = ((JsonNode) serviceLevelObjectiveUsageMetricsValue.get("tags"));
                            if (tagsSequenceElement != null && tagsSequenceElement instanceof NullNode == false) {
                                Iterator<Map.Entry<String, JsonNode>> itr = tagsSequenceElement.getFields();
                                while (itr.hasNext()) {
                                    Map.Entry<String, JsonNode> property = itr.next();
                                    String tagsKey = property.getKey();
                                    String tagsValue = property.getValue().getTextValue();
                                    sloUsageMetricInstance.getTags().put(tagsKey, tagsValue);
                                }
                            }
                        }
                    }

                    JsonNode currentServiceLevelObjectiveValue = propertiesValue.get("currentServiceLevelObjective");
                    if (currentServiceLevelObjectiveValue != null && currentServiceLevelObjectiveValue instanceof NullNode == false) {
                        String currentServiceLevelObjectiveInstance;
                        currentServiceLevelObjectiveInstance = currentServiceLevelObjectiveValue.getTextValue();
                        propertiesInstance.setCurrentServiceLevelObjective(currentServiceLevelObjectiveInstance);
                    }
                    JsonNode currentServiceLevelObjectiveIdValue = propertiesValue.get("currentServiceLevelObjectiveId");
                    if (currentServiceLevelObjectiveIdValue != null && currentServiceLevelObjectiveIdValue instanceof NullNode == false) {
                        String currentServiceLevelObjectiveIdInstance;
                        currentServiceLevelObjectiveIdInstance = currentServiceLevelObjectiveIdValue.getTextValue();
                        propertiesInstance.setCurrentServiceLevelObjectiveId(currentServiceLevelObjectiveIdInstance);
                    }
                    JsonNode usageBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("usageBasedRecommendationServiceLevelObjective");
                    if (usageBasedRecommendationServiceLevelObjectiveValue != null && usageBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                        String usageBasedRecommendationServiceLevelObjectiveInstance;
                        usageBasedRecommendationServiceLevelObjectiveInstance = usageBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                        propertiesInstance.setUsageBasedRecommendationServiceLevelObjective(usageBasedRecommendationServiceLevelObjectiveInstance);
                    }
                    JsonNode usageBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("usageBasedRecommendationServiceLevelObjectiveId");
                    if (usageBasedRecommendationServiceLevelObjectiveIdValue != null && usageBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                        String usageBasedRecommendationServiceLevelObjectiveIdInstance;
                        usageBasedRecommendationServiceLevelObjectiveIdInstance = usageBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                        propertiesInstance.setUsageBasedRecommendationServiceLevelObjectiveId(usageBasedRecommendationServiceLevelObjectiveIdInstance);
                    }
                    JsonNode databaseSizeBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("databaseSizeBasedRecommendationServiceLevelObjective");
                    if (databaseSizeBasedRecommendationServiceLevelObjectiveValue != null && databaseSizeBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                        String databaseSizeBasedRecommendationServiceLevelObjectiveInstance;
                        databaseSizeBasedRecommendationServiceLevelObjectiveInstance = databaseSizeBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                        propertiesInstance.setDatabaseSizeBasedRecommendationServiceLevelObjective(databaseSizeBasedRecommendationServiceLevelObjectiveInstance);
                    }
                    JsonNode databaseSizeBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("databaseSizeBasedRecommendationServiceLevelObjectiveId");
                    if (databaseSizeBasedRecommendationServiceLevelObjectiveIdValue != null && databaseSizeBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                        String databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance;
                        databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance = databaseSizeBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                        propertiesInstance.setDatabaseSizeBasedRecommendationServiceLevelObjectiveId(databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance);
                    }
                    JsonNode disasterPlanBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("disasterPlanBasedRecommendationServiceLevelObjective");
                    if (disasterPlanBasedRecommendationServiceLevelObjectiveValue != null && disasterPlanBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                        String disasterPlanBasedRecommendationServiceLevelObjectiveInstance;
                        disasterPlanBasedRecommendationServiceLevelObjectiveInstance = disasterPlanBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                        propertiesInstance.setDisasterPlanBasedRecommendationServiceLevelObjective(disasterPlanBasedRecommendationServiceLevelObjectiveInstance);
                    }
                    JsonNode disasterPlanBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("disasterPlanBasedRecommendationServiceLevelObjectiveId");
                    if (disasterPlanBasedRecommendationServiceLevelObjectiveIdValue != null && disasterPlanBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                        String disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance;
                        disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance = disasterPlanBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                        propertiesInstance.setDisasterPlanBasedRecommendationServiceLevelObjectiveId(disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance);
                    }
                    JsonNode overallRecommendationServiceLevelObjectiveValue = propertiesValue.get("overallRecommendationServiceLevelObjective");
                    if (overallRecommendationServiceLevelObjectiveValue != null && overallRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                        String overallRecommendationServiceLevelObjectiveInstance;
                        overallRecommendationServiceLevelObjectiveInstance = overallRecommendationServiceLevelObjectiveValue.getTextValue();
                        propertiesInstance.setOverallRecommendationServiceLevelObjective(overallRecommendationServiceLevelObjectiveInstance);
                    }
                    JsonNode overallRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("overallRecommendationServiceLevelObjectiveId");
                    if (overallRecommendationServiceLevelObjectiveIdValue != null && overallRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                        String overallRecommendationServiceLevelObjectiveIdInstance;
                        overallRecommendationServiceLevelObjectiveIdInstance = overallRecommendationServiceLevelObjectiveIdValue.getTextValue();
                        propertiesInstance.setOverallRecommendationServiceLevelObjectiveId(overallRecommendationServiceLevelObjectiveIdInstance);
                    }
                    JsonNode confidenceValue = propertiesValue.get("confidence");
                    if (confidenceValue != null && confidenceValue instanceof NullNode == false) {
                        double confidenceInstance;
                        confidenceInstance = confidenceValue.getDoubleValue();
                        propertiesInstance.setConfidence(confidenceInstance);
                    }
                }

                // Top-level ARM resource envelope: id / name / type / location / tags.
                JsonNode idValue2 = responseDoc.get("id");
                if (idValue2 != null && idValue2 instanceof NullNode == false) {
                    String idInstance2;
                    idInstance2 = idValue2.getTextValue();
                    serviceTierAdvisorInstance.setId(idInstance2);
                }
                JsonNode nameValue2 = responseDoc.get("name");
                if (nameValue2 != null && nameValue2 instanceof NullNode == false) {
                    String nameInstance2;
                    nameInstance2 = nameValue2.getTextValue();
                    serviceTierAdvisorInstance.setName(nameInstance2);
                }
                JsonNode typeValue2 = responseDoc.get("type");
                if (typeValue2 != null && typeValue2 instanceof NullNode == false) {
                    String typeInstance2;
                    typeInstance2 = typeValue2.getTextValue();
                    serviceTierAdvisorInstance.setType(typeInstance2);
                }
                JsonNode locationValue2 = responseDoc.get("location");
                if (locationValue2 != null && locationValue2 instanceof NullNode == false) {
                    String locationInstance2;
                    locationInstance2 = locationValue2.getTextValue();
                    serviceTierAdvisorInstance.setLocation(locationInstance2);
                }
                JsonNode tagsSequenceElement2 = ((JsonNode) responseDoc.get("tags"));
                if (tagsSequenceElement2 != null && tagsSequenceElement2 instanceof NullNode == false) {
                    Iterator<Map.Entry<String, JsonNode>> itr2 = tagsSequenceElement2.getFields();
                    while (itr2.hasNext()) {
                        Map.Entry<String, JsonNode> property2 = itr2.next();
                        String tagsKey2 = property2.getKey();
                        String tagsValue2 = property2.getValue().getTextValue();
                        serviceTierAdvisorInstance.getTags().put(tagsKey2, tagsValue2);
                    }
                }
            }
        }
        result.setStatusCode(statusCode);
        // Propagate the service-generated correlation id, when present.
        if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
            result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
        }

        if (shouldTrace) {
            CloudTracing.exit(invocationId, result);
        }
        return result;
    } finally {
        // Close the response content stream so the underlying connection is released.
        if (httpResponse != null && httpResponse.getEntity() != null) {
            httpResponse.getEntity().getContent().close();
        }
    }
}

/**
 * Returns information about service tier advisors for specified database.
 *
 * @param resourceGroupName Required. The name of the Resource Group.
 * @param serverName Required. The name of server.
 * @param databaseName Required. The name of database.
 * @return Represents the response to a list service tier advisor request.
 */
@Override
public Future<ServiceTierAdvisorListResponse> listAsync(final String resourceGroupName, final String serverName, final String databaseName) {
    // Asynchronous wrapper: delegates to the synchronous list(...) on the client's executor.
    return this.getClient().getExecutorService().submit(new Callable<ServiceTierAdvisorListResponse>() {
        @Override
        public ServiceTierAdvisorListResponse call() throws Exception {
            return list(resourceGroupName, serverName, databaseName);
        }
    });
}

/**
 * Returns information about service tier advisors for specified database.
 *
 * @param resourceGroupName Required. The name of the Resource Group.
 * @param serverName Required. The name of server.
 * @param databaseName Required. The name of database.
 * @throws IOException Signals that an I/O exception of some sort has
 * occurred. This class is the general class of exceptions produced by
 * failed or interrupted I/O operations.
 * @throws ServiceException Thrown if an unexpected response is found.
 * @return Represents the response to a list service tier advisor request.
 */
@Override
public ServiceTierAdvisorListResponse list(String resourceGroupName, String serverName, String databaseName) throws IOException, ServiceException {
    // Validate: every argument becomes a URL path segment, so null is never acceptable.
    if (resourceGroupName == null) {
        throw new NullPointerException("resourceGroupName");
    }
    if (serverName == null) {
        throw new NullPointerException("serverName");
    }
    if (databaseName == null) {
        throw new NullPointerException("databaseName");
    }

    // Tracing
    boolean shouldTrace = CloudTracing.getIsEnabled();
    String invocationId = null;
    if (shouldTrace) {
        invocationId = Long.toString(CloudTracing.getNextInvocationId());
        HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
        tracingParameters.put("resourceGroupName", resourceGroupName);
        tracingParameters.put("serverName", serverName);
        tracingParameters.put("databaseName", databaseName);
        CloudTracing.enter(invocationId, this, "listAsync", tracingParameters);
    }

    // Construct URL:
    // /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.Sql/servers/{server}
    //   /databases/{db}/serviceTierAdvisors?api-version=2014-04-01
    String url = "";
    url = url + "/subscriptions/";
    if (this.getClient().getCredentials().getSubscriptionId() != null) {
        url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8");
    }
    url = url + "/resourceGroups/";
    url = url + URLEncoder.encode(resourceGroupName, "UTF-8");
    url = url + "/providers/";
    url = url + "Microsoft.Sql";
    url = url + "/servers/";
    url = url + URLEncoder.encode(serverName, "UTF-8");
    url = url + "/databases/";
    url = url + URLEncoder.encode(databaseName, "UTF-8");
    url = url + "/serviceTierAdvisors";
    ArrayList<String> queryParameters = new ArrayList<String>();
    queryParameters.add("api-version=" + "2014-04-01");
    if (queryParameters.size() > 0) {
        url = url + "?" + CollectionStringBuilder.join(queryParameters, "&");
    }
    String baseUrl = this.getClient().getBaseUri().toString();
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl.charAt(baseUrl.length() - 1) == '/') {
        baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0);
    }
    if (url.charAt(0) == '/') {
        url = url.substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.replace(" ", "%20");

    // Create HTTP transport objects
    HttpGet httpRequest = new HttpGet(url);

    // Set Headers

    // Send Request
    HttpResponse httpResponse = null;
    try {
        if (shouldTrace) {
            CloudTracing.sendRequest(invocationId, httpRequest);
        }
        httpResponse = this.getClient().getHttpClient().execute(httpRequest);
        if (shouldTrace) {
            CloudTracing.receiveResponse(invocationId, httpResponse);
        }
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        // Anything other than 200 OK is surfaced as a ServiceException built from the error body.
        if (statusCode != HttpStatus.SC_OK) {
            ServiceException ex = ServiceException.createFromJson(httpRequest, null, httpResponse, httpResponse.getEntity());
            if (shouldTrace) {
                CloudTracing.error(invocationId, ex);
            }
            throw ex;
        }

        // Create Result
        ServiceTierAdvisorListResponse result = null;
        // Deserialize Response
        // NOTE (generated code): the repeated "x instanceof NullNode == false" guards skip
        // fields that are present in the JSON but explicitly null.
        if (statusCode == HttpStatus.SC_OK) {
            InputStream responseContent = httpResponse.getEntity().getContent();
            result = new ServiceTierAdvisorListResponse();
            ObjectMapper objectMapper = new ObjectMapper();
            JsonNode responseDoc = null;
            if (responseContent == null == false) {
                responseDoc = objectMapper.readTree(responseContent);
            }
            if (responseDoc != null && responseDoc instanceof NullNode == false) {
                // The list payload wraps the advisors in a "value" array.
                JsonNode valueArray = responseDoc.get("value");
                if (valueArray != null && valueArray instanceof NullNode == false) {
                    for (JsonNode valueValue : ((ArrayNode) valueArray)) {
                        ServiceTierAdvisor serviceTierAdvisorInstance = new ServiceTierAdvisor();
                        result.getServiceTierAdvisors().add(serviceTierAdvisorInstance);

                        // "properties" sub-object carries the advisor metrics and recommendations.
                        JsonNode propertiesValue = valueValue.get("properties");
                        if (propertiesValue != null && propertiesValue instanceof NullNode == false) {
                            ServiceTierAdvisorProperties propertiesInstance = new ServiceTierAdvisorProperties();
                            serviceTierAdvisorInstance.setProperties(propertiesInstance);
                            JsonNode observationPeriodStartValue = propertiesValue.get("observationPeriodStart");
                            if (observationPeriodStartValue != null && observationPeriodStartValue instanceof NullNode == false) {
                                Calendar observationPeriodStartInstance;
                                observationPeriodStartInstance = DatatypeConverter.parseDateTime(observationPeriodStartValue.getTextValue());
                                propertiesInstance.setObservationPeriodStart(observationPeriodStartInstance);
                            }
                            JsonNode observationPeriodEndValue = propertiesValue.get("observationPeriodEnd");
                            if (observationPeriodEndValue != null && observationPeriodEndValue instanceof NullNode == false) {
                                Calendar observationPeriodEndInstance;
                                observationPeriodEndInstance = DatatypeConverter.parseDateTime(observationPeriodEndValue.getTextValue());
                                propertiesInstance.setObservationPeriodEnd(observationPeriodEndInstance);
                            }
                            JsonNode activeTimeRatioValue = propertiesValue.get("activeTimeRatio");
                            if (activeTimeRatioValue != null && activeTimeRatioValue instanceof NullNode == false) {
                                double activeTimeRatioInstance;
                                activeTimeRatioInstance = activeTimeRatioValue.getDoubleValue();
                                propertiesInstance.setActiveTimeRatio(activeTimeRatioInstance);
                            }
                            JsonNode minDtuValue = propertiesValue.get("minDtu");
                            if (minDtuValue != null && minDtuValue instanceof NullNode == false) {
                                double minDtuInstance;
                                minDtuInstance = minDtuValue.getDoubleValue();
                                propertiesInstance.setMinDtu(minDtuInstance);
                            }
                            JsonNode avgDtuValue = propertiesValue.get("avgDtu");
                            if (avgDtuValue != null && avgDtuValue instanceof NullNode == false) {
                                double avgDtuInstance;
                                avgDtuInstance = avgDtuValue.getDoubleValue();
                                propertiesInstance.setAvgDtu(avgDtuInstance);
                            }
                            JsonNode maxDtuValue = propertiesValue.get("maxDtu");
                            if (maxDtuValue != null && maxDtuValue instanceof NullNode == false) {
                                double maxDtuInstance;
                                maxDtuInstance = maxDtuValue.getDoubleValue();
                                propertiesInstance.setMaxDtu(maxDtuInstance);
                            }
                            JsonNode maxSizeInGBValue = propertiesValue.get("maxSizeInGB");
                            if (maxSizeInGBValue != null && maxSizeInGBValue instanceof NullNode == false) {
                                double maxSizeInGBInstance;
                                maxSizeInGBInstance = maxSizeInGBValue.getDoubleValue();
                                propertiesInstance.setMaxSizeInGB(maxSizeInGBInstance);
                            }

                            // Per-service-level-objective usage metrics array.
                            JsonNode serviceLevelObjectiveUsageMetricsArray = propertiesValue.get("serviceLevelObjectiveUsageMetrics");
                            if (serviceLevelObjectiveUsageMetricsArray != null && serviceLevelObjectiveUsageMetricsArray instanceof NullNode == false) {
                                for (JsonNode serviceLevelObjectiveUsageMetricsValue : ((ArrayNode) serviceLevelObjectiveUsageMetricsArray)) {
                                    SloUsageMetric sloUsageMetricInstance = new SloUsageMetric();
                                    propertiesInstance.getServiceLevelObjectiveUsageMetrics().add(sloUsageMetricInstance);
                                    JsonNode serviceLevelObjectiveValue = serviceLevelObjectiveUsageMetricsValue.get("serviceLevelObjective");
                                    if (serviceLevelObjectiveValue != null && serviceLevelObjectiveValue instanceof NullNode == false) {
                                        String serviceLevelObjectiveInstance;
                                        serviceLevelObjectiveInstance = serviceLevelObjectiveValue.getTextValue();
                                        sloUsageMetricInstance.setServiceLevelObjective(serviceLevelObjectiveInstance);
                                    }
                                    JsonNode serviceLevelObjectiveIdValue = serviceLevelObjectiveUsageMetricsValue.get("serviceLevelObjectiveId");
                                    if (serviceLevelObjectiveIdValue != null && serviceLevelObjectiveIdValue instanceof NullNode == false) {
                                        String serviceLevelObjectiveIdInstance;
                                        serviceLevelObjectiveIdInstance = serviceLevelObjectiveIdValue.getTextValue();
                                        sloUsageMetricInstance.setServiceLevelObjectiveId(serviceLevelObjectiveIdInstance);
                                    }
                                    JsonNode inRangeTimeRatioValue = serviceLevelObjectiveUsageMetricsValue.get("inRangeTimeRatio");
                                    if (inRangeTimeRatioValue != null && inRangeTimeRatioValue instanceof NullNode == false) {
                                        double inRangeTimeRatioInstance;
                                        inRangeTimeRatioInstance = inRangeTimeRatioValue.getDoubleValue();
                                        sloUsageMetricInstance.setInRangeTimeRatio(inRangeTimeRatioInstance);
                                    }
                                    JsonNode idValue = serviceLevelObjectiveUsageMetricsValue.get("id");
                                    if (idValue != null && idValue instanceof NullNode == false) {
                                        String idInstance;
                                        idInstance = idValue.getTextValue();
                                        sloUsageMetricInstance.setId(idInstance);
                                    }
                                    JsonNode nameValue = serviceLevelObjectiveUsageMetricsValue.get("name");
                                    if (nameValue != null && nameValue instanceof NullNode == false) {
                                        String nameInstance;
                                        nameInstance = nameValue.getTextValue();
                                        sloUsageMetricInstance.setName(nameInstance);
                                    }
                                    JsonNode typeValue = serviceLevelObjectiveUsageMetricsValue.get("type");
                                    if (typeValue != null && typeValue instanceof NullNode == false) {
                                        String typeInstance;
                                        typeInstance = typeValue.getTextValue();
                                        sloUsageMetricInstance.setType(typeInstance);
                                    }
                                    JsonNode locationValue = serviceLevelObjectiveUsageMetricsValue.get("location");
                                    if (locationValue != null && locationValue instanceof NullNode == false) {
                                        String locationInstance;
                                        locationInstance = locationValue.getTextValue();
                                        sloUsageMetricInstance.setLocation(locationInstance);
                                    }
                                    // "tags" is a free-form string-to-string map.
                                    JsonNode tagsSequenceElement = ((JsonNode) serviceLevelObjectiveUsageMetricsValue.get("tags"));
                                    if (tagsSequenceElement != null && tagsSequenceElement instanceof NullNode == false) {
                                        Iterator<Map.Entry<String, JsonNode>> itr = tagsSequenceElement.getFields();
                                        while (itr.hasNext()) {
                                            Map.Entry<String, JsonNode> property = itr.next();
                                            String tagsKey = property.getKey();
                                            String tagsValue = property.getValue().getTextValue();
                                            sloUsageMetricInstance.getTags().put(tagsKey, tagsValue);
                                        }
                                    }
                                }
                            }

                            JsonNode currentServiceLevelObjectiveValue = propertiesValue.get("currentServiceLevelObjective");
                            if (currentServiceLevelObjectiveValue != null && currentServiceLevelObjectiveValue instanceof NullNode == false) {
                                String currentServiceLevelObjectiveInstance;
                                currentServiceLevelObjectiveInstance = currentServiceLevelObjectiveValue.getTextValue();
                                propertiesInstance.setCurrentServiceLevelObjective(currentServiceLevelObjectiveInstance);
                            }
                            JsonNode currentServiceLevelObjectiveIdValue = propertiesValue.get("currentServiceLevelObjectiveId");
                            if (currentServiceLevelObjectiveIdValue != null && currentServiceLevelObjectiveIdValue instanceof NullNode == false) {
                                String currentServiceLevelObjectiveIdInstance;
                                currentServiceLevelObjectiveIdInstance = currentServiceLevelObjectiveIdValue.getTextValue();
                                propertiesInstance.setCurrentServiceLevelObjectiveId(currentServiceLevelObjectiveIdInstance);
                            }
                            JsonNode usageBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("usageBasedRecommendationServiceLevelObjective");
                            if (usageBasedRecommendationServiceLevelObjectiveValue != null && usageBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                                String usageBasedRecommendationServiceLevelObjectiveInstance;
                                usageBasedRecommendationServiceLevelObjectiveInstance = usageBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                                propertiesInstance.setUsageBasedRecommendationServiceLevelObjective(usageBasedRecommendationServiceLevelObjectiveInstance);
                            }
                            JsonNode usageBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("usageBasedRecommendationServiceLevelObjectiveId");
                            if (usageBasedRecommendationServiceLevelObjectiveIdValue != null && usageBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                                String usageBasedRecommendationServiceLevelObjectiveIdInstance;
                                usageBasedRecommendationServiceLevelObjectiveIdInstance = usageBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                                propertiesInstance.setUsageBasedRecommendationServiceLevelObjectiveId(usageBasedRecommendationServiceLevelObjectiveIdInstance);
                            }
                            JsonNode databaseSizeBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("databaseSizeBasedRecommendationServiceLevelObjective");
                            if (databaseSizeBasedRecommendationServiceLevelObjectiveValue != null && databaseSizeBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                                String databaseSizeBasedRecommendationServiceLevelObjectiveInstance;
                                databaseSizeBasedRecommendationServiceLevelObjectiveInstance = databaseSizeBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                                propertiesInstance.setDatabaseSizeBasedRecommendationServiceLevelObjective(databaseSizeBasedRecommendationServiceLevelObjectiveInstance);
                            }
                            JsonNode databaseSizeBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("databaseSizeBasedRecommendationServiceLevelObjectiveId");
                            if (databaseSizeBasedRecommendationServiceLevelObjectiveIdValue != null && databaseSizeBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                                String databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance;
                                databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance = databaseSizeBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                                propertiesInstance.setDatabaseSizeBasedRecommendationServiceLevelObjectiveId(databaseSizeBasedRecommendationServiceLevelObjectiveIdInstance);
                            }
                            JsonNode disasterPlanBasedRecommendationServiceLevelObjectiveValue = propertiesValue.get("disasterPlanBasedRecommendationServiceLevelObjective");
                            if (disasterPlanBasedRecommendationServiceLevelObjectiveValue != null && disasterPlanBasedRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                                String disasterPlanBasedRecommendationServiceLevelObjectiveInstance;
                                disasterPlanBasedRecommendationServiceLevelObjectiveInstance = disasterPlanBasedRecommendationServiceLevelObjectiveValue.getTextValue();
                                propertiesInstance.setDisasterPlanBasedRecommendationServiceLevelObjective(disasterPlanBasedRecommendationServiceLevelObjectiveInstance);
                            }
                            JsonNode disasterPlanBasedRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("disasterPlanBasedRecommendationServiceLevelObjectiveId");
                            if (disasterPlanBasedRecommendationServiceLevelObjectiveIdValue != null && disasterPlanBasedRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                                String disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance;
                                disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance = disasterPlanBasedRecommendationServiceLevelObjectiveIdValue.getTextValue();
                                propertiesInstance.setDisasterPlanBasedRecommendationServiceLevelObjectiveId(disasterPlanBasedRecommendationServiceLevelObjectiveIdInstance);
                            }
                            JsonNode overallRecommendationServiceLevelObjectiveValue = propertiesValue.get("overallRecommendationServiceLevelObjective");
                            if (overallRecommendationServiceLevelObjectiveValue != null && overallRecommendationServiceLevelObjectiveValue instanceof NullNode == false) {
                                String overallRecommendationServiceLevelObjectiveInstance;
                                overallRecommendationServiceLevelObjectiveInstance = overallRecommendationServiceLevelObjectiveValue.getTextValue();
                                propertiesInstance.setOverallRecommendationServiceLevelObjective(overallRecommendationServiceLevelObjectiveInstance);
                            }
                            JsonNode overallRecommendationServiceLevelObjectiveIdValue = propertiesValue.get("overallRecommendationServiceLevelObjectiveId");
                            if (overallRecommendationServiceLevelObjectiveIdValue != null && overallRecommendationServiceLevelObjectiveIdValue instanceof NullNode == false) {
                                String overallRecommendationServiceLevelObjectiveIdInstance;
                                overallRecommendationServiceLevelObjectiveIdInstance = overallRecommendationServiceLevelObjectiveIdValue.getTextValue();
                                propertiesInstance.setOverallRecommendationServiceLevelObjectiveId(overallRecommendationServiceLevelObjectiveIdInstance);
                            }
                            JsonNode confidenceValue = propertiesValue.get("confidence");
                            if (confidenceValue != null && confidenceValue instanceof NullNode == false) {
                                double confidenceInstance;
                                confidenceInstance = confidenceValue.getDoubleValue();
                                propertiesInstance.setConfidence(confidenceInstance);
                            }
                        }

                        // Top-level ARM resource envelope: id / name / type / location / tags.
                        JsonNode idValue2 = valueValue.get("id");
                        if (idValue2 != null && idValue2 instanceof NullNode == false) {
                            String idInstance2;
                            idInstance2 = idValue2.getTextValue();
                            serviceTierAdvisorInstance.setId(idInstance2);
                        }
                        JsonNode nameValue2 = valueValue.get("name");
                        if (nameValue2 != null && nameValue2 instanceof NullNode == false) {
                            String nameInstance2;
                            nameInstance2 = nameValue2.getTextValue();
                            serviceTierAdvisorInstance.setName(nameInstance2);
                        }
                        JsonNode typeValue2 = valueValue.get("type");
                        if (typeValue2 != null && typeValue2 instanceof NullNode == false) {
                            String typeInstance2;
                            typeInstance2 = typeValue2.getTextValue();
                            serviceTierAdvisorInstance.setType(typeInstance2);
                        }
                        JsonNode locationValue2 = valueValue.get("location");
                        if (locationValue2 != null && locationValue2 instanceof NullNode == false) {
                            String locationInstance2;
                            locationInstance2 = locationValue2.getTextValue();
                            serviceTierAdvisorInstance.setLocation(locationInstance2);
                        }
                        JsonNode tagsSequenceElement2 = ((JsonNode) valueValue.get("tags"));
                        if (tagsSequenceElement2 != null && tagsSequenceElement2 instanceof NullNode == false) {
                            Iterator<Map.Entry<String, JsonNode>> itr2 = tagsSequenceElement2.getFields();
                            while (itr2.hasNext()) {
                                Map.Entry<String, JsonNode> property2 = itr2.next();
                                String tagsKey2 = property2.getKey();
                                String tagsValue2 = property2.getValue().getTextValue();
                                serviceTierAdvisorInstance.getTags().put(tagsKey2, tagsValue2);
                            }
                        }
                    }
                }
            }
        }
        result.setStatusCode(statusCode);
        // Propagate the service-generated correlation id, when present.
        if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
            result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
        }

        if (shouldTrace) {
            CloudTracing.exit(invocationId, result);
        }
        return result;
    } finally {
        // Close the response content stream so the underlying connection is released.
        if (httpResponse != null && httpResponse.getEntity() != null) {
            httpResponse.getEntity().getContent().close();
        }
    }
}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.spatial;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.geo.GeoFormatterFactory;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.license.License;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.license.LicensedFeature;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.ExtensiblePlugin;
import org.elasticsearch.plugins.IngestPlugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregator;
import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.xcontent.ContextParser;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
import org.elasticsearch.xpack.core.spatial.action.SpatialStatsAction;
import org.elasticsearch.xpack.spatial.action.SpatialInfoTransportAction;
import org.elasticsearch.xpack.spatial.action.SpatialStatsTransportAction;
import org.elasticsearch.xpack.spatial.action.SpatialUsageTransportAction;
import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper;
import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper;
import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper;
import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder;
import org.elasticsearch.xpack.spatial.ingest.CircleProcessor;
import org.elasticsearch.xpack.spatial.search.aggregations.GeoLineAggregationBuilder;
import org.elasticsearch.xpack.spatial.search.aggregations.InternalGeoLine;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.BoundedGeoHashGridTiler;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.BoundedGeoTileGridTiler;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.GeoGridTiler;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.GeoShapeCellIdSource;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.GeoShapeHashGridAggregator;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.GeoShapeTileGridAggregator;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.UnboundedGeoHashGridTiler;
import org.elasticsearch.xpack.spatial.search.aggregations.bucket.geogrid.UnboundedGeoTileGridTiler;
import org.elasticsearch.xpack.spatial.search.aggregations.metrics.GeoShapeBoundsAggregator;
import org.elasticsearch.xpack.spatial.search.aggregations.metrics.GeoShapeCentroidAggregator;
import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSource;
import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

import static java.util.Collections.singletonList;

/**
 * Plugin entry point for the x-pack spatial module. Registers the geo_shape/shape/point
 * field mappers, the shape query, the circle ingest processor, and the geo aggregations
 * that operate on geo_shape doc values. Several aggregations are gated behind a GOLD
 * license check ({@link LicensedFeature.Momentary}); a non-compliant license raises a
 * compliance exception at aggregation-construction time rather than at parse time.
 */
public class SpatialPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, IngestPlugin, ExtensiblePlugin {
    // Tracks per-feature usage counters that are surfaced through SpatialStatsAction.
    private final SpatialUsage usage = new SpatialUsage();

    // GOLD-licensed features; each check() both validates the license and records feature usage.
    private final LicensedFeature.Momentary GEO_CENTROID_AGG_FEATURE = LicensedFeature.momentary(
        "spatial",
        "geo-centroid-agg",
        License.OperationMode.GOLD
    );
    private final LicensedFeature.Momentary GEO_GRID_AGG_FEATURE = LicensedFeature.momentary(
        "spatial",
        "geo-grid-agg",
        License.OperationMode.GOLD
    );
    private final LicensedFeature.Momentary GEO_LINE_AGG_FEATURE = LicensedFeature.momentary(
        "spatial",
        "geo-line-agg",
        License.OperationMode.GOLD
    );

    // to be overridden by tests
    protected XPackLicenseState getLicenseState() {
        return XPackPlugin.getSharedLicenseState();
    }

    // register the vector tile factory from a different module
    // (populated once via loadExtensions; read when the geo_shape mapper is registered)
    private final SetOnce<GeoFormatterFactory<Geometry>> geoFormatterFactory = new SetOnce<>();

    /** Registers the usage/info/stats transport actions for this module. */
    @Override
    public List<ActionPlugin.ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
        return Arrays.asList(
            new ActionPlugin.ActionHandler<>(XPackUsageFeatureAction.SPATIAL, SpatialUsageTransportAction.class),
            new ActionPlugin.ActionHandler<>(XPackInfoFeatureAction.SPATIAL, SpatialInfoTransportAction.class),
            new ActionPlugin.ActionHandler<>(SpatialStatsAction.INSTANCE, SpatialStatsTransportAction.class)
        );
    }

    /** Registers the shape, point and doc-values-backed geo_shape field mappers. */
    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        return Map.of(
            ShapeFieldMapper.CONTENT_TYPE,
            ShapeFieldMapper.PARSER,
            PointFieldMapper.CONTENT_TYPE,
            PointFieldMapper.PARSER,
            GeoShapeWithDocValuesFieldMapper.CONTENT_TYPE,
            new GeoShapeWithDocValuesFieldMapper.TypeParser(geoFormatterFactory.get())
        );
    }

    /** Registers the "shape" query. */
    @Override
    public List<QuerySpec<?>> getQueries() {
        return singletonList(new QuerySpec<>(ShapeQueryBuilder.NAME, ShapeQueryBuilder::new, ShapeQueryBuilder::fromXContent));
    }

    /**
     * Hooks geo_shape support into existing aggregations (bounds, centroid, grid,
     * value_count, cardinality) via the values-source registry.
     */
    @Override
    public List<Consumer<ValuesSourceRegistry.Builder>> getAggregationExtentions() {
        return List.of(
            this::registerGeoShapeCentroidAggregator,
            this::registerGeoShapeGridAggregators,
            SpatialPlugin::registerGeoShapeBoundsAggregator,
            SpatialPlugin::registerValueCountAggregator,
            SpatialPlugin::registerCardinalityAggregator
        );
    }

    /** Registers the geo_line aggregation; its parser is wrapped with a license check and usage tracking. */
    @Override
    public List<AggregationSpec> getAggregations() {
        return List.of(
            new AggregationSpec(
                GeoLineAggregationBuilder.NAME,
                GeoLineAggregationBuilder::new,
                usage.track(SpatialStatsAction.Item.GEOLINE, checkLicense(GeoLineAggregationBuilder.PARSER, GEO_LINE_AGG_FEATURE))
            ).addResultReader(InternalGeoLine::new).setAggregatorRegistrar(GeoLineAggregationBuilder::registerUsage)
        );
    }

    /** Registers the "circle" ingest processor. */
    @Override
    public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
        return Map.of(CircleProcessor.TYPE, new CircleProcessor.Factory());
    }

    // geo_bounds on geo_shape needs no license check.
    private static void registerGeoShapeBoundsAggregator(ValuesSourceRegistry.Builder builder) {
        builder.register(
            GeoBoundsAggregationBuilder.REGISTRY_KEY,
            GeoShapeValuesSourceType.instance(),
            GeoShapeBoundsAggregator::new,
            true
        );
    }

    // geo_centroid on geo_shape is license-gated at aggregator construction.
    private void registerGeoShapeCentroidAggregator(ValuesSourceRegistry.Builder builder) {
        builder.register(
            GeoCentroidAggregationBuilder.REGISTRY_KEY,
            GeoShapeValuesSourceType.instance(),
            (name, valuesSourceConfig, context, parent, metadata) -> {
                if (GEO_CENTROID_AGG_FEATURE.check(getLicenseState())) {
                    return new GeoShapeCentroidAggregator(name, context, parent, valuesSourceConfig, metadata);
                }
                throw LicenseUtils.newComplianceException("geo_centroid aggregation on geo_shape fields");
            },
            true
        );
    }

    // geohash_grid / geotile_grid on geo_shape: picks a bounded tiler when a bounding box
    // is supplied, otherwise an unbounded one; both are license-gated.
    private void registerGeoShapeGridAggregators(ValuesSourceRegistry.Builder builder) {
        builder.register(
            GeoHashGridAggregationBuilder.REGISTRY_KEY,
            GeoShapeValuesSourceType.instance(),
            (
                name,
                factories,
                valuesSource,
                precision,
                geoBoundingBox,
                requiredSize,
                shardSize,
                aggregationContext,
                parent,
                collectsFromSingleBucket,
                metadata) -> {
                if (GEO_GRID_AGG_FEATURE.check(getLicenseState())) {
                    final GeoGridTiler tiler;
                    if (geoBoundingBox.isUnbounded()) {
                        tiler = new UnboundedGeoHashGridTiler(precision);
                    } else {
                        tiler = new BoundedGeoHashGridTiler(precision, geoBoundingBox);
                    }
                    GeoShapeCellIdSource cellIdSource = new GeoShapeCellIdSource((GeoShapeValuesSource) valuesSource, tiler);
                    GeoShapeHashGridAggregator agg = new GeoShapeHashGridAggregator(
                        name,
                        factories,
                        cellIdSource,
                        requiredSize,
                        shardSize,
                        aggregationContext,
                        parent,
                        collectsFromSingleBucket,
                        metadata
                    );
                    // this would ideally be something set in an immutable way on the ValuesSource
                    cellIdSource.setCircuitBreakerConsumer(agg::addRequestBytes);
                    return agg;
                }
                throw LicenseUtils.newComplianceException("geohash_grid aggregation on geo_shape fields");
            },
            true
        );
        builder.register(
            GeoTileGridAggregationBuilder.REGISTRY_KEY,
            GeoShapeValuesSourceType.instance(),
            (
                name,
                factories,
                valuesSource,
                precision,
                geoBoundingBox,
                requiredSize,
                shardSize,
                context,
                parent,
                collectsFromSingleBucket,
                metadata) -> {
                if (GEO_GRID_AGG_FEATURE.check(getLicenseState())) {
                    final GeoGridTiler tiler;
                    if (geoBoundingBox.isUnbounded()) {
                        tiler = new UnboundedGeoTileGridTiler(precision);
                    } else {
                        tiler = new BoundedGeoTileGridTiler(precision, geoBoundingBox);
                    }
                    GeoShapeCellIdSource cellIdSource = new GeoShapeCellIdSource((GeoShapeValuesSource) valuesSource, tiler);
                    GeoShapeTileGridAggregator agg = new GeoShapeTileGridAggregator(
                        name,
                        factories,
                        cellIdSource,
                        requiredSize,
                        shardSize,
                        context,
                        parent,
                        collectsFromSingleBucket,
                        metadata
                    );
                    // this would ideally be something set in an immutable way on the ValuesSource
                    cellIdSource.setCircuitBreakerConsumer(agg::addRequestBytes);
                    return agg;
                }
                throw LicenseUtils.newComplianceException("geotile_grid aggregation on geo_shape fields");
            },
            true
        );
    }

    // value_count on geo_shape needs no license check.
    private static void registerValueCountAggregator(ValuesSourceRegistry.Builder builder) {
        builder.register(ValueCountAggregationBuilder.REGISTRY_KEY, GeoShapeValuesSourceType.instance(), ValueCountAggregator::new, true);
    }

    // cardinality on geo_shape needs no license check.
    private static void registerCardinalityAggregator(ValuesSourceRegistry.Builder builder) {
        builder.register(CardinalityAggregationBuilder.REGISTRY_KEY, GeoShapeValuesSourceType.instance(), CardinalityAggregator::new, true);
    }

    // Wraps a parser so that parsing fails fast with a compliance exception when the
    // required licensed feature is not available.
    private <T> ContextParser<String, T> checkLicense(ContextParser<String, T> realParser, LicensedFeature.Momentary feature) {
        return (parser, name) -> {
            if (feature.check(getLicenseState()) == false) {
                throw LicenseUtils.newComplianceException(feature.getName());
            }
            return realParser.parse(parser, name);
        };
    }

    @Override
    public void loadExtensions(ExtensionLoader loader) {
        // we only expect one vector tile extension that comes from the vector tile module.
        List<GeoFormatterFactory.FormatterFactory<Geometry>> formatterFactories = new ArrayList<>();
        loader.loadExtensions(GeometryFormatterExtension.class)
            .stream()
            .map(GeometryFormatterExtension::getGeometryFormatterFactories)
            .forEach(formatterFactories::addAll);
        geoFormatterFactory.set(new GeoFormatterFactory<>(formatterFactories));
    }
}
/*
 * Copyright (C) 2003, 2004, 2005, 2006 Joe Walnes.
 * Copyright (C) 2006, 2007, 2011, 2014 XStream Committers.
 * All rights reserved.
 *
 * The software in this package is published under the terms of the BSD
 * style license a copy of which has been included with this distribution in
 * the LICENSE.txt file.
 *
 * Created on 26. September 2003 by Joe Walnes
 */
package com.thoughtworks.xstream;

import com.thoughtworks.acceptance.AbstractAcceptanceTest;
import com.thoughtworks.acceptance.objects.StandardObject;
import com.thoughtworks.acceptance.someobjects.FunnyConstructor;
import com.thoughtworks.acceptance.someobjects.Handler;
import com.thoughtworks.acceptance.someobjects.HandlerManager;
import com.thoughtworks.acceptance.someobjects.Protocol;
import com.thoughtworks.acceptance.someobjects.U;
import com.thoughtworks.acceptance.someobjects.WithList;
import com.thoughtworks.acceptance.someobjects.X;
import com.thoughtworks.acceptance.someobjects.Y;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.core.JVM;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import com.thoughtworks.xstream.io.StreamException;
import com.thoughtworks.xstream.io.xml.AbstractDocumentReader;
import com.thoughtworks.xstream.io.xml.Dom4JDriver;
import com.thoughtworks.xstream.security.NoTypePermission;

import junit.framework.TestCase;

import org.dom4j.Element;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;

/**
 * End-to-end acceptance tests for the core {@link XStream} facade:
 * marshalling/unmarshalling round trips, aliasing, underscore escaping,
 * custom converters, live-root population, and ObjectOutputStream behavior.
 * JUnit 3 style: each {@code test*} method runs against a fresh XStream
 * built in {@link #setUp()}.
 */
public class XStreamTest extends TestCase {

    // transient: fixture is rebuilt per test and must not be serialized with the test case
    private transient XStream xstream;

    protected void setUp() throws Exception {
        super.setUp();
        xstream = new XStream();
        xstream.addPermission(NoTypePermission.NONE); // clear out defaults
        // only allow the acceptance-test object packages and this test's nested classes
        xstream.allowTypesByWildcard(new String[]{
            AbstractAcceptanceTest.class.getPackage().getName()+".*objects.**",
            this.getClass().getName()+"$*"
        });
        xstream.alias("x", X.class);
        xstream.alias("y", Y.class);
        xstream.alias("funny", FunnyConstructor.class);
        xstream.alias("with-list", WithList.class);
    }

    // Field aliases may contain '-' and '_'; both must resolve to the right field.
    public void testUnmarshalsObjectFromXmlWithUnderscores() {
        String xml =
                "<u-u>" +
                " <u-f>foo</u-f>" +
                " <u_f>_foo</u_f>" +
                "</u-u>";
        xstream.alias("u-u", U.class);
        xstream.aliasField("u-f", U.class, "aStr");
        xstream.aliasField("u_f", U.class, "a_Str");
        U u = (U) xstream.fromXML(xml);
        assertEquals("foo", u.aStr);
        assertEquals("_foo", u.a_Str);
    }

    // '_' in a class name is escaped as '_-' in XStream's default coder.
    public void testUnmarshalsObjectFromXmlWithClassContainingUnderscores() {
        String xml =
                "<com.thoughtworks.xstream.XStreamTest_-U_U>" +
                " <aStr>custom value</aStr>" +
                "</com.thoughtworks.xstream.XStreamTest_-U_U>";
        U_U u = (U_U) xstream.fromXML(xml);
        assertEquals("custom value", u.aStr);
    }

    public void testUnmarshalsObjectFromXmlWithUnderscoresWithoutAliasingFields() {
        String xml =
                "<u-u>" +
                " <a_Str>custom value</a_Str>" +
                "</u-u>";
        xstream.alias("u-u", U.class);
        U u = (U) xstream.fromXML(xml);
        assertEquals("custom value", u.a_Str);
    }

    // Nested class whose name contains an underscore (see escaping test above).
    public static class U_U {
        String aStr;
    }

    public void testUnmarshalsObjectFromXml() {
        String xml =
                "<x>" +
                " <aStr>joe</aStr>" +
                " <anInt>8</anInt>" +
                " <innerObj>" +
                " <yField>walnes</yField>" +
                " </innerObj>" +
                "</x>";
        X x = (X) xstream.fromXML(xml);
        assertEquals("joe", x.aStr);
        assertEquals(8, x.anInt);
        assertEquals("walnes", x.innerObj.yField);
    }

    // Round-trip equality check: unmarshalling the expected XML yields an equal object.
    public void testMarshalsObjectToXml() {
        X x = new X();
        x.anInt = 9;
        x.aStr = "zzz";
        x.innerObj = new Y();
        x.innerObj.yField = "ooo";
        String expected =
                "<x>\n" +
                " <aStr>zzz</aStr>\n" +
                " <anInt>9</anInt>\n" +
                " <innerObj>\n" +
                " <yField>ooo</yField>\n" +
                " </innerObj>\n" +
                "</x>";
        assertEquals(xstream.fromXML(expected), x);
    }

    // Requires the enhanced-mode instantiator, hence the JVM 1.4 guard.
    public void testUnmarshalsClassWithoutDefaultConstructor() {
        if (!JVM.is14()) return;
        String xml =
                "<funny>" +
                " <i>999</i>" +
                "</funny>";
        FunnyConstructor funnyConstructor = (FunnyConstructor) xstream.fromXML(xml);
        assertEquals(999, funnyConstructor.i);
    }

    public void testHandlesLists() {
        WithList original = new WithList();
        Y y = new Y();
        y.yField = "a";
        original.things.add(y);
        original.things.add(new X(3));
        original.things.add(new X(1));
        String xml = xstream.toXML(original);
        String expected =
                "<with-list>\n" +
                " <things>\n" +
                " <y>\n" +
                " <yField>a</yField>\n" +
                " </y>\n" +
                " <x>\n" +
                " <anInt>3</anInt>\n" +
                " </x>\n" +
                " <x>\n" +
                " <anInt>1</anInt>\n" +
                " </x>\n" +
                " </things>\n" +
                "</with-list>";
        assertEquals(expected, xml);
        WithList result = (WithList) xstream.fromXML(xml);
        assertEquals(original, result);
    }

    // Non-static inner classes carry a hidden outer-class reference, serialized as <outer-class>.
    public void testCanHandleNonStaticPrivateInnerClass() {
        if (!JVM.is14()) return;
        NonStaticInnerClass obj = new NonStaticInnerClass();
        obj.field = 3;
        xstream.alias("inner", NonStaticInnerClass.class);
        String xml = xstream.toXML(obj);
        String expected = "" +
                "<inner>\n" +
                " <field>3</field>\n" +
                " <outer-class>\n" +
                " <fName>testCanHandleNonStaticPrivateInnerClass</fName>\n" +
                " </outer-class>\n" +
                "</inner>";
        assertEquals(xstream.fromXML(expected), obj);
        NonStaticInnerClass result = (NonStaticInnerClass) xstream.fromXML(xml);
        assertEquals(obj.field, result.field);
    }

    public void testClassWithoutMappingUsesFullyQualifiedName() {
        Person obj = new Person();
        String xml = xstream.toXML(obj);
        String expected = "<com.thoughtworks.xstream.XStreamTest_-Person/>";
        assertEquals(expected, xml);
        Person result = (Person) xstream.fromXML(xml);
        assertEquals(obj, result);
    }

    private class NonStaticInnerClass extends StandardObject {
        int field;
    }

    public void testCanBeBeUsedMultipleTimesWithSameInstance() {
        Y obj = new Y();
        obj.yField = "x";
        assertEquals(xstream.toXML(obj), xstream.toXML(obj));
    }

    // Demonstrates reaching through to the underlying dom4j Element during unmarshalling.
    public void testAccessToUnderlyingDom4JImplementation() throws Exception {
        String xml =
                "<person>" +
                " <firstName>jason</firstName>" +
                " <lastName>van Zyl</lastName>" +
                " <element>" +
                " <foo>bar</foo>" +
                " </element>" +
                "</person>";
        xstream.registerConverter(new ElementConverter());
        xstream.alias("person", Person.class);
        Dom4JDriver driver = new Dom4JDriver();
        Person person = (Person) xstream.unmarshal(driver.createReader(new StringReader(xml)));
        assertEquals("jason", person.firstName);
        assertEquals("van Zyl", person.lastName);
        assertNotNull(person.element);
        assertEquals("bar", person.element.element("foo").getText());
    }

    public static class Person extends StandardObject {
        String firstName;
        String lastName;
        Element element;
    }

    // Marshals nothing; unmarshals by grabbing the current dom4j node directly.
    private class ElementConverter implements Converter {

        public boolean canConvert(Class type) {
            return Element.class.isAssignableFrom(type);
        }

        public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
        }

        public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
            AbstractDocumentReader documentReader = (AbstractDocumentReader)reader.underlyingReader();
            Element element = (Element) documentReader.getCurrent();
            // consume the children so the reader position stays consistent
            while (reader.hasMoreChildren()) {
                reader.moveDown();
                reader.moveUp();
            }
            return element;
        }
    }

    // fromXML(xml, root) must populate the provided instance rather than build a new one.
    public void testPopulationOfAnObjectGraphStartingWithALiveRootObject() throws Exception {
        String xml =
                "<component>" +
                " <host>host</host>" +
                " <port>8000</port>" +
                "</component>";
        xstream.alias("component", Component.class);
        Component component0 = new Component();
        Component component1 = (Component) xstream.fromXML(xml, component0);
        assertSame(component0, component1);
        assertEquals("host", component0.host);
        assertEquals(8000, component0.port);
    }

    static class Component {
        String host;
        int port;
    }

    // An object may pass itself as the root for both directions.
    public void testPopulationOfThisAsRootObject() throws Exception {
        String xml ="" +
                "<component>\n" +
                " <host>host</host>\n" +
                " <port>8000</port>\n" +
                "</component>";
        xstream.alias("component", SelfSerializingComponent.class);
        SelfSerializingComponent component = new SelfSerializingComponent();
        component.host = "host";
        component.port = 8000;
        assertEquals(xml, component.toXML(xstream));
        component.host = "foo";
        component.port = -1;
        component.fromXML(xstream, xml);
        assertEquals("host", component.host);
        assertEquals(8000, component.port);
    }

    static class SelfSerializingComponent extends Component {
        String toXML(XStream xstream) {
            return xstream.toXML(this);
        }
        void fromXML(XStream xstream, String xml) {
            xstream.fromXML(xml, this);
        }
    }

    public void testUnmarshalsWhenAllImplementationsAreSpecifiedUsingAClassIdentifier() throws Exception {
        String xml =
                "<handlerManager class='com.thoughtworks.acceptance.someobjects.HandlerManager'>" +
                " <handlers>" +
                " <handler class='com.thoughtworks.acceptance.someobjects.Handler'>" +
                " <protocol class='com.thoughtworks.acceptance.someobjects.Protocol'>" +
                " <id>foo</id> " +
                " </protocol> " +
                " </handler>" +
                " </handlers>" +
                "</handlerManager>";
        HandlerManager hm = (HandlerManager) xstream.fromXML(xml);
        Handler h = (Handler) hm.getHandlers().get(0);
        Protocol p = h.getProtocol();
        assertEquals("foo", p.getId());
    }

    // Closing twice must be idempotent.
    public void testObjectOutputStreamCloseTwice() throws IOException {
        ObjectOutputStream oout = xstream.createObjectOutputStream(new StringWriter());
        oout.writeObject(new Integer(1));
        oout.close();
        oout.close();
    }

    public void testObjectOutputStreamCloseAndFlush() throws IOException {
        ObjectOutputStream oout = xstream.createObjectOutputStream(new StringWriter());
        oout.writeObject(new Integer(1));
        oout.close();
        try {
            oout.flush();
            fail("Closing and flushing should throw a StreamException");
        } catch (StreamException e) {
            // ok
        }
    }

    public void testObjectOutputStreamCloseAndWrite() throws IOException {
        ObjectOutputStream oout = xstream.createObjectOutputStream(new StringWriter());
        oout.writeObject(new Integer(1));
        oout.close();
        try {
            oout.writeObject(new Integer(2));
            fail("Closing and writing should throw a StreamException");
        } catch (StreamException e) {
            // ok
        }
    }

    public void testUnmarshalsFromFile() throws IOException {
        File file = createTestFile();
        xstream.registerConverter(new ElementConverter());
        xstream.alias("component", Component.class);
        Component person = (Component)xstream.fromXML(file);
        assertEquals(8000, person.port);
    }

    public void testUnmarshalsFromURL() throws IOException {
        File file = createTestFile();
        xstream.alias("component", Component.class);
        Component person = (Component)xstream.fromXML(file);
        assertEquals(8000, person.port);
    }

    // Writes a small UTF-8 component document under target/test-data and returns it.
    private File createTestFile() throws FileNotFoundException, IOException, UnsupportedEncodingException {
        String xml ="" +
                "<component>\n" +
                " <host>host</host>\n" +
                " <port>8000</port>\n" +
                "</component>";
        File dir = new File("target/test-data");
        dir.mkdirs();
        File file = new File(dir, "test.xml");
        FileOutputStream fos = new FileOutputStream(file);
        fos.write(xml.getBytes("UTF-8"));
        fos.close();
        return file;
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.ex;

import com.intellij.ide.DataManager;
import com.intellij.ide.util.ClassFilter;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.ui.InputValidatorEx;
import com.intellij.openapi.ui.ex.MultiLineLabel;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiNameHelper;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.*;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ui.ItemRemovable;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.table.*;
import java.awt.*;
import java.util.List;

/**
 * Swing panel that edits the list of entry-point class patterns
 * ({@link EntryPointsManagerBase.ClassPattern}). Shows a three-column table
 * (with-subclasses flag, class pattern, method pattern) with add/remove toolbar
 * actions; edits are applied directly to the list passed into the constructor.
 */
class ClassPatternsPanel extends JPanel {

  // Backing list — mutated in place; the caller owns and observes it.
  private final List<EntryPointsManagerBase.ClassPattern> myModifiedPatterns;
  private final JBTable myTable;

  public ClassPatternsPanel(List<EntryPointsManagerBase.ClassPattern> patterns) {
    super(new BorderLayout());
    myModifiedPatterns = patterns;
    myTable = createTableForPatterns();
    final String addClassMessage = "Add Class";
    final ToolbarDecorator toolbarDecorator = ToolbarDecorator.createDecorator(myTable)
      .setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          // Resolve the project from the table's data context; fall back to the
          // default project so the chooser still works outside a project frame.
          Project project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(myTable));
          if (project == null) project = ProjectManager.getInstance().getDefaultProject();
          TreeClassChooser chooser = TreeClassChooserFactory.getInstance(project)
            .createWithInnerClassesScopeChooser(addClassMessage, GlobalSearchScope.allScope(project), ClassFilter.ALL, null);
          chooser.showDialog();
          final PsiClass selected = chooser.getSelected();
          if (selected != null) {
            insertRow(selected.getQualifiedName());
          }
        }
      })
      .setAddActionName(addClassMessage)
      .setRemoveAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          TableUtil.removeSelectedItems(myTable);
          myTable.repaint();
        }
      })
      .setRemoveActionUpdater(new AnActionButtonUpdater() {
        @Override
        public boolean isEnabled(AnActionEvent e) {
          return myTable.getSelectedRow() >= 0;
        }
      });
    add(SeparatorFactory.createSeparator("Mark code as entry point if qualified name matches", null), BorderLayout.NORTH);
    add(toolbarDecorator.createPanel(), BorderLayout.CENTER);
    add(new MultiLineLabel("Leave method blank to represent constructors\n" +
                           "Any * will match against one or more characters in the qualified name (including dots)"), BorderLayout.SOUTH);
    setPreferredSize(new JBDimension(-1, 250));
  }

  // Appends a new pattern row for the given qualified class name and selects it.
  private void insertRow(String pattern) {
    EntryPointsManagerBase.ClassPattern classPattern = new EntryPointsManagerBase.ClassPattern();
    classPattern.pattern = pattern;
    myModifiedPatterns.add(classPattern);
    AbstractTableModel model = (AbstractTableModel)myTable.getModel();
    final int row = myModifiedPatterns.size() - 1;
    model.fireTableRowsInserted(row, row);
    myTable.setRowSelectionInterval(row, row);
  }

  // Builds the table: single selection, a renderer that shows an empty method
  // cell as the grayed placeholder "constructors", single-click editing, and a
  // fixed-width checkbox column.
  private JBTable createTableForPatterns() {
    TableModel dataModel = new MyTableModel();
    final JBTable result = new JBTable(dataModel);
    result.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    result.getColumnModel().getColumn(2).setCellRenderer(new DefaultTableCellRenderer() {
      @Override
      public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
        final Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        if (value instanceof String && ((String)value).isEmpty()) {
          setText("constructors");
          setForeground(UIUtil.getInactiveTextColor());
        }
        else if (value instanceof String) {
          setText((String)value);
          setForeground(UIUtil.getTableForeground(isSelected));
        }
        return component;
      }
    });
    TableCellEditor editor = result.getDefaultEditor(String.class);
    if (editor instanceof DefaultCellEditor) {
      ((DefaultCellEditor)editor).setClickCountToStart(1);
    }
    // Size the boolean column exactly to its header text (plus padding) and pin it.
    final TableColumn column = result.getTableHeader().getColumnModel().getColumn(0);
    column.setResizable(false);
    final int width = 15 + result.getTableHeader().getFontMetrics(result.getTableHeader().getFont()).stringWidth(result.getColumnName(0));
    column.setMaxWidth(width);
    column.setMinWidth(width);
    return result;
  }

  /**
   * Validates all patterns; returns a user-visible error message for the first
   * invalid one, or null when everything is well-formed. Also commits any
   * in-progress cell edit first so the latest value is validated.
   */
  public String getValidationError(Project project) {
    TableUtil.stopEditing(myTable);
    final PsiNameHelper nameHelper = PsiNameHelper.getInstance(project);
    final ClassPatternValidator validator = new ClassPatternValidator(nameHelper);
    for (EntryPointsManagerBase.ClassPattern pattern : myModifiedPatterns) {
      final String errorText = validator.getErrorText(pattern.pattern);
      if (errorText != null) {
        return errorText;
      }
      // With '*' wildcards stripped, whatever remains must be a plain identifier.
      final String subst = pattern.method.replace("*", "");
      if (!subst.isEmpty()) {
        if (!nameHelper.isIdentifier(subst)) {
          return "Method pattern '" + pattern.method + "' must be a valid java identifier, only '*' are accepted as placeholders";
        }
      }
    }
    return null;
  }

  // Validates a class pattern: after removing '*' and '.', the remainder must be
  // empty or a valid Java qualified name; a leading '.' is always rejected.
  private static class ClassPatternValidator implements InputValidatorEx {
    public static final String ERROR_MESSAGE = "Pattern must be a valid java qualified name, only '*' are accepted as placeholders";
    private final PsiNameHelper myNameHelper;

    public ClassPatternValidator(PsiNameHelper nameHelper) {
      myNameHelper = nameHelper;
    }

    @Nullable
    @Override
    public String getErrorText(String inputString) {
      if (inputString.startsWith(".")) return ERROR_MESSAGE;
      final String qName = inputString.replace("*", "").replace(".", "");
      return !StringUtil.isEmpty(qName) && !myNameHelper.isQualifiedName(qName) ? ERROR_MESSAGE : null;
    }

    @Override
    public boolean checkInput(String inputString) {
      return getErrorText(inputString) == null;
    }

    @Override
    public boolean canClose(String inputString) {
      return getErrorText(inputString) == null;
    }
  }

  // Table model over myModifiedPatterns: col 0 = hierarchically (Boolean),
  // col 1 = class pattern, col 2 = method pattern. All cells editable.
  private class MyTableModel extends AbstractTableModel implements ItemRemovable {
    private final String[] myNames;

    public MyTableModel() {
      myNames = new String[] {"With Subclasses", "Class", "Method"};
    }

    public int getColumnCount() {
      return 3;
    }

    public int getRowCount() {
      return myModifiedPatterns.size();
    }

    @Nullable
    public Object getValueAt(int row, int col) {
      if (row < 0 || row > myModifiedPatterns.size() - 1) return null;
      final EntryPointsManagerBase.ClassPattern classPattern = myModifiedPatterns.get(row);
      if (classPattern == null) return null;
      if (col == 0) {
        return classPattern.hierarchically;
      }
      if (col == 1) {
        return classPattern.pattern;
      }
      return classPattern.method;
    }

    public String getColumnName(int column) {
      return myNames[column];
    }

    public Class getColumnClass(int col) {
      if (col == 0) {
        return Boolean.class;
      }
      if (col == 1) {
        return String.class;
      }
      if (col == 2) {
        return String.class;
      }
      throw new IllegalArgumentException(String.valueOf(col));
    }

    public boolean isCellEditable(int row, int col) {
      return true;
    }

    public void setValueAt(Object aValue, int row, int col) {
      EntryPointsManagerBase.ClassPattern classPattern = myModifiedPatterns.get(row);
      if (classPattern == null) return;
      if (col == 0) {
        classPattern.hierarchically = (boolean)aValue;
      }
      else if (col == 1){
        classPattern.pattern = (String)aValue;
      }
      else {
        classPattern.method = (String)aValue;
      }
      fireTableRowsUpdated(row, row);
    }

    @Override
    public void removeRow(int idx) {
      myModifiedPatterns.remove(idx);
      fireTableRowsDeleted(idx, idx);
    }
  }
}
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.io.socketConnection.impl; import com.intellij.util.EventDispatcher; import com.intellij.util.io.socketConnection.*; import consulo.disposer.Disposable; import consulo.disposer.Disposer; import consulo.logging.Logger; import consulo.util.collection.primitive.ints.IntMaps; import consulo.util.collection.primitive.ints.IntObjectMap; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.util.ArrayList; import java.util.List; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicReference; /** * @author nik */ public abstract class SocketConnectionBase<Request extends AbstractRequest, Response extends AbstractResponse> implements SocketConnection<Request, Response> { private static final Logger LOG = Logger.getInstance(SocketConnectionBase.class); private final Object myLock = new Object(); private int myPort = -1; private final AtomicReference<ConnectionState> myState = new AtomicReference<ConnectionState>(new ConnectionState(ConnectionStatus.NOT_CONNECTED)); private boolean myStopping; private final EventDispatcher<SocketConnectionListener> myDispatcher = EventDispatcher.create(SocketConnectionListener.class); private List<Thread> myThreadsToInterrupt = new ArrayList<Thread>(); private final 
RequestResponseExternalizerFactory<Request, Response> myExternalizerFactory; private final LinkedBlockingQueue<Request> myRequests = new LinkedBlockingQueue<Request>(); private final IntObjectMap<TimeoutInfo> myTimeouts = IntMaps.newIntObjectHashMap(); private final ResponseProcessor<Response> myResponseProcessor; public SocketConnectionBase(@Nonnull RequestResponseExternalizerFactory<Request, Response> factory) { myResponseProcessor = new ResponseProcessor<Response>(this); myExternalizerFactory = factory; } @Override public void sendRequest(@Nonnull Request request) { sendRequest(request, null); } @Override public void sendRequest(@Nonnull Request request, @Nullable AbstractResponseToRequestHandler<? extends Response> handler) { if (handler != null) { myResponseProcessor.registerHandler(request.getId(), handler); } try { myRequests.put(request); } catch (InterruptedException ignored) { } } @Override public void sendRequest(@Nonnull Request request, @Nullable AbstractResponseToRequestHandler<? 
extends Response> handler, int timeout, @Nonnull Runnable onTimeout) { myTimeouts.put(request.getId(), new TimeoutInfo(timeout, onTimeout)); sendRequest(request, handler); } @Override public <R extends Response> void registerHandler(@Nonnull Class<R> responseClass, @Nonnull AbstractResponseHandler<R> handler) { myResponseProcessor.registerHandler(responseClass, handler); } @Override public boolean isStopping() { synchronized (myLock) { return myStopping; } } protected void processRequests(RequestWriter<Request> writer) throws IOException { addThreadToInterrupt(); try { while (!isStopping()) { final Request request = myRequests.take(); LOG.debug("sending request: " + request); final TimeoutInfo timeoutInfo = myTimeouts.remove(request.getId()); if (timeoutInfo != null) { myResponseProcessor.registerTimeoutHandler(request.getId(), timeoutInfo.myTimeout, timeoutInfo.myOnTimeout); } writer.writeRequest(request); } } catch (InterruptedException ignored) { } setStatus(ConnectionStatus.DISCONNECTED, null); removeThreadToInterrupt(); } protected void addThreadToInterrupt() { synchronized (myLock) { myThreadsToInterrupt.add(Thread.currentThread()); } } protected void removeThreadToInterrupt() { synchronized (myLock) { myThreadsToInterrupt.remove(Thread.currentThread()); } } @Override public void dispose() { LOG.debug("Firefox connection disposed"); } @Override public int getPort() { return myPort; } protected void setStatus(@Nonnull ConnectionStatus status, @Nullable String message) { synchronized (myLock) { myState.set(new ConnectionState(status, message, null)); } myDispatcher.getMulticaster().statusChanged(status); } @Override @Nonnull public ConnectionState getState() { synchronized (myLock) { return myState.get(); } } @Override public void addListener(@Nonnull SocketConnectionListener listener, @Nullable Disposable parentDisposable) { if (parentDisposable != null) { myDispatcher.addListener(listener, parentDisposable); } else { myDispatcher.addListener(listener); } } 
@Override public void close() { synchronized (myLock) { if (myStopping) return; myStopping = true; } LOG.debug("closing connection"); synchronized (myLock) { for (Thread thread : myThreadsToInterrupt) { thread.interrupt(); } } onClosing(); myResponseProcessor.stopReading(); Disposer.dispose(this); } protected void onClosing() { } protected void attachToSocket(Socket socket) throws IOException { setStatus(ConnectionStatus.CONNECTED, null); LOG.debug("connected"); final OutputStream outputStream = socket.getOutputStream(); final InputStream inputStream = socket.getInputStream(); myResponseProcessor.startReading(myExternalizerFactory.createResponseReader(inputStream)); processRequests(myExternalizerFactory.createRequestWriter(outputStream)); } protected void setPort(int port) { myPort = port; } private static class TimeoutInfo { private int myTimeout; private Runnable myOnTimeout; private TimeoutInfo(int timeout, Runnable onTimeout) { myTimeout = timeout; myOnTimeout = onTimeout; } } }
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the files COPYING and Copyright.html. COPYING can be found at the root * * of the source code distribution tree; Copyright.html can be found at the * * root level of an installed copy of the electronic HDF5 document set and * * is linked from the top-level documents page. It can also be found at * * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * * access to either file, you may request a copy from help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ /************************************************************ This example shows how to read and write bitfield datatypes to a dataset. The program first writes bit fields to a dataset with a dataspace of DIM0xDIM1, then closes the file. Next, it reopens the file, reads back the data, and outputs it to the screen. ************************************************************/ package examples.datatypes; import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; public class H5Ex_T_Bit { private static String FILENAME = "H5Ex_T_Bit.h5"; private static String DATASETNAME = "DS1"; private static final int DIM0 = 4; private static final int DIM1 = 7; private static final int RANK = 2; private static void CreateDataset() { long file_id = -1; long dataspace_id = -1; long dataset_id = -1; long[] dims = { DIM0, DIM1 }; int[][] dset_data = new int[DIM0][DIM1]; // Initialize data. 
for (int indx = 0; indx < DIM0; indx++) for (int jndx = 0; jndx < DIM1; jndx++) { dset_data[indx][jndx] = 0; dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */ dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */ } // Create a new file using default properties. try { file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); } catch (Exception e) { e.printStackTrace(); } // Create dataspace. Setting maximum size to NULL sets the maximum // size to be the current size. try { dataspace_id = H5.H5Screate_simple(RANK, dims, null); } catch (Exception e) { e.printStackTrace(); } // Create the dataset. try { if ((file_id >= 0) && (dataspace_id >= 0)) dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); } catch (Exception e) { e.printStackTrace(); } // Write the bitfield data to the dataset. try { if (dataset_id >= 0) H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data); } catch (Exception e) { e.printStackTrace(); } // End access to the dataset and release resources used by it. try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); } // Terminate access to the data space. try { if (dataspace_id >= 0) H5.H5Sclose(dataspace_id); } catch (Exception e) { e.printStackTrace(); } // Close the file. try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); } } private static void ReadDataset() { long file_id = -1; long dataspace_id = -1; long dataset_id = -1; long[] dims = { DIM0, DIM1 }; int[][] dset_data; // Open an existing file. 
try { file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); } catch (Exception e) { e.printStackTrace(); } // Open an existing dataset. try { if (file_id >= 0) dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); } catch (Exception e) { e.printStackTrace(); } // Get dataspace and allocate memory for read buffer. try { if (dataset_id >= 0) dataspace_id = H5.H5Dget_space(dataset_id); } catch (Exception e) { e.printStackTrace(); } try { if (dataspace_id >= 0) H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); } catch (Exception e) { e.printStackTrace(); } // Allocate array of pointers to two-dimensional arrays (the // elements of the dataset. dset_data = new int[(int) dims[0]][(int) (dims[1])]; // Read data. try { if (dataset_id >= 0) H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data); } catch (Exception e) { e.printStackTrace(); } // Output the data to the screen. System.out.println(DATASETNAME + ":"); for (int indx = 0; indx < dims[0]; indx++) { System.out.print(" ["); for (int jndx = 0; jndx < dims[1]; jndx++) { System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", "); System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", "); System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", "); System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}"); } System.out.println("]"); } System.out.println(); // End access to the dataset and release resources used by it. try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); } // Terminate access to the data space. try { if (dataspace_id >= 0) H5.H5Sclose(dataspace_id); } catch (Exception e) { e.printStackTrace(); } // Close the file. 
try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); } } public static void main(String[] args) { H5Ex_T_Bit.CreateDataset(); // Now we begin the read section of this example. Here we assume // the dataset and array have the same name and rank, but can have // any size. Therefore we must allocate a new array to read in // data using malloc(). H5Ex_T_Bit.ReadDataset(); } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.appstream.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appstream-2016-12-01/UpdateStack" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateStackRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The stack name to display. * </p> */ private String displayName; /** * <p> * The description to display. * </p> */ private String description; /** * <p> * The name of the stack. * </p> */ private String name; /** * <p> * The storage connectors to enable. * </p> */ private java.util.List<StorageConnector> storageConnectors; /** * <p> * Deletes the storage connectors currently enabled for the stack. * </p> */ @Deprecated private Boolean deleteStorageConnectors; /** * <p> * The URL that users are redirected to after their streaming session ends. * </p> */ private String redirectURL; /** * <p> * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send * Feedback link is displayed. * </p> */ private String feedbackURL; /** * <p> * The stack attributes to delete. 
* </p> */ private java.util.List<String> attributesToDelete; /** * <p> * The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are * enabled. * </p> */ private java.util.List<UserSetting> userSettings; /** * <p> * The persistent application settings for users of a stack. When these settings are enabled, changes that users * make to applications and Windows settings are automatically saved after each session and applied to the next * session. * </p> */ private ApplicationSettings applicationSettings; /** * <p> * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream * 2.0 only through the specified endpoints. * </p> */ private java.util.List<AccessEndpoint> accessEndpoints; /** * <p> * The stack name to display. * </p> * * @param displayName * The stack name to display. */ public void setDisplayName(String displayName) { this.displayName = displayName; } /** * <p> * The stack name to display. * </p> * * @return The stack name to display. */ public String getDisplayName() { return this.displayName; } /** * <p> * The stack name to display. * </p> * * @param displayName * The stack name to display. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withDisplayName(String displayName) { setDisplayName(displayName); return this; } /** * <p> * The description to display. * </p> * * @param description * The description to display. */ public void setDescription(String description) { this.description = description; } /** * <p> * The description to display. * </p> * * @return The description to display. */ public String getDescription() { return this.description; } /** * <p> * The description to display. * </p> * * @param description * The description to display. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public UpdateStackRequest withDescription(String description) { setDescription(description); return this; } /** * <p> * The name of the stack. * </p> * * @param name * The name of the stack. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the stack. * </p> * * @return The name of the stack. */ public String getName() { return this.name; } /** * <p> * The name of the stack. * </p> * * @param name * The name of the stack. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withName(String name) { setName(name); return this; } /** * <p> * The storage connectors to enable. * </p> * * @return The storage connectors to enable. */ public java.util.List<StorageConnector> getStorageConnectors() { return storageConnectors; } /** * <p> * The storage connectors to enable. * </p> * * @param storageConnectors * The storage connectors to enable. */ public void setStorageConnectors(java.util.Collection<StorageConnector> storageConnectors) { if (storageConnectors == null) { this.storageConnectors = null; return; } this.storageConnectors = new java.util.ArrayList<StorageConnector>(storageConnectors); } /** * <p> * The storage connectors to enable. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setStorageConnectors(java.util.Collection)} or {@link #withStorageConnectors(java.util.Collection)} if * you want to override the existing values. * </p> * * @param storageConnectors * The storage connectors to enable. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withStorageConnectors(StorageConnector... 
storageConnectors) { if (this.storageConnectors == null) { setStorageConnectors(new java.util.ArrayList<StorageConnector>(storageConnectors.length)); } for (StorageConnector ele : storageConnectors) { this.storageConnectors.add(ele); } return this; } /** * <p> * The storage connectors to enable. * </p> * * @param storageConnectors * The storage connectors to enable. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withStorageConnectors(java.util.Collection<StorageConnector> storageConnectors) { setStorageConnectors(storageConnectors); return this; } /** * <p> * Deletes the storage connectors currently enabled for the stack. * </p> * * @param deleteStorageConnectors * Deletes the storage connectors currently enabled for the stack. */ @Deprecated public void setDeleteStorageConnectors(Boolean deleteStorageConnectors) { this.deleteStorageConnectors = deleteStorageConnectors; } /** * <p> * Deletes the storage connectors currently enabled for the stack. * </p> * * @return Deletes the storage connectors currently enabled for the stack. */ @Deprecated public Boolean getDeleteStorageConnectors() { return this.deleteStorageConnectors; } /** * <p> * Deletes the storage connectors currently enabled for the stack. * </p> * * @param deleteStorageConnectors * Deletes the storage connectors currently enabled for the stack. * @return Returns a reference to this object so that method calls can be chained together. */ @Deprecated public UpdateStackRequest withDeleteStorageConnectors(Boolean deleteStorageConnectors) { setDeleteStorageConnectors(deleteStorageConnectors); return this; } /** * <p> * Deletes the storage connectors currently enabled for the stack. * </p> * * @return Deletes the storage connectors currently enabled for the stack. 
*/ @Deprecated public Boolean isDeleteStorageConnectors() { return this.deleteStorageConnectors; } /** * <p> * The URL that users are redirected to after their streaming session ends. * </p> * * @param redirectURL * The URL that users are redirected to after their streaming session ends. */ public void setRedirectURL(String redirectURL) { this.redirectURL = redirectURL; } /** * <p> * The URL that users are redirected to after their streaming session ends. * </p> * * @return The URL that users are redirected to after their streaming session ends. */ public String getRedirectURL() { return this.redirectURL; } /** * <p> * The URL that users are redirected to after their streaming session ends. * </p> * * @param redirectURL * The URL that users are redirected to after their streaming session ends. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withRedirectURL(String redirectURL) { setRedirectURL(redirectURL); return this; } /** * <p> * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send * Feedback link is displayed. * </p> * * @param feedbackURL * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no * Send Feedback link is displayed. */ public void setFeedbackURL(String feedbackURL) { this.feedbackURL = feedbackURL; } /** * <p> * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send * Feedback link is displayed. * </p> * * @return The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no * Send Feedback link is displayed. */ public String getFeedbackURL() { return this.feedbackURL; } /** * <p> * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no Send * Feedback link is displayed. 
* </p> * * @param feedbackURL * The URL that users are redirected to after they choose the Send Feedback link. If no URL is specified, no * Send Feedback link is displayed. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withFeedbackURL(String feedbackURL) { setFeedbackURL(feedbackURL); return this; } /** * <p> * The stack attributes to delete. * </p> * * @return The stack attributes to delete. * @see StackAttribute */ public java.util.List<String> getAttributesToDelete() { return attributesToDelete; } /** * <p> * The stack attributes to delete. * </p> * * @param attributesToDelete * The stack attributes to delete. * @see StackAttribute */ public void setAttributesToDelete(java.util.Collection<String> attributesToDelete) { if (attributesToDelete == null) { this.attributesToDelete = null; return; } this.attributesToDelete = new java.util.ArrayList<String>(attributesToDelete); } /** * <p> * The stack attributes to delete. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAttributesToDelete(java.util.Collection)} or {@link #withAttributesToDelete(java.util.Collection)} if * you want to override the existing values. * </p> * * @param attributesToDelete * The stack attributes to delete. * @return Returns a reference to this object so that method calls can be chained together. * @see StackAttribute */ public UpdateStackRequest withAttributesToDelete(String... attributesToDelete) { if (this.attributesToDelete == null) { setAttributesToDelete(new java.util.ArrayList<String>(attributesToDelete.length)); } for (String ele : attributesToDelete) { this.attributesToDelete.add(ele); } return this; } /** * <p> * The stack attributes to delete. * </p> * * @param attributesToDelete * The stack attributes to delete. * @return Returns a reference to this object so that method calls can be chained together. 
* @see StackAttribute */ public UpdateStackRequest withAttributesToDelete(java.util.Collection<String> attributesToDelete) { setAttributesToDelete(attributesToDelete); return this; } /** * <p> * The stack attributes to delete. * </p> * * @param attributesToDelete * The stack attributes to delete. * @return Returns a reference to this object so that method calls can be chained together. * @see StackAttribute */ public UpdateStackRequest withAttributesToDelete(StackAttribute... attributesToDelete) { java.util.ArrayList<String> attributesToDeleteCopy = new java.util.ArrayList<String>(attributesToDelete.length); for (StackAttribute value : attributesToDelete) { attributesToDeleteCopy.add(value.toString()); } if (getAttributesToDelete() == null) { setAttributesToDelete(attributesToDeleteCopy); } else { getAttributesToDelete().addAll(attributesToDeleteCopy); } return this; } /** * <p> * The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are * enabled. * </p> * * @return The actions that are enabled or disabled for users during their streaming sessions. By default, these * actions are enabled. */ public java.util.List<UserSetting> getUserSettings() { return userSettings; } /** * <p> * The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are * enabled. * </p> * * @param userSettings * The actions that are enabled or disabled for users during their streaming sessions. By default, these * actions are enabled. */ public void setUserSettings(java.util.Collection<UserSetting> userSettings) { if (userSettings == null) { this.userSettings = null; return; } this.userSettings = new java.util.ArrayList<UserSetting>(userSettings); } /** * <p> * The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are * enabled. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setUserSettings(java.util.Collection)} or {@link #withUserSettings(java.util.Collection)} if you want to * override the existing values. * </p> * * @param userSettings * The actions that are enabled or disabled for users during their streaming sessions. By default, these * actions are enabled. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withUserSettings(UserSetting... userSettings) { if (this.userSettings == null) { setUserSettings(new java.util.ArrayList<UserSetting>(userSettings.length)); } for (UserSetting ele : userSettings) { this.userSettings.add(ele); } return this; } /** * <p> * The actions that are enabled or disabled for users during their streaming sessions. By default, these actions are * enabled. * </p> * * @param userSettings * The actions that are enabled or disabled for users during their streaming sessions. By default, these * actions are enabled. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withUserSettings(java.util.Collection<UserSetting> userSettings) { setUserSettings(userSettings); return this; } /** * <p> * The persistent application settings for users of a stack. When these settings are enabled, changes that users * make to applications and Windows settings are automatically saved after each session and applied to the next * session. * </p> * * @param applicationSettings * The persistent application settings for users of a stack. When these settings are enabled, changes that * users make to applications and Windows settings are automatically saved after each session and applied to * the next session. */ public void setApplicationSettings(ApplicationSettings applicationSettings) { this.applicationSettings = applicationSettings; } /** * <p> * The persistent application settings for users of a stack. 
When these settings are enabled, changes that users * make to applications and Windows settings are automatically saved after each session and applied to the next * session. * </p> * * @return The persistent application settings for users of a stack. When these settings are enabled, changes that * users make to applications and Windows settings are automatically saved after each session and applied to * the next session. */ public ApplicationSettings getApplicationSettings() { return this.applicationSettings; } /** * <p> * The persistent application settings for users of a stack. When these settings are enabled, changes that users * make to applications and Windows settings are automatically saved after each session and applied to the next * session. * </p> * * @param applicationSettings * The persistent application settings for users of a stack. When these settings are enabled, changes that * users make to applications and Windows settings are automatically saved after each session and applied to * the next session. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withApplicationSettings(ApplicationSettings applicationSettings) { setApplicationSettings(applicationSettings); return this; } /** * <p> * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream * 2.0 only through the specified endpoints. * </p> * * @return The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to * AppStream 2.0 only through the specified endpoints. */ public java.util.List<AccessEndpoint> getAccessEndpoints() { return accessEndpoints; } /** * <p> * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream * 2.0 only through the specified endpoints. * </p> * * @param accessEndpoints * The list of virtual private cloud (VPC) interface endpoint objects. 
Users of the stack can connect to * AppStream 2.0 only through the specified endpoints. */ public void setAccessEndpoints(java.util.Collection<AccessEndpoint> accessEndpoints) { if (accessEndpoints == null) { this.accessEndpoints = null; return; } this.accessEndpoints = new java.util.ArrayList<AccessEndpoint>(accessEndpoints); } /** * <p> * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream * 2.0 only through the specified endpoints. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAccessEndpoints(java.util.Collection)} or {@link #withAccessEndpoints(java.util.Collection)} if you * want to override the existing values. * </p> * * @param accessEndpoints * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to * AppStream 2.0 only through the specified endpoints. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withAccessEndpoints(AccessEndpoint... accessEndpoints) { if (this.accessEndpoints == null) { setAccessEndpoints(new java.util.ArrayList<AccessEndpoint>(accessEndpoints.length)); } for (AccessEndpoint ele : accessEndpoints) { this.accessEndpoints.add(ele); } return this; } /** * <p> * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to AppStream * 2.0 only through the specified endpoints. * </p> * * @param accessEndpoints * The list of virtual private cloud (VPC) interface endpoint objects. Users of the stack can connect to * AppStream 2.0 only through the specified endpoints. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateStackRequest withAccessEndpoints(java.util.Collection<AccessEndpoint> accessEndpoints) { setAccessEndpoints(accessEndpoints); return this; } /** * Returns a string representation of this object. 
This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDisplayName() != null) sb.append("DisplayName: ").append(getDisplayName()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getStorageConnectors() != null) sb.append("StorageConnectors: ").append(getStorageConnectors()).append(","); if (getDeleteStorageConnectors() != null) sb.append("DeleteStorageConnectors: ").append(getDeleteStorageConnectors()).append(","); if (getRedirectURL() != null) sb.append("RedirectURL: ").append(getRedirectURL()).append(","); if (getFeedbackURL() != null) sb.append("FeedbackURL: ").append(getFeedbackURL()).append(","); if (getAttributesToDelete() != null) sb.append("AttributesToDelete: ").append(getAttributesToDelete()).append(","); if (getUserSettings() != null) sb.append("UserSettings: ").append(getUserSettings()).append(","); if (getApplicationSettings() != null) sb.append("ApplicationSettings: ").append(getApplicationSettings()).append(","); if (getAccessEndpoints() != null) sb.append("AccessEndpoints: ").append(getAccessEndpoints()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof UpdateStackRequest == false) return false; UpdateStackRequest other = (UpdateStackRequest) obj; if (other.getDisplayName() == null ^ this.getDisplayName() == null) return false; if (other.getDisplayName() != null && other.getDisplayName().equals(this.getDisplayName()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() 
!= null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getStorageConnectors() == null ^ this.getStorageConnectors() == null) return false; if (other.getStorageConnectors() != null && other.getStorageConnectors().equals(this.getStorageConnectors()) == false) return false; if (other.getDeleteStorageConnectors() == null ^ this.getDeleteStorageConnectors() == null) return false; if (other.getDeleteStorageConnectors() != null && other.getDeleteStorageConnectors().equals(this.getDeleteStorageConnectors()) == false) return false; if (other.getRedirectURL() == null ^ this.getRedirectURL() == null) return false; if (other.getRedirectURL() != null && other.getRedirectURL().equals(this.getRedirectURL()) == false) return false; if (other.getFeedbackURL() == null ^ this.getFeedbackURL() == null) return false; if (other.getFeedbackURL() != null && other.getFeedbackURL().equals(this.getFeedbackURL()) == false) return false; if (other.getAttributesToDelete() == null ^ this.getAttributesToDelete() == null) return false; if (other.getAttributesToDelete() != null && other.getAttributesToDelete().equals(this.getAttributesToDelete()) == false) return false; if (other.getUserSettings() == null ^ this.getUserSettings() == null) return false; if (other.getUserSettings() != null && other.getUserSettings().equals(this.getUserSettings()) == false) return false; if (other.getApplicationSettings() == null ^ this.getApplicationSettings() == null) return false; if (other.getApplicationSettings() != null && other.getApplicationSettings().equals(this.getApplicationSettings()) == false) return false; if (other.getAccessEndpoints() == null ^ this.getAccessEndpoints() == null) return false; if (other.getAccessEndpoints() != null && other.getAccessEndpoints().equals(this.getAccessEndpoints()) == 
false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDisplayName() == null) ? 0 : getDisplayName().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getStorageConnectors() == null) ? 0 : getStorageConnectors().hashCode()); hashCode = prime * hashCode + ((getDeleteStorageConnectors() == null) ? 0 : getDeleteStorageConnectors().hashCode()); hashCode = prime * hashCode + ((getRedirectURL() == null) ? 0 : getRedirectURL().hashCode()); hashCode = prime * hashCode + ((getFeedbackURL() == null) ? 0 : getFeedbackURL().hashCode()); hashCode = prime * hashCode + ((getAttributesToDelete() == null) ? 0 : getAttributesToDelete().hashCode()); hashCode = prime * hashCode + ((getUserSettings() == null) ? 0 : getUserSettings().hashCode()); hashCode = prime * hashCode + ((getApplicationSettings() == null) ? 0 : getApplicationSettings().hashCode()); hashCode = prime * hashCode + ((getAccessEndpoints() == null) ? 0 : getAccessEndpoints().hashCode()); return hashCode; } @Override public UpdateStackRequest clone() { return (UpdateStackRequest) super.clone(); } }
/** * Licensed to Apereo under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright ownership. Apereo * licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the License at the * following location: * * <p>http://www.apache.org/licenses/LICENSE-2.0 * * <p>Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package org.apereo.portal.security.oauth; import io.jsonwebtoken.Claims; import io.jsonwebtoken.Jws; import io.jsonwebtoken.JwtBuilder; import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureAlgorithm; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.PostConstruct; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; import org.apereo.portal.groups.IEntityGroup; import org.apereo.portal.groups.IGroupMember; import org.apereo.portal.security.IPerson; import org.apereo.portal.services.GroupService; import org.apereo.portal.soffit.Headers; import org.apereo.portal.soffit.service.AbstractJwtService; import org.apereo.services.persondir.IPersonAttributeDao; import org.apereo.services.persondir.IPersonAttributes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpHeaders; import 
org.springframework.stereotype.Component; /** * Produces OIDC ID Tokens for the OIDC userinfo endpoint. Supports nearly all the Standard Claims * as defined by OpenID Connect Core 1.0 (http://openid.net/specs/openid-connect-core-1_0.html). * * @since 5.1 */ @Component public class IdTokenFactory { private static final String LIST_SEPARATOR = ","; @Value("${portal.protocol.server.context}") private String issuer; @Autowired private IPersonAttributeDao personAttributeDao; @Value( "${" + AbstractJwtService.SIGNATURE_KEY_PROPERTY + ":" + AbstractJwtService.DEFAULT_SIGNATURE_KEY + "}") private String signatureKey; @Value("${org.apereo.portal.security.oauth.IdTokenFactory.timeoutSeconds:300}") private long timeoutSeconds; /* * OpenID Standard Claims * * Mapping to uPortal user attributes; defaults (where specified) are based on the Lightweight * Directory Access Protocol (LDAP): Schema for User Applications * (https://tools.ietf.org/html/rfc4519) and the eduPerson Object Class Specification * (http://software.internet2.edu/eduperson/internet2-mace-dir-eduperson-201310.html), except * 'username' and 'displayName' (which are a uPortal standards). 
*/ /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.name:displayName}") private String nameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.given_name:givenName}") private String givenNameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.family_name:sn}") private String familyNameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.middle_name:}") private String middleNameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.nickname:eduPersonNickname}") private String nicknameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.preferred_username:}") private String preferredUsernameAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.profile:}") private String profileAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.picture:}") private String pictureAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.website:}") private String websiteAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.email:mail}") private String emailAttr; /** JSON data type 'boolean' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.email_verified:}") private String emailVerifiedAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.gender:}") private String genderAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.birthdate:}") private String birthdateAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.zoneinfo:}") private String 
zoneinfoAttr; /** JSON data type 'string' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.locale:}") private String localeAttr; /** JSON data type 'string' */ @Value( "${org.apereo.portal.security.oauth.IdTokenFactory.mapping.phone_number:telephoneNumber}") private String phoneNumberAttr; /** JSON data type 'boolean' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.phone_number_verified:}") private String phoneNumberVerifiedAttr; /* * NB: The 'address' claim requires additional complexity b/c it's type is JSON object. In * light of that, and because most portals don't have address info in the user attributes * collection, we'll skip it (for now), */ /** JSON data type 'number' */ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.mapping.updated_at:}") private String updatedAtAttributeName; /* * uPortal Custom Claims */ /** * The custom claim <code>groups</code> may contain some or all of the user's group * affiliations. Use the Spring property <code> * org.apereo.portal.security.oauth.IdTokenFactory.groups.whitelist</code> to control which * portal groups are included in the claim. */ @Value( "${org.apereo.portal.security.oauth.IdTokenFactory.groups.whitelist:Students,Faculty,Staff,Portal Administrators}") private String groupsWhitelistProperty; /** * Additional user attributes in the portal may be included in the ID Token as custom claims. * Use the Spring property <code>org.apereo.portal.security.oauth.IdTokenFactory.customClaims * </code> to specify which additional attributes to include. The claim name will always be the * same as the attribute name. The JSON type of a custom claim will be inferred from it's value. 
*/ @Value("${org.apereo.portal.security.oauth.IdTokenFactory.customClaims:}") private String customClaimsProperty; private Set<ClaimMapping> mappings; private Set<String> groupsWhitelist; private Set<String> customClaims; private Logger logger = LoggerFactory.getLogger(getClass()); @PostConstruct public void init() { // Mappings for Standard Claims final Set<ClaimMapping> set = new HashSet<>(); set.add(new ClaimMapping("name", nameAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("given_name", givenNameAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("family_name", familyNameAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("middle_name", middleNameAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("nickname", nicknameAttr, DataTypeConverter.STRING)); set.add( new ClaimMapping( "preferred_username", preferredUsernameAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("profile", profileAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("picture", pictureAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("website", websiteAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("email", emailAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("email_verified", emailVerifiedAttr, DataTypeConverter.BOOLEAN)); set.add(new ClaimMapping("gender", genderAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("birthdate", birthdateAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("zoneinfo", zoneinfoAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("locale", localeAttr, DataTypeConverter.STRING)); set.add(new ClaimMapping("phone_number", phoneNumberAttr, DataTypeConverter.STRING)); set.add( new ClaimMapping( "phone_number_verified", phoneNumberVerifiedAttr, DataTypeConverter.BOOLEAN)); set.add(new ClaimMapping("updated_at", updatedAtAttributeName, DataTypeConverter.NUMBER)); mappings = Collections.unmodifiableSet(set); if (logger.isInfoEnabled()) { final StringBuilder msg = new 
StringBuilder(); msg.append("Using the following mappings for OIDC Standard Claims:"); set.forEach(mapping -> msg.append("\n\t").append(mapping)); logger.info(msg.toString()); } // Portal Groups ('groups' custom claim) groupsWhitelist = Collections.unmodifiableSet( Arrays.stream(groupsWhitelistProperty.split(LIST_SEPARATOR)) .map(String::trim) .filter(item -> item.length() != 0) .collect(Collectors.toSet())); logger.info( "Using the following portal groups to build the custom 'groups' claim: {}", groupsWhitelist); // Other Custom Claims (a.k.a user attributes) customClaims = Collections.unmodifiableSet( Arrays.stream(customClaimsProperty.split(LIST_SEPARATOR)) .map(String::trim) .filter(item -> item.length() != 0) .collect(Collectors.toSet())); logger.info("Using the following custom claims: {}", customClaims); } public String createUserInfo(String username) { return this.createUserInfo(username, null, null); } public String createUserInfo( String username, Set<String> claimsToInclude, Set<String> groupsToInclude) { final Date now = new Date(); final Date expires = new Date(now.getTime() + (timeoutSeconds * 1000L)); final JwtBuilder builder = Jwts.builder() .setIssuer(issuer) .setSubject(username) .setAudience(issuer) .setExpiration(expires) .setIssuedAt(now); final IPersonAttributes person = personAttributeDao.getPerson(username); // Attribute mappings mappings.stream() .filter(mapping -> includeClaim(mapping.getClaimName(), claimsToInclude)) .forEach( item -> { final Object value = person.getAttributeValue(item.getAttributeName()); if (value != null) { builder.claim( item.getClaimName(), item.getDataTypeConverter().convert(value)); } }); // Groups final List<String> groups = new ArrayList<>(); final IGroupMember groupMember = GroupService.getGroupMember(username, IPerson.class); if (groupMember != null) { final Set<IEntityGroup> ancestors = groupMember.getAncestorGroups(); for (IEntityGroup g : ancestors) { if (includeGroup(g, groupsToInclude)) { 
groups.add(g.getName()); } } } if (!groups.isEmpty()) { /* * If a Claim is not returned, that Claim Name SHOULD be omitted from the JSON object * representing the Claims; it SHOULD NOT be present with a null or empty string value. */ builder.claim("groups", groups); } // Default custom claims defined by uPortal.properties customClaims.stream() .filter(claimName -> includeClaim(claimName, claimsToInclude)) .map( attributeName -> new CustomClaim( attributeName, person.getAttributeValues(attributeName))) .filter(claim -> claim.getClaimValue() != null) .forEach(claim -> builder.claim(claim.getClaimName(), claim.getClaimValue())); final String rslt = builder.signWith(SignatureAlgorithm.HS512, signatureKey).compact(); logger.debug("Produced the following JWT for username='{}': {}", username, rslt); return rslt; } /** * Convenience method for obtaining an OIDC Id Token from a request, if present. * * @return A fully-parsed JWT if a valid bearer token is present in the <code>Authorization * </code> header, otherwise <code>null</code> */ public Jws<Claims> getUserInfo(HttpServletRequest request) { final String bearerToken = getBearerToken(request); return StringUtils.isNotBlank(bearerToken) ? parseBearerToken(bearerToken) : null; } public String getBearerToken(HttpServletRequest request) { final String authorization = request.getHeader(HttpHeaders.AUTHORIZATION); logger.debug("{} header value: {}", HttpHeaders.AUTHORIZATION, authorization); return StringUtils.isNotBlank(authorization) && authorization.length() > Headers.BEARER_TOKEN_PREFIX.length() ? 
authorization.substring(Headers.BEARER_TOKEN_PREFIX.length()) : null; } public Jws<Claims> parseBearerToken(String bearerToken) { try { return Jwts.parser().setSigningKey(signatureKey).parseClaimsJws(bearerToken); } catch (Exception e) { logger.warn("Unsupported bearerToken: {}", bearerToken); logger.debug("Stack trace", e); } return null; } private boolean includeClaim(String claimName, Set<String> claimsToInclude) { boolean rslt = true; // default if (claimsToInclude != null && !claimsToInclude.contains(claimName)) { /* * This group is included in the deployed configuration, * but is not wanted by the REST request. */ rslt = false; } return rslt; } private boolean includeGroup(IEntityGroup group, Set<String> groupsToInclude) { boolean rslt = groupsWhitelist.contains(group.getName()); // default if (rslt && groupsToInclude != null && !groupsToInclude.contains(group.getName())) { /* * This group is included in the deployed configuration, * but is not wanted by the REST request. */ rslt = false; } return rslt; } /* * Nested Types */ enum DataTypeConverter { STRING { @Override Object convert(Object inpt) { return inpt.toString(); } }, BOOLEAN { @Override Object convert(Object inpt) { return Boolean.valueOf(inpt.toString()); } }, NUMBER { @Override Object convert(Object inpt) { return new BigDecimal(inpt.toString()); } }; abstract Object convert(Object inpt); } private static final class ClaimMapping { private final String claimName; private final String attributeName; private final DataTypeConverter dataTypeConverter; public ClaimMapping( String claimName, String attributeName, DataTypeConverter dataTypeConverter) { this.claimName = claimName; this.attributeName = attributeName; this.dataTypeConverter = dataTypeConverter; } public String getClaimName() { return claimName; } public String getAttributeName() { return attributeName; } public DataTypeConverter getDataTypeConverter() { return dataTypeConverter; } @Override public String toString() { return "ClaimMapping{" 
+ "claimName='" + claimName + '\'' + ", attributeName='" + attributeName + '\'' + ", dataTypeConverter=" + dataTypeConverter + '}'; } } private static final class CustomClaim { private final String claimName; private final Object claimValue; public CustomClaim(String claimName, Object claimValue) { this.claimName = claimName; this.claimValue = claimValue; } public String getClaimName() { return claimName; } public Object getClaimValue() { return claimValue; } } }
/* * Artifactory is a binaries repository manager. * Copyright (C) 2012 JFrog Ltd. * * Artifactory is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Artifactory is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Artifactory. If not, see <http://www.gnu.org/licenses/>. */ package org.artifactory.common.wicket.component.panel.list; import com.google.common.collect.Lists; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxEventBehavior; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.IAjaxCallDecorator; import org.apache.wicket.extensions.markup.html.repeater.data.sort.SortOrder; import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn; import org.apache.wicket.extensions.markup.html.repeater.util.SortableDataProvider; import org.apache.wicket.markup.html.link.AbstractLink; import org.apache.wicket.markup.repeater.Item; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.artifactory.common.wicket.ajax.CancelDefaultDecorator; import org.artifactory.common.wicket.ajax.ConfirmationAjaxCallDecorator; import org.artifactory.common.wicket.behavior.CssClass; import org.artifactory.common.wicket.component.PlaceHolder; import org.artifactory.common.wicket.component.links.TitledAjaxLink; import org.artifactory.common.wicket.component.panel.titled.TitledPanel; import org.artifactory.common.wicket.component.table.SortableTable; import org.artifactory.common.wicket.component.table.columns.LinksColumn; import 
org.artifactory.common.wicket.util.ListPropertySorter;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
 * Contains most of the implementations of the former ListPanel (Now ModalListPanel). Enables separation between list
 * panel and the action link behavior.
 *
 * @author Noam Tenne
 */
public abstract class BaseListPanel<T extends Serializable> extends TitledPanel {
    protected static final int DEFAULT_ROWS_PER_PAGE = 15;

    // Sort property applied when the table is first rendered; computed in init() from the columns.
    protected String defaultInitialSortProperty;
    protected AbstractLink newItemLink;
    protected SortableDataProvider<T> dataProvider;

    protected BaseListPanel(String id) {
        super(id);
        // NOTE(review): defaultInitialSortProperty is still null here (init() assigns it later),
        // so the DefaultSortableDataProvider constructor's setSort() call never fires on this
        // path -- confirm whether an initial sort was intended for the default provider.
        dataProvider = new DefaultSortableDataProvider();
    }

    protected BaseListPanel(String id, SortableDataProvider<T> dataProvider) {
        super(id);
        this.dataProvider = dataProvider;
    }

    public final SortableDataProvider getDataProvider() {
        return dataProvider;
    }

    /**
     * Builds the panel components: the "New" link, the optional export link, and the sortable
     * table with a links column prepended to the subclass-supplied columns.
     */
    protected void init() {
        add(new CssClass("list-panel"));

        // add new item link
        newItemLink = getNewItemLink("newItemLink", "New");
        add(newItemLink);

        Component exportItemLink = newExportLink("export");
        add(exportItemLink);

        // add table
        List<IColumn<T>> columns = Lists.newArrayList();
        addLinksColumn(columns);
        addColumns(columns);

        // by default use the sort property of the second column
        if (columns.size() > 1) {
            defaultInitialSortProperty = columns.get(1).getSortProperty();
        }
        add(new MySortableTable(columns, dataProvider));
    }

    /**
     * @return The property by which the table will initially be sorted by.
     */
    protected String getInitialSortProperty() {
        return defaultInitialSortProperty;
    }

    /** Prepends the per-row action-links column (edit/delete by default). */
    protected void addLinksColumn(List<? super IColumn<T>> columns) {
        columns.add(new LinksColumn<T>() {
            @Override
            protected Collection<? extends AbstractLink> getLinks(T rowObject, String linkId) {
                List<AbstractLink> links = new ArrayList<>();
                addLinks(links, rowObject, linkId);
                return links;
            }
        });
    }

    protected int getItemsPerPage() {
        return DEFAULT_ROWS_PER_PAGE;
    }

    protected void disableNewItemLink() {
        newItemLink.setEnabled(false);
    }

    @SuppressWarnings({"AbstractMethodOverridesConcreteMethod"})
    @Override
    public abstract String getTitle();

    /** @return The full (unpaged) list of items backing the table. */
    protected abstract List<T> getList();

    protected abstract void addColumns(List<? super IColumn<T>> columns);

    protected abstract String getDeleteConfirmationText(T itemObject);

    protected abstract void deleteItem(T itemObject, AjaxRequestTarget target);

    protected abstract AbstractLink getNewItemLink(String linkId, String linkTitle);

    /** Override to replace the default no-op placeholder with a real export link. */
    protected Component newExportLink(String id) {
        return new PlaceHolder(id);
    }

    protected abstract TitledAjaxLink getEditItemLink(final T itemObject, String linkId);

    /** Invoked when a row is double-clicked (see MySortableTable.newRowItem). */
    protected abstract void onRowItemEvent(String id, int index, final IModel model, AjaxRequestTarget target);

    /**
     * Default provider: re-fetches and sorts the full list on every page request, then serves
     * the requested window of rows.
     */
    private class DefaultSortableDataProvider extends SortableDataProvider<T> {
        private DefaultSortableDataProvider() {
            if (defaultInitialSortProperty != null) {
                setSort(defaultInitialSortProperty, SortOrder.ASCENDING);
            }
        }

        @Override
        public Iterator<? extends T> iterator(int first, int count) {
            List<T> items = getList();
            ListPropertySorter.sort(items, getSort());
            /*
             * Clamp the window to the actual list size.  getList() is invoked again here and may
             * return fewer items than when size() was computed; an unguarded
             * subList(first, first + count) then throws IndexOutOfBoundsException on the last page.
             */
            int toIndex = Math.min(first + count, items.size());
            int fromIndex = Math.min(first, toIndex);
            List<T> itemsSubList = items.subList(fromIndex, toIndex);
            return itemsSubList.iterator();
        }

        @Override
        public int size() {
            return getList().size();
        }

        @Override
        public IModel<T> model(T object) {
            return new Model<>(object);
        }
    }

    public SortableTable getTable() {
        return (SortableTable) get("table");
    }

    /** Sortable table that wires an AJAX double-click handler onto each row. */
    private class MySortableTable extends SortableTable<T> {
        private MySortableTable(List<IColumn<T>> columns, SortableDataProvider<T> dataProvider) {
            super("table", columns, dataProvider, BaseListPanel.this.getItemsPerPage());
        }

        @Override
        protected Item<T> newRowItem(final String id, final int index, final IModel<T> model) {
            Item<T> item = super.newRowItem(id, index, model);
            if (canAddRowItemDoubleClickBehavior(model)) {
                item.add(new AjaxEventBehavior("ondblclick") {
                    @Override
                    protected void onEvent(AjaxRequestTarget target) {
                        onRowItemEvent(id, index, model, target);
                    }

                    @Override
                    protected IAjaxCallDecorator getAjaxCallDecorator() {
                        // Prevent the browser's default dblclick handling (e.g. text selection)
                        return new CancelDefaultDecorator();
                    }
                });
            }
            return item;
        }
    }

    /**
     * Controls whether the double-click link behavior should be added the row item
     *
     * @param model Model to check with
     * @return True if the double-click behavior should added to the given row item
     */
    protected boolean canAddRowItemDoubleClickBehavior(IModel<T> model) {
        return true;
    }

    /** Populates the per-row action links: always an edit link, plus delete when permitted. */
    protected void addLinks(List<AbstractLink> links, final T itemObject, String linkId) {
        // add edit link
        TitledAjaxLink editLink = getEditItemLink(itemObject, linkId);
        editLink.add(new CssClass("icon-link"));
        editLink.add(new CssClass("UpdateAction"));
        links.add(editLink);

        // add delete link
        if (canAddDeleteItemLink(itemObject)) {
            TitledAjaxLink deleteLink = new TitledAjaxLink(linkId, "Delete") {
                @Override
                public void onClick(AjaxRequestTarget target) {
                    deleteItem(itemObject, target);
                    target.add(BaseListPanel.this);
                }

                @Override
                protected IAjaxCallDecorator getAjaxCallDecorator() {
                    // Ask for confirmation before performing the deletion
                    return new ConfirmationAjaxCallDecorator(getDeleteConfirmationText(itemObject));
                }
            };
            deleteLink.add(new CssClass("icon-link"));
            deleteLink.add(new CssClass("DeleteAction"));
            links.add(deleteLink);
        }
    }

    protected boolean canAddDeleteItemLink(T itemObject) {
        return true;
    }
}
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.primitives; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import java.io.Serializable; import java.util.AbstractList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.RandomAccess; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkElementIndex; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkPositionIndexes; /** * Static utility methods pertaining to {@code long} primitives, that are not * already found in either {@link Long} or {@link Arrays}. * * @author Kevin Bourrillion * @since 2009.09.15 <b>tentative</b> */ @GwtCompatible public final class Longs { private Longs() {} /** * The number of bytes required to represent a primitive {@code long} * value. */ public static final int BYTES = Long.SIZE / Byte.SIZE; /** * Returns a hash code for {@code value}; equal to the result of invoking * {@code ((Long) value).hashCode()}. * * @param value a primitive {@code long} value * @return a hash code for the value */ public static int hashCode(long value) { return (int) (value ^ (value >>> 32)); } /** * Compares the two specified {@code long} values. 
The sign of the value * returned is the same as that of {@code ((Long) a).compareTo(b)}. * * @param a the first {@code long} to compare * @param b the second {@code long} to compare * @return a negative value if {@code a} is less than {@code b}; a positive * value if {@code a} is greater than {@code b}; or zero if they are equal */ public static int compare(long a, long b) { return (a < b) ? -1 : ((a > b) ? 1 : 0); } /** * Returns {@code true} if {@code target} is present as an element anywhere in * {@code array}. * * @param array an array of {@code long} values, possibly empty * @param target a primitive {@code long} value * @return {@code true} if {@code array[i] == target} for some value of {@code * i} */ public static boolean contains(long[] array, long target) { for (long value : array) { if (value == target) { return true; } } return false; } /** * Returns the index of the first appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code long} values, possibly empty * @param target a primitive {@code long} value * @return the least index {@code i} for which {@code array[i] == target}, or * {@code -1} if no such index exists. */ public static int indexOf(long[] array, long target) { return indexOf(array, target, 0, array.length); } // TODO: consider making this public private static int indexOf( long[] array, long target, int start, int end) { for (int i = start; i < end; i++) { if (array[i] == target) { return i; } } return -1; } /** * Returns the start position of the first occurrence of the specified {@code * target} within {@code array}, or {@code -1} if there is no such occurrence. * * <p>More formally, returns the lowest index {@code i} such that {@code * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly * the same elements as {@code target}. 
* * @param array the array to search for the sequence {@code target} * @param target the array to search for as a sub-sequence of {@code array} */ public static int indexOf(long[] array, long[] target) { checkNotNull(array, "array"); checkNotNull(target, "target"); if (target.length == 0) { return 0; } outer: for (int i = 0; i < array.length - target.length + 1; i++) { for (int j = 0; j < target.length; j++) { if (array[i + j] != target[j]) { continue outer; } } return i; } return -1; } /** * Returns the index of the last appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code long} values, possibly empty * @param target a primitive {@code long} value * @return the greatest index {@code i} for which {@code array[i] == target}, * or {@code -1} if no such index exists. */ public static int lastIndexOf(long[] array, long target) { return lastIndexOf(array, target, 0, array.length); } // TODO: consider making this public private static int lastIndexOf( long[] array, long target, int start, int end) { for (int i = end - 1; i >= start; i--) { if (array[i] == target) { return i; } } return -1; } /** * Returns the least value present in {@code array}. * * @param array a <i>nonempty</i> array of {@code long} values * @return the value present in {@code array} that is less than or equal to * every other value in the array * @throws IllegalArgumentException if {@code array} is empty */ public static long min(long... array) { checkArgument(array.length > 0); long min = array[0]; for (int i = 1; i < array.length; i++) { if (array[i] < min) { min = array[i]; } } return min; } /** * Returns the greatest value present in {@code array}. * * @param array a <i>nonempty</i> array of {@code long} values * @return the value present in {@code array} that is greater than or equal to * every other value in the array * @throws IllegalArgumentException if {@code array} is empty */ public static long max(long... 
array) { checkArgument(array.length > 0); long max = array[0]; for (int i = 1; i < array.length; i++) { if (array[i] > max) { max = array[i]; } } return max; } /** * Returns the values from each provided array combined into a single array. * For example, {@code concat(new long[] {a, b}, new long[] {}, new * long[] {c}} returns the array {@code {a, b, c}}. * * @param arrays zero or more {@code long} arrays * @return a single array containing all the values from the source arrays, in * order */ public static long[] concat(long[]... arrays) { int length = 0; for (long[] array : arrays) { length += array.length; } long[] result = new long[length]; int pos = 0; for (long[] array : arrays) { System.arraycopy(array, 0, result, pos, array.length); pos += array.length; } return result; } /** * Returns a big-endian representation of {@code value} in an 8-element byte * array; equivalent to {@code ByteBuffer.allocate(8).putLong(value).array()}. * For example, the input value {@code 0x1213141516171819L} would yield the * byte array {@code {0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19}}. * * <p>If you need to convert and concatenate several values (possibly even of * different types), use a shared {@link java.nio.ByteBuffer} instance, or use * {@link com.google.common.io.ByteStreams#newDataOutput()} to get a growable * buffer. * * <p><b>Warning:</b> do not use this method in GWT. It returns wrong answers. */ @GwtIncompatible("doesn't work") public static byte[] toByteArray(long value) { return new byte[] { (byte) (value >> 56), (byte) (value >> 48), (byte) (value >> 40), (byte) (value >> 32), (byte) (value >> 24), (byte) (value >> 16), (byte) (value >> 8), (byte) value}; } /** * Returns the {@code long} value whose big-endian representation is * stored in the first 8 bytes of {@code bytes}; equivalent to {@code * ByteBuffer.wrap(bytes).getLong()}. 
For example, the input byte array * {@code {0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19}} would yield the * {@code long} value {@code 0x1213141516171819L}. * * <p>Arguably, it's preferable to use {@link java.nio.ByteBuffer}; that * library exposes much more flexibility at little cost in readability. * * <p><b>Warning:</b> do not use this method in GWT. It returns wrong answers. * * @throws IllegalArgumentException if {@code bytes} has fewer than 8 * elements */ @GwtIncompatible("doesn't work") public static long fromByteArray(byte[] bytes) { checkArgument(bytes.length >= BYTES, "array too small: %s < %s", bytes.length, BYTES); return (bytes[0] & 0xFFL) << 56 | (bytes[1] & 0xFFL) << 48 | (bytes[2] & 0xFFL) << 40 | (bytes[3] & 0xFFL) << 32 | (bytes[4] & 0xFFL) << 24 | (bytes[5] & 0xFFL) << 16 | (bytes[6] & 0xFFL) << 8 | (bytes[7] & 0xFFL); } /** * Returns an array containing the same values as {@code array}, but * guaranteed to be of a specified minimum length. If {@code array} already * has a length of at least {@code minLength}, it is returned directly. * Otherwise, a new array of size {@code minLength + padding} is returned, * containing the values of {@code array}, and zeroes in the remaining places. * * @param array the source array * @param minLength the minimum length the returned array must guarantee * @param padding an extra amount to "grow" the array by if growth is * necessary * @throws IllegalArgumentException if {@code minLength} or {@code padding} is * negative * @return an array containing the values of {@code array}, with guaranteed * minimum length {@code minLength} */ public static long[] ensureCapacity( long[] array, int minLength, int padding) { checkArgument(minLength >= 0, "Invalid minLength: %s", minLength); checkArgument(padding >= 0, "Invalid padding: %s", padding); return (array.length < minLength) ? 
copyOf(array, minLength + padding) : array; } // Arrays.copyOf() requires Java 6 private static long[] copyOf(long[] original, int length) { long[] copy = new long[length]; System.arraycopy(original, 0, copy, 0, Math.min(original.length, length)); return copy; } /** * Returns a string containing the supplied {@code long} values separated * by {@code separator}. For example, {@code join("-", 1L, 2L, 3L)} returns * the string {@code "1-2-3"}. * * @param separator the text that should appear between consecutive values in * the resulting string (but not at the start or end) * @param array an array of {@code long} values, possibly empty */ public static String join(String separator, long... array) { checkNotNull(separator); if (array.length == 0) { return ""; } // For pre-sizing a builder, just get the right order of magnitude StringBuilder builder = new StringBuilder(array.length * 10); builder.append(array[0]); for (int i = 1; i < array.length; i++) { builder.append(separator).append(array[i]); } return builder.toString(); } /** * Returns a comparator that compares two {@code long} arrays * lexicographically. That is, it compares, using {@link * #compare(long, long)}), the first pair of values that follow any * common prefix, or when one array is a prefix of the other, treats the * shorter array as the lesser. For example, * {@code [] < [1L] < [1L, 2L] < [2L]}. * * <p>The returned comparator is inconsistent with {@link * Object#equals(Object)} (since arrays support only identity equality), but * it is consistent with {@link Arrays#equals(long[], long[])}. 
* * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order"> * Lexicographical order</a> article at Wikipedia * @since 2010.01.04 <b>tentative</b> */ public static Comparator<long[]> lexicographicalComparator() { return LexicographicalComparator.INSTANCE; } private enum LexicographicalComparator implements Comparator<long[]> { INSTANCE; public int compare(long[] left, long[] right) { int minLength = Math.min(left.length, right.length); for (int i = 0; i < minLength; i++) { int result = Longs.compare(left[i], right[i]); if (result != 0) { return result; } } return left.length - right.length; } } /** * Copies a collection of {@code Long} instances into a new array of * primitive {@code long} values. * * <p>Elements are copied from the argument collection as if by {@code * collection.toArray()}. Calling this method is as thread-safe as calling * that method. * * @param collection a collection of {@code Long} objects * @return an array containing the same values as {@code collection}, in the * same order, converted to primitives * @throws NullPointerException if {@code collection} or any of its elements * is null */ public static long[] toArray(Collection<Long> collection) { if (collection instanceof LongArrayAsList) { return ((LongArrayAsList) collection).toLongArray(); } Object[] boxedArray = collection.toArray(); int len = boxedArray.length; long[] array = new long[len]; for (int i = 0; i < len; i++) { array[i] = (Long) boxedArray[i]; } return array; } /** * Returns a fixed-size list backed by the specified array, similar to {@link * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, * but any attempt to set a value to {@code null} will result in a {@link * NullPointerException}. * * <p>The returned list maintains the values, but not the identities, of * {@code Long} objects written to or read from it. For example, whether * {@code list.get(0) == list.get(0)} is true for the returned list is * unspecified. 
   *
   * @param backingArray the array to back the list
   * @return a list view of the array
   */
  public static List<Long> asList(long... backingArray) {
    if (backingArray.length == 0) {
      return Collections.emptyList();
    }
    return new LongArrayAsList(backingArray);
  }

  // Fixed-size List view over a primitive long[] range [start, end). The
  // backing array is shared, so set() writes through to the caller's array.
  @GwtCompatible
  private static class LongArrayAsList extends AbstractList<Long>
      implements RandomAccess, Serializable {
    final long[] array; // shared backing array
    final int start;    // inclusive
    final int end;      // exclusive

    LongArrayAsList(long[] array) {
      this(array, 0, array.length);
    }

    LongArrayAsList(long[] array, int start, int end) {
      this.array = array;
      this.start = start;
      this.end = end;
    }

    @Override public int size() {
      return end - start;
    }

    @Override public boolean isEmpty() {
      // asList() never constructs an instance over an empty range, and
      // subList() returns Collections.emptyList() for an empty slice.
      return false;
    }

    @Override public Long get(int index) {
      checkElementIndex(index, size());
      return array[start + index];
    }

    @Override public boolean contains(Object target) {
      // Overridden to prevent a ton of boxing
      return (target instanceof Long)
          && Longs.indexOf(array, (Long) target, start, end) != -1;
    }

    @Override public int indexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Long) {
        int i = Longs.indexOf(array, (Long) target, start, end);
        if (i >= 0) {
          return i - start; // convert backing-array index to list index
        }
      }
      return -1;
    }

    @Override public int lastIndexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Long) {
        int i = Longs.lastIndexOf(array, (Long) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override public Long set(int index, Long element) {
      checkElementIndex(index, size());
      long oldValue = array[start + index];
      array[start + index] = element; // auto-unboxing throws NPE on null
      return oldValue;
    }

    /** In GWT, List and AbstractList do not have the subList method.
     */
    /*@Override*/ // annotation commented out: GWT's emulation lacks subList (see above)
    public List<Long> subList(int fromIndex, int toIndex) {
      int size = size();
      checkPositionIndexes(fromIndex, toIndex, size);
      if (fromIndex == toIndex) {
        return Collections.emptyList();
      }
      return new LongArrayAsList(array, start + fromIndex, start + toIndex);
    }

    @Override public boolean equals(Object object) {
      if (object == this) {
        return true;
      }
      if (object instanceof LongArrayAsList) {
        // Fast path: compare the primitive arrays directly, no boxing.
        LongArrayAsList that = (LongArrayAsList) object;
        int size = size();
        if (that.size() != size) {
          return false;
        }
        for (int i = 0; i < size; i++) {
          if (array[start + i] != that.array[that.start + i]) {
            return false;
          }
        }
        return true;
      }
      return super.equals(object);
    }

    @Override public int hashCode() {
      // Mirrors the List.hashCode() contract using Longs.hashCode per element.
      int result = 1;
      for (int i = start; i < end; i++) {
        result = 31 * result + Longs.hashCode(array[i]);
      }
      return result;
    }

    @Override public String toString() {
      // size() * 10 is just an order-of-magnitude pre-size for the builder.
      StringBuilder builder = new StringBuilder(size() * 10);
      builder.append('[').append(array[start]);
      for (int i = start + 1; i < end; i++) {
        builder.append(", ").append(array[i]);
      }
      return builder.append(']').toString();
    }

    long[] toLongArray() {
      // Arrays.copyOfRange() requires Java 6
      int size = size();
      long[] result = new long[size];
      System.arraycopy(array, start, result, 0, size);
      return result;
    }

    private static final long serialVersionUID = 0;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.render; import java.awt.Color; import java.awt.Rectangle; import java.awt.geom.AffineTransform; import java.awt.geom.Rectangle2D; import java.util.List; import java.util.Map; import org.w3c.dom.Document; import org.apache.batik.parser.AWTTransformProducer; import org.apache.xmlgraphics.image.loader.ImageSize; import org.apache.xmlgraphics.util.QName; import org.apache.xmlgraphics.util.UnitConv; import org.apache.fop.apps.FOUserAgent; import org.apache.fop.area.Area; import org.apache.fop.area.Block; import org.apache.fop.area.BlockViewport; import org.apache.fop.area.CTM; import org.apache.fop.area.NormalFlow; import org.apache.fop.area.RegionReference; import org.apache.fop.area.RegionViewport; import org.apache.fop.area.Trait; import org.apache.fop.area.inline.ForeignObject; import org.apache.fop.area.inline.InlineArea; import org.apache.fop.area.inline.InlineViewport; import org.apache.fop.fo.Constants; import org.apache.fop.fo.extensions.ExtensionElementMapping; import org.apache.fop.fonts.FontMetrics; import org.apache.fop.traits.BorderProps; /** * Abstract base class for renderers like PDF and PostScript where many painting operations * follow similar patterns 
which makes it possible to share some code.
 */
public abstract class AbstractPathOrientedRenderer extends PrintRenderer {

    /**
     * @param userAgent the user agent that contains configuration details. This cannot be null.
     */
    public AbstractPathOrientedRenderer(FOUserAgent userAgent) {
        super(userAgent);
    }

    /**
     * Handle block traits.
     * The block could be any sort of block with any positioning
     * so this should render the traits such as border and background
     * in its position.
     *
     * @param block the block to render the traits
     */
    protected void handleBlockTraits(Block block) {
        // Trait widths come in millipoints; all floats below are points.
        float borderPaddingStart = block.getBorderAndPaddingWidthStart() / 1000f;
        float borderPaddingEnd = block.getBorderAndPaddingWidthEnd() / 1000f;
        float borderPaddingBefore = block.getBorderAndPaddingWidthBefore() / 1000f;
        float borderPaddingAfter = block.getBorderAndPaddingWidthAfter() / 1000f;

        float startx = currentIPPosition / 1000f;
        float starty = currentBPPosition / 1000f;
        float width = block.getIPD() / 1000f;
        float height = block.getBPD() / 1000f;

        int level = block.getBidiLevel();
        // Even (or unset, -1) bidi level: indent from the start edge;
        // odd level: indent from the end edge.
        if ((level == -1) || ((level & 1) == 0)) {
            startx += block.getStartIndent() / 1000f;
            startx -= borderPaddingStart;
        } else {
            startx += block.getEndIndent() / 1000f;
            startx -= borderPaddingEnd;
        }

        // Grow the painted rectangle so it also covers borders and padding.
        width += borderPaddingStart;
        width += borderPaddingEnd;
        height += borderPaddingBefore;
        height += borderPaddingAfter;

        drawBackAndBorders(block, startx, starty, width, height);
    }

    /**
     * Handle the traits for a region
     * This is used to draw the traits for the given page region.
     * (See Sect. 6.4.1.2 of XSL-FO spec.)
     * @param region the RegionViewport whose region is to be drawn
     */
    protected void handleRegionTraits(RegionViewport region) {
        Rectangle2D viewArea = region.getViewArea();
        RegionReference referenceArea = region.getRegionReference();
        // View-area coordinates are in millipoints; convert to points.
        float startx = (float)(viewArea.getX() / 1000f);
        float starty = (float)(viewArea.getY() / 1000f);
        float width = (float)(viewArea.getWidth() / 1000f);
        float height = (float)(viewArea.getHeight() / 1000f);

        // adjust the current position according to region borders and padding
        currentBPPosition = referenceArea.getBorderAndPaddingWidthBefore();
        int level = region.getBidiLevel();
        if ((level == -1) || ((level & 1) == 0)) {
            currentIPPosition = referenceArea.getBorderAndPaddingWidthStart();
        } else {
            currentIPPosition = referenceArea.getBorderAndPaddingWidthEnd();
        }

        // draw background (traits are in the RegionViewport)
        // and borders (traits are in the RegionReference)
        drawBackAndBorders(region, referenceArea, startx, starty, width, height);
    }

    /**
     * Draw the background and borders.
     * This draws the background and border traits for an area given
     * the position.
     *
     * @param area the area to get the traits from
     * @param startx the start x position
     * @param starty the start y position
     * @param width the width of the area
     * @param height the height of the area
     */
    protected void drawBackAndBorders(Area area,
            float startx, float starty, float width, float height) {
        // Background and border traits both come from the same area here.
        drawBackAndBorders(area, area, startx, starty, width, height);
    }

    /**
     * Draw the background and borders.
     * This draws the background and border traits for an area given
     * the position.
* * @param backgroundArea the area to get the background traits from * @param borderArea the area to get the border traits from * @param startx the start x position * @param starty the start y position * @param width the width of the area * @param height the height of the area */ protected void drawBackAndBorders(Area backgroundArea, Area borderArea, float startx, float starty, float width, float height) { // draw background then border BorderProps bpsBefore = (BorderProps)borderArea.getTrait(Trait.BORDER_BEFORE); BorderProps bpsAfter = (BorderProps)borderArea.getTrait(Trait.BORDER_AFTER); BorderProps bpsStart = (BorderProps)borderArea.getTrait(Trait.BORDER_START); BorderProps bpsEnd = (BorderProps)borderArea.getTrait(Trait.BORDER_END); Trait.Background backgroundTrait = (Trait.Background)backgroundArea.getTrait(Trait.BACKGROUND); drawBackground(startx, starty, width, height, (Trait.Background) backgroundArea.getTrait(Trait.BACKGROUND), bpsBefore, bpsAfter, bpsStart, bpsEnd, backgroundArea.getBidiLevel()); // TODO what is the default bg color? Should we serialize it? Color bg = Color.white; if (backgroundTrait != null && backgroundTrait.getColor() != null) { bg = backgroundTrait.getColor(); } drawBorders(startx, starty, width, height, bpsBefore, bpsAfter, bpsStart, bpsEnd, backgroundArea.getBidiLevel(), bg); } /** * Draw the background. * This draws the background given the position and the traits. 
     *
     * @param startx the start x position
     * @param starty the start y position
     * @param width the width of the area
     * @param height the height of the area
     * @param back the background traits
     * @param bpsBefore the border-before traits
     * @param bpsAfter the border-after traits
     * @param bpsStart the border-start traits
     * @param bpsEnd the border-end traits
     * @param level of bidirectional embedding
     */
    protected void drawBackground(float startx, float starty, float width, float height,
            Trait.Background back,
            BorderProps bpsBefore, BorderProps bpsAfter,
            BorderProps bpsStart, BorderProps bpsEnd,
            int level) {
        // Map writing-mode-relative edges (before/after/start/end) to absolute
        // edges (top/bottom/left/right) according to the bidi embedding level.
        BorderProps bpsTop = bpsBefore;
        BorderProps bpsBottom = bpsAfter;
        BorderProps bpsLeft;
        BorderProps bpsRight;
        if ((level == -1) || ((level & 1) == 0)) {
            // Even (or unset) level: left-to-right.
            bpsLeft = bpsStart;
            bpsRight = bpsEnd;
        } else {
            // Odd level: right-to-left, so start/end swap sides.
            bpsLeft = bpsEnd;
            bpsRight = bpsStart;
        }
        drawBackground(startx, starty, width, height, back,
                bpsTop, bpsBottom, bpsLeft, bpsRight);
    }

    /**
     * Draw the background.
     * This draws the background given the position and the traits.
     *
     * @param startx the start x position
     * @param starty the start y position
     * @param width the width of the area
     * @param height the height of the area
     * @param back the background traits
     * @param bpsTop the border specification on the top edge
     * @param bpsBottom the border traits associated with bottom edge
     * @param bpsLeft the border specification on the left edge
     * @param bpsRight the border specification on the right edge
     */
    protected void drawBackground(float startx, float starty, float width, float height,
            Trait.Background back,
            BorderProps bpsTop, BorderProps bpsBottom,
            BorderProps bpsLeft, BorderProps bpsRight) {
        if (back != null) {
            endTextObject();

            //Calculate padding rectangle
            float sx = startx;
            float sy = starty;
            float paddRectWidth = width;
            float paddRectHeight = height;
            // Shrink the rectangle by each present border's width so only the
            // area inside the borders is painted.
            if (bpsLeft != null) {
                sx += bpsLeft.width / 1000f;
                paddRectWidth -= bpsLeft.width / 1000f;
            }
            if (bpsTop != null) {
                sy += bpsTop.width / 1000f;
                paddRectHeight -= bpsTop.width / 1000f;
            }
            if (bpsRight != null) {
                paddRectWidth -= bpsRight.width / 1000f;
            }
            if (bpsBottom != null) {
                paddRectHeight -= bpsBottom.width / 1000f;
            }
            saveGraphicsState();
            clipBackground(sx, sy, paddRectWidth, paddRectHeight,
                    bpsTop, bpsBottom, bpsLeft, bpsRight);
            if (back.getColor() != null) {
                updateColor(back.getColor(), true);
                fillRect(sx, sy, paddRectWidth, paddRectHeight);
            }
            if (back.getImageInfo() != null) {
                ImageSize imageSize = back.getImageInfo().getSize();
                int targetWidth = imageSize.getWidthMpt();
                int targetHeight = imageSize.getHeightMpt();
                // Scale uniformly toward an explicit target width/height;
                // a value of 0 means "not specified".
                double multiplier = 1.0;
                if (back.getImageTargetWidth() != 0 && back.getImageTargetHeight() != 0) {
                    multiplier = Math.min(
                            1.0 * back.getImageTargetWidth() / targetWidth,
                            1.0 * back.getImageTargetHeight() / targetHeight);
                } else if (back.getImageTargetHeight() != 0) {
                    multiplier = 1.0 * back.getImageTargetHeight() / targetHeight;
                } else if (back.getImageTargetWidth() != 0) {
                    multiplier = 1.0 * back.getImageTargetWidth() / targetWidth;
                }
                targetWidth = (int) (targetWidth *
                        multiplier);
                targetHeight = (int) (targetHeight * multiplier);
                // Tile repetitions needed to cover the padding rectangle
                // (padding rect is in points, target sizes in millipoints).
                int horzCount = (int) ((paddRectWidth * 1000 / targetWidth) + 1.0f);
                int vertCount = (int) ((paddRectHeight * 1000 / targetHeight) + 1.0f);
                if (back.getRepeat() == EN_NOREPEAT) {
                    horzCount = 1;
                    vertCount = 1;
                } else if (back.getRepeat() == EN_REPEATX) {
                    vertCount = 1;
                } else if (back.getRepeat() == EN_REPEATY) {
                    horzCount = 1;
                }
                //change from points to millipoints
                sx *= 1000;
                sy *= 1000;
                // A non-repeating axis honors the background-position offset.
                if (horzCount == 1) {
                    sx += back.getHoriz();
                }
                if (vertCount == 1) {
                    sy += back.getVertical();
                }
                for (int x = 0; x < horzCount; x++) {
                    for (int y = 0; y < vertCount; y++) {
                        // place once
                        Rectangle2D pos;
                        // Image positions are relative to the currentIP/BP
                        pos = new Rectangle2D.Float(
                                sx - currentIPPosition + (x * targetWidth),
                                sy - currentBPPosition + (y * targetHeight),
                                targetWidth, targetHeight);
                        drawImage(back.getURL(), pos);
                    }
                }
            }
            restoreGraphicsState();
        }
    }

    /**
     * TODO represent border related parameters in a class
     * Clip the background to the inner border.
     * This draws the border traits given the position and the traits.
     *
     * @param startx the start x position
     * @param starty the start y position
     * @param width the width of the area
     * @param height the height of the area
     * @param bpsBefore the border-before traits
     * @param bpsAfter the border-after traits
     * @param bpsStart the border-start traits
     * @param bpsEnd the border-end traits
     */
    protected void clipBackground(float startx, float starty, float width, float height,
            BorderProps bpsBefore, BorderProps bpsAfter,
            BorderProps bpsStart, BorderProps bpsEnd) {
        // Base implementation: a plain rectangular clip; the border traits are
        // unused here but are part of the overridable contract.
        clipRect(startx, starty, width, height);
    }

    /**
     * Draw the borders.
     * This draws the border traits given the position and the traits.
     *
     * @param startx the start x position
     * @param starty the start y position
     * @param width the width of the area
     * @param height the height of the area
     * @param bpsBefore the border traits associated with before edge
     * @param bpsAfter the border traits associated with after edge
     * @param bpsStart the border traits associated with start edge
     * @param bpsEnd the border traits associated with end edge
     * @param level of bidirectional embedding
     * @param innerBackgroundColor the background color of the block
     */
    protected void drawBorders(float startx, float starty, float width, float height,
            BorderProps bpsBefore, BorderProps bpsAfter,
            BorderProps bpsStart, BorderProps bpsEnd,
            int level, Color innerBackgroundColor) {
        Rectangle2D.Float borderRect = new Rectangle2D.Float(startx, starty, width, height);
        // Map writing-mode-relative edges to absolute edges based on the bidi
        // embedding level (even/unset = LTR, odd = RTL).
        BorderProps bpsTop = bpsBefore;
        BorderProps bpsBottom = bpsAfter;
        BorderProps bpsLeft;
        BorderProps bpsRight;
        if ((level == -1) || ((level & 1) == 0)) {
            bpsLeft = bpsStart;
            bpsRight = bpsEnd;
        } else {
            bpsLeft = bpsEnd;
            bpsRight = bpsStart;
        }
        drawBorders(borderRect, bpsTop, bpsBottom, bpsLeft, bpsRight, innerBackgroundColor);
    }

    // Indexes into the edge-ordered arrays (border[], borderWidth[], clipw[],
    // slant[]) used by drawBorders(Rectangle2D.Float, ...).
    private static final int TOP = 0;
    private static final int RIGHT = 1;
    private static final int BOTTOM = 2;
    private static final int LEFT = 3;

    /**
     * Draws borders.
     * @param borderRect the border rectangle
     * @param bpsTop the border specification on the top edge
     * @param bpsBottom the border traits associated with bottom edge
     * @param bpsLeft the border specification on the left edge
     * @param bpsRight the border specification on the right edge
     * @param innerBackgroundColor the background color of the block
     */
    protected void drawBorders(Rectangle2D.Float borderRect,
            BorderProps bpsTop, BorderProps bpsBottom,
            BorderProps bpsLeft, BorderProps bpsRight,
            Color innerBackgroundColor) {
        //TODO generalize each of the four conditions into using a parameterized drawBorder()
        boolean[] border = new boolean[] {
                (bpsTop != null), (bpsRight != null),
                (bpsBottom != null), (bpsLeft != null)};
        float startx = borderRect.x;
        float starty = borderRect.y;
        float width = borderRect.width;
        float height = borderRect.height;
        // Border widths in points; 0 for absent edges.
        float[] borderWidth = new float[] {
                (border[TOP] ? bpsTop.width / 1000f : 0.0f),
                (border[RIGHT] ? bpsRight.width / 1000f : 0.0f),
                (border[BOTTOM] ? bpsBottom.width / 1000f : 0.0f),
                (border[LEFT] ? bpsLeft.width / 1000f : 0.0f)};
        // Per-edge clipped width (see BorderProps.getClippedWidth), in points.
        float[] clipw = new float[] {
                BorderProps.getClippedWidth(bpsTop) / 1000f,
                BorderProps.getClippedWidth(bpsRight) / 1000f,
                BorderProps.getClippedWidth(bpsBottom) / 1000f,
                BorderProps.getClippedWidth(bpsLeft) / 1000f};
        // Shrink the rectangle by the clipped widths on all four sides.
        starty += clipw[TOP];
        height -= clipw[TOP];
        height -= clipw[BOTTOM];
        startx += clipw[LEFT];
        width -= clipw[LEFT];
        width -= clipw[RIGHT];

        // A corner is slanted only when both adjacent borders are present:
        // TOP = top-left corner, RIGHT = top-right, BOTTOM = bottom-right,
        // LEFT = bottom-left (derived from which pairs are combined below).
        boolean[] slant = new boolean[] {
                (border[LEFT] && border[TOP]),
                (border[TOP] && border[RIGHT]),
                (border[RIGHT] && border[BOTTOM]),
                (border[BOTTOM] && border[LEFT])};
        if (bpsTop != null) {
            endTextObject();

            // Top border: outline a trapezoid between outer and inner edge.
            float sx1 = startx;
            float sx2 = (slant[TOP] ? sx1 + borderWidth[LEFT] - clipw[LEFT] : sx1);
            float ex1 = startx + width;
            float ex2 = (slant[RIGHT] ?
                    ex1 - borderWidth[RIGHT] + clipw[RIGHT] : ex1);
            float outery = starty - clipw[TOP];
            float clipy = outery + clipw[TOP];
            float innery = outery + borderWidth[TOP];

            saveGraphicsState();
            moveTo(sx1, clipy);
            float sx1a = sx1;
            float ex1a = ex1;
            // If the outer edge collapses outward, extend the outline past
            // the neighbors' clipped widths as well.
            if (isCollapseOuter(bpsTop)) {
                if (isCollapseOuter(bpsLeft)) {
                    sx1a -= clipw[LEFT];
                }
                if (isCollapseOuter(bpsRight)) {
                    ex1a += clipw[RIGHT];
                }
                lineTo(sx1a, outery);
                lineTo(ex1a, outery);
            }
            lineTo(ex1, clipy);
            lineTo(ex2, innery);
            lineTo(sx2, innery);
            closePath();
            clip();
            drawBorderLine(sx1a, outery, ex1a, innery, true, true,
                    bpsTop.style, bpsTop.color);
            restoreGraphicsState();
        }
        if (bpsRight != null) {
            endTextObject();

            // Right border.
            float sy1 = starty;
            float sy2 = (slant[RIGHT] ? sy1 + borderWidth[TOP] - clipw[TOP] : sy1);
            float ey1 = starty + height;
            float ey2 = (slant[BOTTOM] ? ey1 - borderWidth[BOTTOM] + clipw[BOTTOM] : ey1);
            float outerx = startx + width + clipw[RIGHT];
            float clipx = outerx - clipw[RIGHT];
            float innerx = outerx - borderWidth[RIGHT];

            saveGraphicsState();
            moveTo(clipx, sy1);
            float sy1a = sy1;
            float ey1a = ey1;
            if (isCollapseOuter(bpsRight)) {
                if (isCollapseOuter(bpsTop)) {
                    sy1a -= clipw[TOP];
                }
                if (isCollapseOuter(bpsBottom)) {
                    ey1a += clipw[BOTTOM];
                }
                lineTo(outerx, sy1a);
                lineTo(outerx, ey1a);
            }
            lineTo(clipx, ey1);
            lineTo(innerx, ey2);
            lineTo(innerx, sy2);
            closePath();
            clip();
            drawBorderLine(innerx, sy1a, outerx, ey1a, false, false,
                    bpsRight.style, bpsRight.color);
            restoreGraphicsState();
        }
        if (bpsBottom != null) {
            endTextObject();

            // Bottom border.
            float sx1 = startx;
            float sx2 = (slant[LEFT] ? sx1 + borderWidth[LEFT] - clipw[LEFT] : sx1);
            float ex1 = startx + width;
            float ex2 = (slant[BOTTOM] ?
                    ex1 - borderWidth[RIGHT] + clipw[RIGHT] : ex1);
            float outery = starty + height + clipw[BOTTOM];
            float clipy = outery - clipw[BOTTOM];
            float innery = outery - borderWidth[BOTTOM];

            saveGraphicsState();
            moveTo(ex1, clipy);
            float sx1a = sx1;
            float ex1a = ex1;
            if (isCollapseOuter(bpsBottom)) {
                if (isCollapseOuter(bpsLeft)) {
                    sx1a -= clipw[LEFT];
                }
                if (isCollapseOuter(bpsRight)) {
                    ex1a += clipw[RIGHT];
                }
                lineTo(ex1a, outery);
                lineTo(sx1a, outery);
            }
            lineTo(sx1, clipy);
            lineTo(sx2, innery);
            lineTo(ex2, innery);
            closePath();
            clip();
            drawBorderLine(sx1a, innery, ex1a, outery, true, false,
                    bpsBottom.style, bpsBottom.color);
            restoreGraphicsState();
        }
        if (bpsLeft != null) {
            endTextObject();

            // Left border.
            float sy1 = starty;
            float sy2 = (slant[TOP] ? sy1 + borderWidth[TOP] - clipw[TOP] : sy1);
            float ey1 = sy1 + height;
            float ey2 = (slant[LEFT] ? ey1 - borderWidth[BOTTOM] + clipw[BOTTOM] : ey1);
            float outerx = startx - clipw[LEFT];
            float clipx = outerx + clipw[LEFT];
            float innerx = outerx + borderWidth[LEFT];

            saveGraphicsState();
            moveTo(clipx, ey1);
            float sy1a = sy1;
            float ey1a = ey1;
            if (isCollapseOuter(bpsLeft)) {
                if (isCollapseOuter(bpsTop)) {
                    sy1a -= clipw[TOP];
                }
                if (isCollapseOuter(bpsBottom)) {
                    ey1a += clipw[BOTTOM];
                }
                lineTo(outerx, ey1a);
                lineTo(outerx, sy1a);
            }
            lineTo(clipx, sy1);
            lineTo(innerx, sy2);
            lineTo(innerx, ey2);
            closePath();
            clip();
            drawBorderLine(outerx, sy1a, innerx, ey1a, false, true,
                    bpsLeft.style, bpsLeft.color);
            restoreGraphicsState();
        }
    }

    /** Null-safe wrapper around BorderProps.isCollapseOuter(). */
    private boolean isCollapseOuter(BorderProps bp) {
        return bp != null && bp.isCollapseOuter();
    }

    /**
     * Common method to render the background and borders for any inline area.
     * All borders and padding are drawn outside the specified area.
     * @param area the inline area for which the background, border and padding is to be
     * rendered
     */
    protected void renderInlineAreaBackAndBorders(InlineArea area) {
        float borderPaddingStart = area.getBorderAndPaddingWidthStart() / 1000f;
        float borderPaddingEnd = area.getBorderAndPaddingWidthEnd() / 1000f;
        float borderPaddingBefore = area.getBorderAndPaddingWidthBefore() / 1000f;
        float borderPaddingAfter = area.getBorderAndPaddingWidthAfter() / 1000f;
        float bpwidth = borderPaddingStart + borderPaddingEnd;
        float bpheight = borderPaddingBefore + borderPaddingAfter;

        float height = area.getBPD() / 1000f;
        // NOTE(review): '&&' binds tighter than '||', so this reads as
        // height != 0 || (bpheight != 0 && bpwidth != 0) — confirm the
        // grouping is intended.
        if (height != 0.0f || bpheight != 0.0f && bpwidth != 0.0f) {
            float x = currentIPPosition / 1000f;
            float y = (currentBPPosition + area.getBlockProgressionOffset()) / 1000f;
            float width = area.getIPD() / 1000f;
            drawBackAndBorders(area, x, y - borderPaddingBefore
                                , width + bpwidth
                                , height + bpheight);
        }
    }

    /** Constant for the fox:transform extension attribute */
    protected static final QName FOX_TRANSFORM
            = new QName(ExtensionElementMapping.URI, "fox:transform");

    /** {@inheritDoc} */
    protected void renderBlockViewport(BlockViewport bv, List children) {
        // clip and position viewport if necessary

        // save positions
        int saveIP = currentIPPosition;
        int saveBP = currentBPPosition;

        CTM ctm = bv.getCTM();
        int borderPaddingBefore = bv.getBorderAndPaddingWidthBefore();
        int positioning = bv.getPositioning();

        if (positioning == Block.ABSOLUTE || positioning == Block.FIXED) {

            //For FIXED, we need to break out of the current viewports to the
            //one established by the page. We save the state stack for restoration
            //after the block-container has been painted. See below.
            List breakOutList = null;
            if (positioning == Block.FIXED) {
                breakOutList = breakOutOfStateStack();
            }

            AffineTransform positionTransform = new AffineTransform();
            positionTransform.translate(bv.getXOffset(), bv.getYOffset());

            int level = bv.getBidiLevel();
            int borderPaddingStart = bv.getBorderAndPaddingWidthStart();
            int borderPaddingEnd = bv.getBorderAndPaddingWidthEnd();

            //"left"/"top" (bv.getX/YOffset()) specify the position of the content rectangle
            if ((level == -1) || ((level & 1) == 0)) {
                positionTransform.translate(-borderPaddingStart, -borderPaddingBefore);
            } else {
                positionTransform.translate(-borderPaddingEnd, -borderPaddingBefore);
            }

            //Free transformation for the block-container viewport
            String transf;
            transf = bv.getForeignAttributeValue(FOX_TRANSFORM);
            if (transf != null) {
                AffineTransform freeTransform = AWTTransformProducer.createAffineTransform(transf);
                positionTransform.concatenate(freeTransform);
            }

            //Viewport position
            if (!positionTransform.isIdentity()) {
                establishTransformationMatrix(positionTransform);
            }

            //This is the content-rect
            float width = bv.getIPD() / 1000f;
            float height = bv.getBPD() / 1000f;

            //Background and borders
            float borderPaddingWidth
                = (borderPaddingStart + borderPaddingEnd) / 1000f;
            float borderPaddingHeight
                = (borderPaddingBefore + bv.getBorderAndPaddingWidthAfter()) / 1000f;
            drawBackAndBorders(bv, 0, 0,
                    width + borderPaddingWidth, height + borderPaddingHeight);

            //Shift to content rectangle after border painting
            AffineTransform contentRectTransform = new AffineTransform();
            if ((level == -1) || ((level & 1) == 0)) {
                contentRectTransform.translate(borderPaddingStart, borderPaddingBefore);
            } else {
                contentRectTransform.translate(borderPaddingEnd, borderPaddingBefore);
            }
            if (!contentRectTransform.isIdentity()) {
                establishTransformationMatrix(contentRectTransform);
            }

            //Clipping
            if (bv.hasClip()) {
                clipRect(0f, 0f, width, height);
            }

            //Set up coordinate system for content rectangle
            AffineTransform contentTransform =
                    ctm.toAffineTransform();
            if (!contentTransform.isIdentity()) {
                establishTransformationMatrix(contentTransform);
            }

            currentIPPosition = 0;
            currentBPPosition = 0;
            renderBlocks(bv, children);

            // Pop the up-to-three transforms in reverse order of creation.
            if (!contentTransform.isIdentity()) {
                restoreGraphicsState();
            }
            if (!contentRectTransform.isIdentity()) {
                restoreGraphicsState();
            }
            if (!positionTransform.isIdentity()) {
                restoreGraphicsState();
            }

            //For FIXED, we need to restore break out now we are done
            if (positioning == Block.FIXED) {
                if (breakOutList != null) {
                    restoreStateStackAfterBreakOut(breakOutList);
                }
            }

            currentIPPosition = saveIP;
            currentBPPosition = saveBP;
        } else {

            currentBPPosition += bv.getSpaceBefore();

            //borders and background in the old coordinate system
            handleBlockTraits(bv);

            //Advance to start of content area
            currentIPPosition += bv.getStartIndent();

            CTM tempctm = new CTM(containingIPPosition, currentBPPosition);
            ctm = tempctm.multiply(ctm);

            //Now adjust for border/padding
            currentBPPosition += borderPaddingBefore;

            Rectangle clippingRect = null;
            if (bv.hasClip()) {
                clippingRect = new Rectangle(currentIPPosition, currentBPPosition,
                        bv.getIPD(), bv.getBPD());
            }

            startVParea(ctm, clippingRect);
            currentIPPosition = 0;
            currentBPPosition = 0;
            renderBlocks(bv, children);
            endVParea();

            currentIPPosition = saveIP;
            currentBPPosition = saveBP;

            currentBPPosition += (bv.getAllocBPD());
        }
    }

    /** {@inheritDoc} */
    protected void renderReferenceArea(Block block) {
        // save position and offset
        int saveIP = currentIPPosition;
        int saveBP = currentBPPosition;
        int saveBO = getBeginOffset();

        //Establish a new coordinate system
        AffineTransform at = new AffineTransform();
        at.translate(currentIPPosition, currentBPPosition);
        at.translate(block.getXOffset(), block.getYOffset());
        at.translate(0, block.getSpaceBefore());
        setBeginOffset(saveBO - block.getXOffset());

        if (!at.isIdentity()) {
            establishTransformationMatrix(at);
        }

        currentIPPosition = 0;
        currentBPPosition = 0;
        handleBlockTraits(block);

        List children = block.getChildAreas();
        if
(children != null) {
            renderBlocks(block, children);
        }

        if (!at.isIdentity()) {
            restoreGraphicsState();
        }
        setBeginOffset(saveBO);

        // stacked and relative blocks effect stacking
        currentIPPosition = saveIP;
        currentBPPosition = saveBP;
    }

    /** {@inheritDoc} */
    protected void renderFlow(NormalFlow flow) {
        // save position and offset
        int saveIP = currentIPPosition;
        int saveBP = currentBPPosition;

        //Establish a new coordinate system
        AffineTransform at = new AffineTransform();
        at.translate(currentIPPosition, currentBPPosition);

        if (!at.isIdentity()) {
            establishTransformationMatrix(at);
        }

        currentIPPosition = 0;
        currentBPPosition = 0;
        super.renderFlow(flow);

        if (!at.isIdentity()) {
            restoreGraphicsState();
        }

        // stacked and relative blocks effect stacking
        currentIPPosition = saveIP;
        currentBPPosition = saveBP;
    }

    /**
     * Concatenates the current transformation matrix with the given one, therefore establishing
     * a new coordinate system.
     * @param at the transformation matrix to process (coordinates in points)
     */
    protected abstract void concatenateTransformationMatrix(AffineTransform at);

    /**
     * Render an inline viewport.
     * This renders an inline viewport by clipping if necessary.
     * @param viewport the viewport to handle
     */
    public void renderInlineViewport(InlineViewport viewport) {
        int level = viewport.getBidiLevel();
        float x = currentIPPosition / 1000f;
        float y = (currentBPPosition + viewport.getBlockProgressionOffset()) / 1000f;
        float width = viewport.getIPD() / 1000f;
        float height = viewport.getBPD() / 1000f;
        // TODO: Calculate the border rect correctly.
float borderPaddingStart = viewport.getBorderAndPaddingWidthStart() / 1000f; float borderPaddingEnd = viewport.getBorderAndPaddingWidthEnd() / 1000f; float borderPaddingBefore = viewport.getBorderAndPaddingWidthBefore() / 1000f; float borderPaddingAfter = viewport.getBorderAndPaddingWidthAfter() / 1000f; float bpwidth = borderPaddingStart + borderPaddingEnd; float bpheight = borderPaddingBefore + borderPaddingAfter; drawBackAndBorders(viewport, x, y, width + bpwidth, height + bpheight); if (viewport.hasClip()) { saveGraphicsState(); if ((level == -1) || ((level & 1) == 0)) { clipRect(x + borderPaddingStart, y + borderPaddingBefore, width, height); } else { clipRect(x + borderPaddingEnd, y + borderPaddingBefore, width, height); } } super.renderInlineViewport(viewport); if (viewport.hasClip()) { restoreGraphicsState(); } } /** * Restores the state stack after a break out. * @param breakOutList the state stack to restore. */ protected abstract void restoreStateStackAfterBreakOut(List breakOutList); /** * Breaks out of the state stack to handle fixed block-containers. * @return the saved state stack to recreate later */ protected abstract List breakOutOfStateStack(); /** Saves the graphics state of the rendering engine. */ protected abstract void saveGraphicsState(); /** Restores the last graphics state of the rendering engine. */ protected abstract void restoreGraphicsState(); /** Indicates the beginning of a text object. */ protected abstract void beginTextObject(); /** Indicates the end of a text object. */ protected abstract void endTextObject(); /** * Paints the text decoration marks. 
* @param fm Current typeface * @param fontsize Current font size * @param inline inline area to paint the marks for * @param baseline position of the baseline * @param startx start IPD */ protected void renderTextDecoration(FontMetrics fm, int fontsize, InlineArea inline, int baseline, int startx) { boolean hasTextDeco = inline.hasUnderline() || inline.hasOverline() || inline.hasLineThrough(); if (hasTextDeco) { endTextObject(); float descender = fm.getDescender(fontsize) / 1000f; float capHeight = fm.getCapHeight(fontsize) / 1000f; float halfLineWidth = (descender / -8f) / 2f; float endx = (startx + inline.getIPD()) / 1000f; if (inline.hasUnderline()) { Color ct = (Color) inline.getTrait(Trait.UNDERLINE_COLOR); float y = baseline - descender / 2f; drawBorderLine(startx / 1000f, (y - halfLineWidth) / 1000f, endx, (y + halfLineWidth) / 1000f, true, true, Constants.EN_SOLID, ct); } if (inline.hasOverline()) { Color ct = (Color) inline.getTrait(Trait.OVERLINE_COLOR); float y = (float)(baseline - (1.1 * capHeight)); drawBorderLine(startx / 1000f, (y - halfLineWidth) / 1000f, endx, (y + halfLineWidth) / 1000f, true, true, Constants.EN_SOLID, ct); } if (inline.hasLineThrough()) { Color ct = (Color) inline.getTrait(Trait.LINETHROUGH_COLOR); float y = (float)(baseline - (0.45 * capHeight)); drawBorderLine(startx / 1000f, (y - halfLineWidth) / 1000f, endx, (y + halfLineWidth) / 1000f, true, true, Constants.EN_SOLID, ct); } } } /** Clip using the current path. */ protected abstract void clip(); /** * Clip using a rectangular area. * @param x the x coordinate (in points) * @param y the y coordinate (in points) * @param width the width of the rectangle (in points) * @param height the height of the rectangle (in points) */ protected abstract void clipRect(float x, float y, float width, float height); /** * Moves the current point to (x, y), omitting any connecting line segment. 
* @param x x coordinate * @param y y coordinate */ protected abstract void moveTo(float x, float y); /** * Appends a straight line segment from the current point to (x, y). The * new current point is (x, y). * @param x x coordinate * @param y y coordinate */ protected abstract void lineTo(float x, float y); /** * Closes the current subpath by appending a straight line segment from * the current point to the starting point of the subpath. */ protected abstract void closePath(); /** * Fill a rectangular area. * @param x the x coordinate * @param y the y coordinate * @param width the width of the rectangle * @param height the height of the rectangle */ protected abstract void fillRect(float x, float y, float width, float height); /** * Establishes a new foreground or fill color. * @param col the color to apply (null skips this operation) * @param fill true to set the fill color, false for the foreground color */ protected abstract void updateColor(Color col, boolean fill); /** * Draw an image at the indicated location. * @param url the URI/URL of the image * @param pos the position of the image * @param foreignAttributes an optional Map with foreign attributes, may be null */ protected abstract void drawImage(String url, Rectangle2D pos, Map foreignAttributes); /** * Draw an image at the indicated location. * @param url the URI/URL of the image * @param pos the position of the image */ protected final void drawImage(String url, Rectangle2D pos) { drawImage(url, pos, null); } /** * Draw a border segment of an XSL-FO style border. * @param x1 starting x coordinate * @param y1 starting y coordinate * @param x2 ending x coordinate * @param y2 ending y coordinate * @param horz true for horizontal border segments, false for vertical border segments * @param startOrBefore true for border segments on the start or before edge, * false for end or after. * @param style the border style (one of Constants.EN_DASHED etc.) 
* @param col the color for the border segment */ protected abstract void drawBorderLine(float x1, float y1, float x2, float y2, boolean horz, boolean startOrBefore, int style, Color col); /** {@inheritDoc} */ public void renderForeignObject(ForeignObject fo, Rectangle2D pos) { endTextObject(); Document doc = fo.getDocument(); String ns = fo.getNameSpace(); renderDocument(doc, ns, pos, fo.getForeignAttributes()); } /** * Establishes a new coordinate system with the given transformation matrix. * The current graphics state is saved and the new coordinate system is concatenated. * @param at the transformation matrix */ protected void establishTransformationMatrix(AffineTransform at) { saveGraphicsState(); concatenateTransformationMatrix(UnitConv.mptToPt(at)); } }
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package net.opengis.citygml.building.provider;

import java.util.Collection;
import java.util.List;

import net.opengis.citygml.building.BuildingPackage;
import net.opengis.citygml.building.WallSurfaceType;

import net.opengis.gml.GmlPackage;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EStructuralFeature;

import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.FeatureMapUtil;

import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ViewerNotification;

// NOTE(review): EMF-generated item provider. Methods are tagged "@generated";
// hand edits to the code would be lost on regeneration unless the tag is
// changed to "@generated NOT". Only comments were added in this review.
/**
 * This is the item provider adapter for a {@link net.opengis.citygml.building.WallSurfaceType} object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class WallSurfaceTypeItemProvider
    extends AbstractBoundarySurfaceTypeItemProvider
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public WallSurfaceTypeItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        // Lazily populated; the superclass fills itemPropertyDescriptors on first call.
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            // WallSurfaceType adds one feature beyond AbstractBoundarySurfaceType's.
            childrenFeatures.add(BuildingPackage.Literals.WALL_SURFACE_TYPE__GENERIC_APPLICATION_PROPERTY_OF_WALL_SURFACE_GROUP);
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.

        return super.getChildFeature(object, child);
    }

    /**
     * This returns WallSurfaceType.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/WallSurfaceType"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        // Label is the gml:id of the wall surface, if any.
        String label = ((WallSurfaceType)object).getId();
        return label == null || label.length() == 0 ?
            getString("_UI_WallSurfaceType_type") :
            getString("_UI_WallSurfaceType_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(WallSurfaceType.class)) {
            case BuildingPackage.WALL_SURFACE_TYPE__GENERIC_APPLICATION_PROPERTY_OF_WALL_SURFACE_GROUP:
                // Structural change: refresh children, no label change.
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);
    }

    /**
     * This returns the label text for {@link org.eclipse.emf.edit.command.CreateChildCommand}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getCreateChildText(Object owner, Object feature, Object child, Collection<?> selection) {
        Object childFeature = feature;
        Object childObject = child;

        // Feature maps wrap the actual feature/value pair; unwrap before comparing.
        if (childFeature instanceof EStructuralFeature && FeatureMapUtil.isFeatureMap((EStructuralFeature)childFeature)) {
            FeatureMap.Entry entry = (FeatureMap.Entry)childObject;
            childFeature = entry.getEStructuralFeature();
            childObject = entry.getValue();
        }

        // Qualify the label when several features share the same child type.
        boolean qualify =
            childFeature == GmlPackage.eINSTANCE.getAbstractGMLType_Name() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CoordinateOperationName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CsName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_DatumName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_EllipsoidName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_GroupName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MeridianName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MethodName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_ParameterName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_SrsName() ||
            childFeature == GmlPackage.eINSTANCE.getAbstractFeatureType_Location() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_PriorityLocation() ||
            childFeature == BuildingPackage.Literals.ABSTRACT_BOUNDARY_SURFACE_TYPE__LOD2_MULTI_SURFACE ||
            childFeature == BuildingPackage.Literals.ABSTRACT_BOUNDARY_SURFACE_TYPE__LOD3_MULTI_SURFACE ||
            childFeature == BuildingPackage.Literals.ABSTRACT_BOUNDARY_SURFACE_TYPE__LOD4_MULTI_SURFACE;

        if (qualify) {
            return getString
                ("_UI_CreateChild_text2",
                 new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) });
        }
        return super.getCreateChildText(owner, feature, child, selection);
    }

}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2019 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.quickstart; import java.awt.Color; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.net.URL; import java.util.List; import javax.swing.Box; import javax.swing.DefaultComboBoxModel; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JPanel; import org.apache.commons.httpclient.URI; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.control.Control.Mode; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.model.OptionsParam; import org.parosproxy.paros.model.SiteNode; import org.parosproxy.paros.view.View; import org.zaproxy.zap.extension.alert.ExtensionAlert; import org.zaproxy.zap.extension.search.SearchPanel; import org.zaproxy.zap.utils.DisplayUtils; import org.zaproxy.zap.view.LayoutHelper; import org.zaproxy.zap.view.NodeSelectDialog; public class AttackPanel extends QuickStartSubPanel { private static final long serialVersionUID = 1L; private static final String DEFAULT_VALUE_URL_FIELD = "http://"; private JCheckBox spiderCheckBox; private JButton attackButton; private 
JButton stopButton; private JComboBox<String> urlField; private DefaultComboBoxModel<String> urlModel; private JButton selectButton; private JLabel progressLabel; private JPanel contentPanel; private JLabel lowerPadding; private int paddingY; /** Optional class that adds the ajax spider - may be added after init or not at all */ private PlugableSpider plugableSpider; private JLabel plugableSpiderLabel; private int plugableSpiderY; public AttackPanel(ExtensionQuickStart extension, QuickStartPanel qsp) { super(extension, qsp); this.setMode(Control.getSingleton().getMode()); } @Override public String getTitleKey() { return "quickstart.attack.panel.title"; } @Override public JPanel getDescriptionPanel() { JPanel panel = new JPanel(new GridBagLayout()); panel.setBackground(Color.WHITE); panel.add( QuickStartHelper.getWrappedLabel("quickstart.attack.panel.message1"), LayoutHelper.getGBC(0, 0, 2, 1.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); panel.add( QuickStartHelper.getWrappedLabel("quickstart.attack.panel.message2"), LayoutHelper.getGBC(0, 1, 2, 1.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); panel.add( new JLabel(" "), LayoutHelper.getGBC( 0, 2, 2, 1.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); // Spacer return panel; } @Override public JPanel getContentPanel() { if (contentPanel == null) { contentPanel = new JPanel(new GridBagLayout()); contentPanel.setBackground(Color.WHITE); int formPanelY = 0; contentPanel.add( new JLabel(Constant.messages.getString("quickstart.label.attackurl")), LayoutHelper.getGBC( 1, formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); JPanel urlSelectPanel = new JPanel(new GridBagLayout()); selectButton = new JButton(Constant.messages.getString("all.button.select")); selectButton.setIcon( DisplayUtils.getScaledIcon( new ImageIcon( View.class.getResource("/resource/icon/16/094.png")))); // Globe // icon selectButton.addActionListener( new java.awt.event.ActionListener() { @Override public void 
actionPerformed(java.awt.event.ActionEvent e) { NodeSelectDialog nsd = new NodeSelectDialog(View.getSingleton().getMainFrame()); SiteNode node = null; try { node = Model.getSingleton() .getSession() .getSiteTree() .findNode( new URI( getUrlField() .getSelectedItem() .toString(), false)); } catch (Exception e2) { // Ignore } node = nsd.showDialog(node); if (node != null && node.getHistoryReference() != null) { try { getUrlField() .setSelectedItem( node.getHistoryReference().getURI().toString()); } catch (Exception e1) { // Ignore } } } }); urlSelectPanel.add(this.getUrlField(), LayoutHelper.getGBC(0, 0, 1, 0.5D)); urlSelectPanel.add(selectButton, LayoutHelper.getGBC(1, 0, 1, 0.0D)); contentPanel.add(urlSelectPanel, LayoutHelper.getGBC(2, formPanelY, 3, 0.25D)); contentPanel.add( new JLabel(Constant.messages.getString("quickstart.label.tradspider")), LayoutHelper.getGBC( 1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); contentPanel.add( getSpiderCheckBox(), LayoutHelper.getGBC( 2, formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); plugableSpiderY = ++formPanelY; JPanel buttonPanel = QuickStartHelper.getHorizontalPanel(); buttonPanel.add(this.getAttackButton()); buttonPanel.add(this.getStopButton()); buttonPanel.add(Box.createHorizontalGlue()); contentPanel.add(buttonPanel, LayoutHelper.getGBC(2, ++formPanelY, 1, 1.0D)); contentPanel.add( new JLabel(Constant.messages.getString("quickstart.label.progress")), LayoutHelper.getGBC( 1, ++formPanelY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); contentPanel.add(getProgressLabel(), LayoutHelper.getGBC(2, formPanelY, 1, 1.0D)); paddingY = ++formPanelY; this.replacePadding(); } return contentPanel; } private JLabel getProgressLabel() { if (progressLabel == null) { progressLabel = new JLabel( Constant.messages.getString( "quickstart.progress." 
+ AttackThread.Progress.notstarted.name())); } return progressLabel; } private JCheckBox getSpiderCheckBox() { if (spiderCheckBox == null) { spiderCheckBox = new JCheckBox(); spiderCheckBox.setSelected( getExtensionQuickStart().getQuickStartParam().isTradSpiderEnabled()); spiderCheckBox.addActionListener( new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { getExtensionQuickStart() .getQuickStartParam() .setTradSpiderEnabled(spiderCheckBox.isSelected()); } }); } return spiderCheckBox; } public void addPlugableSpider(PlugableSpider plugableSpider) { this.plugableSpider = plugableSpider; addAjaxSpiderGui(); } public void removePlugableSpider(PlugableSpider plugableSpider) { if (contentPanel != null && plugableSpider != null) { contentPanel.remove(plugableSpiderLabel); contentPanel.remove(plugableSpider.getPanel()); replacePadding(); } this.plugableSpider = null; } private void addAjaxSpiderGui() { if (contentPanel != null && this.plugableSpider != null) { plugableSpiderLabel = new JLabel(this.plugableSpider.getLabel()); contentPanel.add( plugableSpiderLabel, LayoutHelper.getGBC( 1, plugableSpiderY, 1, 0.0D, DisplayUtils.getScaledInsets(5, 5, 5, 5))); contentPanel.add( this.plugableSpider.getPanel(), LayoutHelper.getGBC(2, plugableSpiderY, 1, 1.0D)); replacePadding(); } } private void replacePadding() { if (contentPanel != null) { // this may or may not be present if (this.lowerPadding == null) { lowerPadding = new JLabel(""); } else { contentPanel.remove(this.lowerPadding); } contentPanel.add( lowerPadding, LayoutHelper.getGBC(0, paddingY, 1, 0.0D, 1.0D)); // Padding at bottom } } protected void setMode(Mode mode) { switch (mode) { case safe: case protect: this.getUrlField().setEnabled(false); this.getUrlField() .setSelectedItem( Constant.messages.getString("quickstart.field.url.disabled.mode")); this.selectButton.setEnabled(false); this.getAttackButton().setEnabled(false); break; case standard: case attack: 
this.getUrlField().setEnabled(true); this.getUrlField().setSelectedItem(DEFAULT_VALUE_URL_FIELD); this.selectButton.setEnabled(true); this.getAttackButton().setEnabled(true); break; } } private JComboBox<String> getUrlField() { if (urlField == null) { urlField = new JComboBox<String>(); urlField.setEditable(true); urlField.setModel(getUrlModel()); setRecentUrls(); } return urlField; } protected DefaultComboBoxModel<String> getUrlModel() { if (urlModel == null) { urlModel = new DefaultComboBoxModel<String>(); } return urlModel; } private void setRecentUrls() { if (urlField != null) { QuickStartParam quickStartParam = this.getExtensionQuickStart().getQuickStartParam(); Object currentUrl = urlField.getSelectedItem(); DefaultComboBoxModel<String> model = getUrlModel(); model.removeAllElements(); List<Object> recentUrls = quickStartParam.getRecentUrls(); for (Object url : recentUrls) { if (url != null) { model.addElement(url.toString()); } } if (currentUrl != null && currentUrl.toString().length() > 0) { urlField.setSelectedItem(currentUrl); } else { urlField.setSelectedItem(DEFAULT_VALUE_URL_FIELD); } } } private JButton getAttackButton() { if (attackButton == null) { attackButton = new JButton(); attackButton.setText(Constant.messages.getString("quickstart.button.label.attack")); attackButton.setIcon( DisplayUtils.getScaledIcon( new ImageIcon( SearchPanel.class.getResource( "/resource/icon/16/147.png")))); // 'lightning' // icon attackButton.setToolTipText( Constant.messages.getString("quickstart.button.tooltip.attack")); attackButton.addActionListener( new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (!spiderCheckBox.isSelected() && (plugableSpider == null || !plugableSpider.isSelected())) { getExtensionQuickStart() .getView() .showWarningDialog( Constant.messages.getString( "quickstart.url.warning.nospider")); } else { attackUrl(); } } }); } return attackButton; } private JButton getStopButton() { if 
(stopButton == null) { stopButton = new JButton(); stopButton.setText(Constant.messages.getString("quickstart.button.label.stop")); stopButton.setIcon( DisplayUtils.getScaledIcon( new ImageIcon( SearchPanel.class.getResource( "/resource/icon/16/142.png")))); // 'stop' // icon stopButton.setToolTipText( Constant.messages.getString("quickstart.button.tooltip.stop")); stopButton.setEnabled(false); stopButton.addActionListener( new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { stopAttack(); } }); } return stopButton; } boolean attackUrl() { Object item = this.getUrlField().getSelectedItem(); if (item == null || DEFAULT_VALUE_URL_FIELD.equals(item.toString())) { getExtensionQuickStart() .getView() .showWarningDialog( Constant.messages.getString("quickstart.url.warning.invalid")); this.getUrlField().requestFocusInWindow(); return false; } String urlStr = item.toString(); URL url; try { url = new URL(urlStr); // Validate the actual request-uri of the HTTP message accessed. new URI(urlStr, true); } catch (Exception e) { getExtensionQuickStart() .getView() .showWarningDialog( Constant.messages.getString("quickstart.url.warning.invalid")); this.getUrlField().requestFocusInWindow(); return false; } this.getExtensionQuickStart().getQuickStartParam().addRecentUrl(urlStr); getAttackButton().setEnabled(false); getStopButton().setEnabled(true); getExtensionQuickStart().attack(url, spiderCheckBox.isSelected()); return true; } void setAttackUrl(String url) { getUrlField().setSelectedItem(url); } private void stopAttack() { getExtensionQuickStart().stopAttack(); stopButton.setEnabled(false); } protected void notifyProgress(AttackThread.Progress progress) { this.notifyProgress(progress, null); } protected void notifyProgress(AttackThread.Progress progress, String msg) { if (msg == null) { msg = Constant.messages.getString("quickstart.progress." 
+ progress.name()); } getProgressLabel().setText(msg); getProgressLabel().setToolTipText(msg); switch (progress) { case complete: getAttackButton().setEnabled(true); getStopButton().setEnabled(false); ExtensionAlert extAlert = ((ExtensionAlert) Control.getSingleton() .getExtensionLoader() .getExtension(ExtensionAlert.NAME)); if (extAlert != null) { extAlert.setAlertTabFocus(); } break; case failed: case stopped: getAttackButton().setEnabled(true); getStopButton().setEnabled(false); break; default: break; } } public void optionsLoaded(QuickStartParam quickStartParam) { this.getSpiderCheckBox().setSelected(quickStartParam.isTradSpiderEnabled()); setRecentUrls(); } public void optionsChanged(OptionsParam optionsParam) { setRecentUrls(); } @Override public ImageIcon getIcon() { return ExtensionQuickStart.ZAP_ICON; } @Override public JPanel getFooterPanel() { return null; } }
/** * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ai.api.services; import android.annotation.TargetApi; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.speech.RecognitionListener; import android.speech.RecognizerIntent; import android.speech.SpeechRecognizer; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.util.Log; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import ai.api.android.AIConfiguration; import ai.api.android.AIService; import ai.api.AIServiceException; import ai.api.PartialResultsListener; import ai.api.RequestExtras; import ai.api.model.AIContext; import ai.api.model.AIError; import ai.api.model.AIRequest; import ai.api.model.AIResponse; import ai.api.util.RecognizerChecker; import ai.api.util.VersionConfig; public class GoogleRecognitionServiceImpl extends AIService { private static final String TAG = GoogleRecognitionServiceImpl.class.getName(); private static final long STOP_DELAY = 1000; private SpeechRecognizer speechRecognizer; private final Object speechRecognizerLock = new Object(); private RequestExtras requestExtras; private PartialResultsListener partialResultsListener; private final VersionConfig versionConfig; 
private volatile boolean recognitionActive = false; private volatile boolean wasReadyForSpeech; private final Handler handler = new Handler(); private Runnable stopRunnable; private final Map<Integer, String> errorMessages = new HashMap<>(); { errorMessages.put(SpeechRecognizer.ERROR_NETWORK_TIMEOUT, "Network operation timed out."); errorMessages.put(SpeechRecognizer.ERROR_NETWORK, "Other network related errors."); errorMessages.put(SpeechRecognizer.ERROR_AUDIO, "Audio recording error."); errorMessages.put(SpeechRecognizer.ERROR_SERVER, "Server sends error status."); errorMessages.put(SpeechRecognizer.ERROR_CLIENT, "Other client side errors."); errorMessages.put(SpeechRecognizer.ERROR_SPEECH_TIMEOUT, "No speech input."); errorMessages.put(SpeechRecognizer.ERROR_NO_MATCH, "No recognition result matched."); errorMessages.put(SpeechRecognizer.ERROR_RECOGNIZER_BUSY, "RecognitionService busy."); errorMessages.put(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS, "Insufficient permissions."); } public GoogleRecognitionServiceImpl(final Context context, final AIConfiguration config) { super(config, context); final ComponentName component = RecognizerChecker.findGoogleRecognizer(context); if (component == null) { Log.w(TAG, "Google Recognizer application not found on device. " + "Quality of the recognition may be low. Please check if Google Search application installed and enabled."); } versionConfig = VersionConfig.init(context); if (versionConfig.isAutoStopRecognizer()) { stopRunnable = new Runnable() { @Override public void run() { stopListening(); } }; } } /** * Manage recognizer cancellation runnable. 
* * @param action (int) (0 - stop, 1 - restart) */ private void updateStopRunnable(final int action) { if (stopRunnable != null) { if (action == 0) { handler.removeCallbacks(stopRunnable); } else if (action == 1) { handler.removeCallbacks(stopRunnable); handler.postDelayed(stopRunnable, STOP_DELAY); } } } protected void initializeRecognizer() { if (speechRecognizer != null) { return; } synchronized (speechRecognizerLock) { if (speechRecognizer != null) { speechRecognizer.destroy(); speechRecognizer = null; } final ComponentName component = RecognizerChecker.findGoogleRecognizer(context); speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context, component); speechRecognizer.setRecognitionListener(new InternalRecognitionListener()); } } protected void clearRecognizer() { Log.d(TAG, "clearRecognizer"); if (speechRecognizer != null) { synchronized (speechRecognizerLock) { if (speechRecognizer != null) { speechRecognizer.destroy(); speechRecognizer = null; } } } } private void sendRequest(@NonNull final AIRequest aiRequest, @Nullable final RequestExtras requestExtras) { if (aiRequest == null) { throw new IllegalArgumentException("aiRequest must be not null"); } final AsyncTask<AIRequest, Integer, AIResponse> task = new AsyncTask<AIRequest, Integer, AIResponse>() { private AIError aiError; @Override protected AIResponse doInBackground(final AIRequest... 
params) { final AIRequest request = params[0]; try { return aiDataService.request(request, requestExtras); } catch (final AIServiceException e) { aiError = new AIError(e); return null; } } @Override protected void onPostExecute(final AIResponse response) { if (response != null) { onResult(response); } else { onError(aiError); } } }; task.execute(aiRequest); } @Override public void startListening() { startListening(new RequestExtras()); } @Override public void startListening(final List<AIContext> contexts) { startListening(new RequestExtras(contexts, null)); } @Override public void startListening(final RequestExtras requestExtras) { if (!recognitionActive) { synchronized (speechRecognizerLock) { this.requestExtras = requestExtras; if (!checkPermissions()) { final AIError aiError = new AIError("RECORD_AUDIO permission is denied. Please request permission from user."); onError(aiError); return; } initializeRecognizer(); recognitionActive = true; final Intent sttIntent = createRecognitionIntent(); try { wasReadyForSpeech = false; speechRecognizer.startListening(sttIntent); } catch (final SecurityException e) { //Error occurs only on HTC devices. 
} } } else { Log.w(TAG, "Trying to start recognition while another recognition active"); if (!wasReadyForSpeech) { cancel(); } } } private Intent createRecognitionIntent() { final Intent sttIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); final String language = config.getLanguage().replace('-', '_'); sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language); sttIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, language); sttIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true); sttIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName()); // WORKAROUND for https://code.google.com/p/android/issues/detail?id=75347 sttIntent.putExtra("android.speech.extra.EXTRA_ADDITIONAL_LANGUAGES", new String[]{language}); return sttIntent; } @Override public void stopListening() { synchronized (speechRecognizerLock) { if (speechRecognizer != null) { speechRecognizer.stopListening(); } } } @Override public void cancel() { synchronized (speechRecognizerLock) { if (recognitionActive) { recognitionActive = false; if (speechRecognizer != null) { speechRecognizer.cancel(); } onListeningCancelled(); } } } private void restartRecognition() { updateStopRunnable(0); recognitionActive = false; synchronized (speechRecognizerLock) { try { if (speechRecognizer != null) { speechRecognizer.cancel(); final Intent intent = createRecognitionIntent(); wasReadyForSpeech = false; speechRecognizer.startListening(intent); recognitionActive = true; } } catch (Exception e) { stopListening(); } } } /** * This method must be called from UI thread */ @Override public void pause() { clearRecognizer(); } /** * This method must be called from UI thread */ @Override public void resume() { } public void setPartialResultsListener(PartialResultsListener partialResultsListener) { this.partialResultsListener = partialResultsListener; } protected void 
onPartialResults(final List<String> partialResults) { if (partialResultsListener != null) { partialResultsListener.onPartialResults(partialResults); } } private void stopInternal() { updateStopRunnable(0); if (versionConfig.isDestroyRecognizer()) clearRecognizer(); recognitionActive = false; } private class InternalRecognitionListener implements RecognitionListener { @Override public void onReadyForSpeech(final Bundle params) { if (recognitionActive) { onListeningStarted(); } wasReadyForSpeech = true; } @Override public void onBeginningOfSpeech() { } @Override public void onRmsChanged(final float rmsdB) { if (recognitionActive) { onAudioLevelChanged(rmsdB); } } @Override public void onBufferReceived(final byte[] buffer) { } @Override public void onEndOfSpeech() { if (recognitionActive) { onListeningFinished(); } } @Override public void onError(final int error) { if (error == SpeechRecognizer.ERROR_NO_MATCH && !wasReadyForSpeech) { Log.d(TAG, "SpeechRecognizer.ERROR_NO_MATCH, restartRecognition()"); restartRecognition(); return; } if (recognitionActive) { final AIError aiError; if (errorMessages.containsKey(error)) { final String description = errorMessages.get(error); aiError = new AIError("Speech recognition engine error: " + description); } else { aiError = new AIError("Speech recognition engine error: " + error); } GoogleRecognitionServiceImpl.this.onError(aiError); } stopInternal(); } @TargetApi(14) @Override public void onResults(final Bundle results) { if (recognitionActive) { final ArrayList<String> recognitionResults = results .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION); float[] rates = null; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { rates = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES); } if (recognitionResults == null || recognitionResults.isEmpty()) { // empty response GoogleRecognitionServiceImpl.this.onResult(new AIResponse()); } else { final AIRequest aiRequest = new AIRequest(); if (rates != 
null) { aiRequest.setQuery(recognitionResults.toArray(new String[recognitionResults.size()]), rates); } else { aiRequest.setQuery(recognitionResults.get(0)); } // notify listeners about the last recogntion result for more accurate user feedback GoogleRecognitionServiceImpl.this.onPartialResults(recognitionResults); GoogleRecognitionServiceImpl.this.sendRequest(aiRequest, requestExtras); } } stopInternal(); } @Override public void onPartialResults(final Bundle partialResults) { if (recognitionActive) { updateStopRunnable(1); final ArrayList<String> partialRecognitionResults = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION); if (partialRecognitionResults != null && !partialRecognitionResults.isEmpty()) { GoogleRecognitionServiceImpl.this.onPartialResults(partialRecognitionResults); } } } @Override public void onEvent(final int eventType, final Bundle params) { } } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2017 the original author or authors.
 */
package org.assertj.core.internal;

import static java.lang.String.format;
import static org.assertj.core.error.ShouldBeAbsolutePath.shouldBeAbsolutePath;
import static org.assertj.core.error.ShouldBeCanonicalPath.shouldBeCanonicalPath;
import static org.assertj.core.error.ShouldBeDirectory.shouldBeDirectory;
import static org.assertj.core.error.ShouldBeExecutable.shouldBeExecutable;
import static org.assertj.core.error.ShouldBeNormalized.shouldBeNormalized;
import static org.assertj.core.error.ShouldBeReadable.shouldBeReadable;
import static org.assertj.core.error.ShouldBeRegularFile.shouldBeRegularFile;
import static org.assertj.core.error.ShouldBeRelativePath.shouldBeRelativePath;
import static org.assertj.core.error.ShouldBeSymbolicLink.shouldBeSymbolicLink;
import static org.assertj.core.error.ShouldBeWritable.shouldBeWritable;
import static org.assertj.core.error.ShouldEndWithPath.shouldEndWith;
import static org.assertj.core.error.ShouldExist.shouldExist;
import static org.assertj.core.error.ShouldExist.shouldExistNoFollowLinks;
import static org.assertj.core.error.ShouldHaveBinaryContent.shouldHaveBinaryContent;
import static org.assertj.core.error.ShouldHaveContent.shouldHaveContent;
import static org.assertj.core.error.ShouldHaveName.shouldHaveName;
import static org.assertj.core.error.ShouldHaveNoParent.shouldHaveNoParent;
import static org.assertj.core.error.ShouldHaveParent.shouldHaveParent;
import static org.assertj.core.error.ShouldHaveSameContent.shouldHaveSameContent;
import static org.assertj.core.error.ShouldNotExist.shouldNotExist;
import static org.assertj.core.error.ShouldStartWithPath.shouldStartWith;
import static org.assertj.core.util.Preconditions.checkArgument;
import static org.assertj.core.util.Preconditions.checkNotNull;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.List;

import org.assertj.core.api.AssertionInfo;
import org.assertj.core.api.exception.PathsException;
import org.assertj.core.api.exception.RuntimeIOException;
import org.assertj.core.util.VisibleForTesting;
import org.assertj.core.util.diff.Delta;

/**
 * Core assertion class for {@link Path} assertions.
 * <p>
 * Each {@code assertXxx} method throws an {@link AssertionError} built by {@code failures} when the assertion
 * fails, and a {@link PathsException} when resolving a real path fails with an {@link IOException}. File-system
 * access goes through {@link NioFilesWrapper} so tests can substitute a mock.
 */
public class Paths {

  // Messages used as the PathsException message when Path.toRealPath() fails.
  private static final String FAILED_TO_RESOLVE_ARGUMENT_REAL_PATH = "failed to resolve argument real path";
  private static final String FAILED_TO_RESOLVE_ACTUAL_REAL_PATH = "failed to resolve actual real path";

  @VisibleForTesting
  public static final String IOERROR_FORMAT = "I/O error attempting to process assertion for path: <%s>";

  // Shared singleton returned by instance().
  private static final Paths INSTANCE = new Paths();

  // Collaborators are package-visible (not final) so tests can replace them.
  @VisibleForTesting
  Diff diff = new Diff();
  @VisibleForTesting
  BinaryDiff binaryDiff = new BinaryDiff();
  @VisibleForTesting
  Failures failures = Failures.instance();

  // Indirection over java.nio.file.Files calls, injectable for testing.
  private NioFilesWrapper nioFilesWrapper;

  /** Returns the singleton instance of this class. */
  public static Paths instance() {
    return INSTANCE;
  }

  /** Test-only constructor allowing a mock {@link NioFilesWrapper} to be injected. */
  @VisibleForTesting
  Paths(NioFilesWrapper nioFilesWrapper) {
    this.nioFilesWrapper = nioFilesWrapper;
  }

  private Paths() {
    this(NioFilesWrapper.instance());
  }

  /** Asserts that {@code actual} is non-null, exists and is readable. */
  public void assertIsReadable(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    assertExists(info, actual);
    if (!nioFilesWrapper.isReadable(actual)) throw failures.failure(info, shouldBeReadable(actual));
  }

  /** Asserts that {@code actual} is non-null, exists and is writable. */
  public void assertIsWritable(AssertionInfo info, Path actual) {
    assertNotNull(info, actual);
    assertExists(info, actual);
    if (!nioFilesWrapper.isWritable(actual)) throw failures.failure(info, shouldBeWritable(actual));
  }

  /** Asserts that {@code actual} is non-null, exists and is executable. */
  public void assertIsExecutable(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    assertExists(info, actual);
    if (!nioFilesWrapper.isExecutable(actual)) throw failures.failure(info, shouldBeExecutable(actual));
  }

  /** Asserts that {@code actual} is non-null and exists (symbolic links are followed). */
  public void assertExists(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (!nioFilesWrapper.exists(actual)) throw failures.failure(info, shouldExist(actual));
  }

  /** Asserts that {@code actual} exists without following symbolic links (a broken link still "exists"). */
  public void assertExistsNoFollowLinks(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (!nioFilesWrapper.exists(actual, LinkOption.NOFOLLOW_LINKS)) throw failures.failure(info, shouldExistNoFollowLinks(actual));
  }

  /** Asserts that {@code actual} does not exist (symbolic links are not followed). */
  public void assertDoesNotExist(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    // notExists is not the negation of exists: both are false when existence cannot be determined.
    if (!nioFilesWrapper.notExists(actual, LinkOption.NOFOLLOW_LINKS)) throw failures.failure(info, shouldNotExist(actual));
  }

  /** Asserts that {@code actual} exists and is a regular file. */
  public void assertIsRegularFile(final AssertionInfo info, final Path actual) {
    assertExists(info, actual);
    if (!nioFilesWrapper.isRegularFile(actual)) throw failures.failure(info, shouldBeRegularFile(actual));
  }

  /** Asserts that {@code actual} exists and is a directory. */
  public void assertIsDirectory(final AssertionInfo info, final Path actual) {
    assertExists(info, actual);
    if (!nioFilesWrapper.isDirectory(actual)) throw failures.failure(info, shouldBeDirectory(actual));
  }

  /** Asserts that {@code actual} exists (link itself, not its target) and is a symbolic link. */
  public void assertIsSymbolicLink(final AssertionInfo info, final Path actual) {
    assertExistsNoFollowLinks(info, actual);
    if (!nioFilesWrapper.isSymbolicLink(actual)) throw failures.failure(info, shouldBeSymbolicLink(actual));
  }

  /** Asserts that {@code actual} is an absolute path; purely syntactic, no file-system access. */
  public void assertIsAbsolute(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (!actual.isAbsolute()) throw failures.failure(info, shouldBeAbsolutePath(actual));
  }

  /** Asserts that {@code actual} is a relative path; purely syntactic, no file-system access. */
  public void assertIsRelative(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (actual.isAbsolute()) throw failures.failure(info, shouldBeRelativePath(actual));
  }

  /** Asserts that {@code actual} is already in normalized form (no redundant "." / ".." elements). */
  public void assertIsNormalized(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (!actual.normalize().equals(actual)) throw failures.failure(info, shouldBeNormalized(actual));
  }

  /**
   * Asserts that {@code actual} equals its own real path (canonical form).
   *
   * @throws PathsException if the real path cannot be resolved (e.g. the path does not exist).
   */
  public void assertIsCanonical(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    try {
      if (!actual.equals(actual.toRealPath())) throw failures.failure(info, shouldBeCanonicalPath(actual));
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ACTUAL_REAL_PATH, e);
    }
  }

  /**
   * Asserts that the canonical parent of {@code actual} equals the canonical form of {@code expected}.
   * Both paths are resolved via {@code toRealPath()} first; see {@link #assertHasParentRaw} for the
   * purely textual variant.
   *
   * @throws PathsException if either real path cannot be resolved.
   */
  public void assertHasParent(final AssertionInfo info, final Path actual, final Path expected) {
    assertNotNull(info, actual);
    checkExpectedParentPathIsNotNull(expected);
    final Path canonicalActual;
    try {
      canonicalActual = actual.toRealPath();
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ACTUAL_REAL_PATH, e);
    }
    final Path canonicalExpected;
    try {
      canonicalExpected = expected.toRealPath();
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ARGUMENT_REAL_PATH, e);
    }
    final Path actualParent = canonicalActual.getParent();
    // Distinct error shapes: "no parent at all" vs "has a different parent".
    if (actualParent == null) throw failures.failure(info, shouldHaveParent(actual, expected));
    if (!actualParent.equals(canonicalExpected)) throw failures.failure(info, shouldHaveParent(actual, actualParent, expected));
  }

  /** Textual variant of {@link #assertHasParent}: compares {@code actual.getParent()} without canonicalization. */
  public void assertHasParentRaw(final AssertionInfo info, final Path actual, final Path expected) {
    assertNotNull(info, actual);
    checkExpectedParentPathIsNotNull(expected);
    final Path actualParent = actual.getParent();
    if (actualParent == null) throw failures.failure(info, shouldHaveParent(actual, expected));
    if (!actualParent.equals(expected)) throw failures.failure(info, shouldHaveParent(actual, actualParent, expected));
  }

  /**
   * Asserts that the canonical form of {@code actual} has no parent (i.e. is a root).
   *
   * @throws PathsException if the real path cannot be resolved.
   */
  public void assertHasNoParent(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    try {
      final Path canonicalActual = actual.toRealPath();
      if (canonicalActual.getParent() != null) throw failures.failure(info, shouldHaveNoParent(actual));
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ACTUAL_REAL_PATH, e);
    }
  }

  /** Textual variant of {@link #assertHasNoParent}: checks {@code actual.getParent()} directly. */
  public void assertHasNoParentRaw(final AssertionInfo info, final Path actual) {
    assertNotNull(info, actual);
    if (actual.getParent() != null) throw failures.failure(info, shouldHaveNoParent(actual));
  }

  /**
   * Asserts that the canonical form of {@code actual} starts with the canonical form of {@code start}.
   *
   * @throws PathsException if either real path cannot be resolved.
   */
  public void assertStartsWith(final AssertionInfo info, final Path actual, final Path start) {
    assertNotNull(info, actual);
    assertExpectedStartPathIsNotNull(start);
    final Path canonicalActual;
    try {
      canonicalActual = actual.toRealPath();
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ACTUAL_REAL_PATH, e);
    }
    final Path canonicalOther;
    try {
      canonicalOther = start.toRealPath();
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ARGUMENT_REAL_PATH, e);
    }
    if (!canonicalActual.startsWith(canonicalOther)) throw failures.failure(info, shouldStartWith(actual, start));
  }

  /** Textual variant of {@link #assertStartsWith}: no canonicalization of either path. */
  public void assertStartsWithRaw(final AssertionInfo info, final Path actual, final Path other) {
    assertNotNull(info, actual);
    assertExpectedStartPathIsNotNull(other);
    if (!actual.startsWith(other)) throw failures.failure(info, shouldStartWith(actual, other));
  }

  /**
   * Asserts that the canonical form of {@code actual} ends with {@code end} (which is only normalized,
   * not canonicalized — it may not exist).
   *
   * @throws PathsException if the real path of {@code actual} cannot be resolved.
   */
  public void assertEndsWith(final AssertionInfo info, final Path actual, final Path end) {
    assertNotNull(info, actual);
    assertExpectedEndPathIsNotNull(end);
    try {
      final Path canonicalActual = actual.toRealPath();
      if (!canonicalActual.endsWith(end.normalize())) throw failures.failure(info, shouldEndWith(actual, end));
    } catch (IOException e) {
      throw new PathsException(FAILED_TO_RESOLVE_ACTUAL_REAL_PATH, e);
    }
  }

  /** Textual variant of {@link #assertEndsWith}: neither path is resolved or normalized. */
  public void assertEndsWithRaw(final AssertionInfo info, final Path actual, final Path end) {
    assertNotNull(info, actual);
    assertExpectedEndPathIsNotNull(end);
    if (!actual.endsWith(end)) throw failures.failure(info, shouldEndWith(actual, end));
  }

  /**
   * Asserts that the file name element of {@code actual} matches {@code fileName}.
   * NOTE(review): {@code actual.getFileName()} returns null for root paths, which would NPE here
   * rather than produce an assertion failure — confirm callers never pass a root path.
   */
  public void assertHasFileName(final AssertionInfo info, Path actual, String fileName) {
    assertNotNull(info, actual);
    checkNotNull(fileName, "expected fileName should not be null");
    if (!actual.getFileName().endsWith(fileName)) throw failures.failure(info, shouldHaveName(actual, fileName));
  }

  // Null-guard helpers: actual-null becomes an assertion failure, expected-null an IllegalArgument-style error.

  private static void assertNotNull(final AssertionInfo info, final Path actual) {
    Objects.instance().assertNotNull(info, actual);
  }

  private static void checkExpectedParentPathIsNotNull(final Path expected) {
    checkNotNull(expected, "expected parent path should not be null");
  }

  private static void assertExpectedStartPathIsNotNull(final Path start) {
    checkNotNull(start, "the expected start path should not be null");
  }

  private static void assertExpectedEndPathIsNotNull(final Path end) {
    checkNotNull(end, "the expected end path should not be null");
  }

  /**
   * Asserts that the text content of {@code actual}, decoded with {@code charset}, equals {@code expected}.
   *
   * @throws RuntimeIOException if reading {@code actual} fails.
   */
  public void assertHasContent(final AssertionInfo info, Path actual, String expected, Charset charset) {
    checkNotNull(expected, "The text to compare to should not be null");
    assertIsReadable(info, actual);
    try {
      List<Delta<String>> diffs = diff.diff(actual, expected, charset);
      if (diffs.isEmpty()) return;
      throw failures.failure(info, shouldHaveContent(actual, charset, diffs));
    } catch (IOException e) {
      throw new RuntimeIOException(format("Unable to verify text contents of path:<%s>", actual), e);
    }
  }

  /**
   * Asserts that the binary content of {@code actual} equals {@code expected}.
   *
   * @throws RuntimeIOException if reading {@code actual} fails.
   */
  public void assertHasBinaryContent(AssertionInfo info, Path actual, byte[] expected) {
    checkNotNull(expected, "The binary content to compare to should not be null");
    assertIsReadable(info, actual);
    try {
      BinaryDiffResult diffResult = binaryDiff.diff(actual, expected);
      if (diffResult.hasNoDiff()) return;
      throw failures.failure(info, shouldHaveBinaryContent(actual, diffResult));
    } catch (IOException e) {
      throw new RuntimeIOException(format("Unable to verify binary contents of path:<%s>", actual), e);
    }
  }

  /**
   * Asserts that {@code actual} (decoded with {@code actualCharset}) has the same text content as
   * {@code expected} (decoded with {@code expectedCharset}).
   *
   * @throws RuntimeIOException if reading either path fails.
   */
  public void assertHasSameContentAs(AssertionInfo info, Path actual, Charset actualCharset, Path expected, Charset expectedCharset) {
    checkNotNull(expected, "The given Path to compare actual content to should not be null");
    checkArgument(nioFilesWrapper.isReadable(expected), "The given Path <%s> to compare actual content to should be readable", expected);
    assertIsReadable(info, actual);
    try {
      List<Delta<String>> diffs = diff.diff(actual, actualCharset, expected, expectedCharset);
      if (diffs.isEmpty()) return;
      throw failures.failure(info, shouldHaveSameContent(actual, expected, diffs));
    } catch (IOException e) {
      throw new RuntimeIOException(format("Unable to compare contents of paths:<%s> and:<%s>", actual, expected), e);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.visor.node;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Set;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.visor.VisorDataTransferObject;

/**
 * Data collector task arguments.
 * <p>
 * Serialized across the wire via {@link #writeExternalData(ObjectOutput)} /
 * {@link #readExternalData(byte, ObjectInput)}; field order there defines the binary protocol,
 * currently at version {@code V3} (see {@link #getProtocolVersion()}).
 */
public class VisorNodeDataCollectorTaskArg extends VisorDataTransferObject {
    /** Serialization is handled by the versioned external-data protocol, not Java default serialization. */
    private static final long serialVersionUID = 0L;

    /** Whether task monitoring should be enabled. */
    private boolean taskMonitoringEnabled;

    /** Visor unique key to get last event order from node local storage. */
    private String evtOrderKey;

    /** Visor unique key to get lost events throttle counter from node local storage. */
    private String evtThrottleCntrKey;

    /** If {@code true} then collect information about system caches. */
    private boolean sysCaches;

    /** If {@code false} then cache metrics will not be collected. */
    private boolean collectCacheMetrics;

    /** Optional Set of cache groups, if provided, then caches only from that groups will be collected. */
    private Set<String> cacheGrps;

    /**
     * Default constructor.
     */
    public VisorNodeDataCollectorTaskArg() {
        // No-op.
    }

    /**
     * Create task arguments with given parameters.
     *
     * @param taskMonitoringEnabled If {@code true} then Visor should collect information about tasks.
     * @param evtOrderKey Event order key, unique for Visor instance.
     * @param evtThrottleCntrKey Event throttle counter key, unique for Visor instance.
     * @param sysCaches If {@code true} then collect information about system caches.
     * @param collectCacheMetrics If {@code false} then cache metrics will not be collected.
     * @param cacheGrps Optional Set of cache groups, if provided, then caches only from that groups will be collected.
     */
    public VisorNodeDataCollectorTaskArg(
        boolean taskMonitoringEnabled,
        String evtOrderKey,
        String evtThrottleCntrKey,
        boolean sysCaches,
        boolean collectCacheMetrics,
        Set<String> cacheGrps
    ) {
        this.taskMonitoringEnabled = taskMonitoringEnabled;
        this.evtOrderKey = evtOrderKey;
        this.evtThrottleCntrKey = evtThrottleCntrKey;
        this.sysCaches = sysCaches;
        this.collectCacheMetrics = collectCacheMetrics;
        this.cacheGrps = cacheGrps;
    }

    /**
     * Create task arguments with given parameters (no cache-group filter: all caches collected).
     *
     * @param taskMonitoringEnabled If {@code true} then Visor should collect information about tasks.
     * @param evtOrderKey Event order key, unique for Visor instance.
     * @param evtThrottleCntrKey Event throttle counter key, unique for Visor instance.
     * @param sysCaches If {@code true} then collect information about system caches.
     * @param collectCacheMetrics If {@code false} then cache metrics will not be collected.
     */
    public VisorNodeDataCollectorTaskArg(
        boolean taskMonitoringEnabled,
        String evtOrderKey,
        String evtThrottleCntrKey,
        boolean sysCaches,
        boolean collectCacheMetrics
    ) {
        this(taskMonitoringEnabled, evtOrderKey, evtThrottleCntrKey, sysCaches, collectCacheMetrics, null);
    }

    /**
     * Create task arguments with given parameters (cache metrics collected, no cache-group filter).
     *
     * @param taskMonitoringEnabled If {@code true} then Visor should collect information about tasks.
     * @param evtOrderKey Event order key, unique for Visor instance.
     * @param evtThrottleCntrKey Event throttle counter key, unique for Visor instance.
     * @param sysCaches If {@code true} then collect information about system caches.
     */
    public VisorNodeDataCollectorTaskArg(
        boolean taskMonitoringEnabled,
        String evtOrderKey,
        String evtThrottleCntrKey,
        boolean sysCaches
    ) {
        this(taskMonitoringEnabled, evtOrderKey, evtThrottleCntrKey, sysCaches, true, null);
    }

    /**
     * @return {@code true} if Visor should collect information about tasks.
     */
    public boolean isTaskMonitoringEnabled() {
        return taskMonitoringEnabled;
    }

    /**
     * @param taskMonitoringEnabled If {@code true} then Visor should collect information about tasks.
     */
    public void setTaskMonitoringEnabled(boolean taskMonitoringEnabled) {
        this.taskMonitoringEnabled = taskMonitoringEnabled;
    }

    /**
     * @return Key for store and read last event order number.
     */
    public String getEventsOrderKey() {
        return evtOrderKey;
    }

    /**
     * @param evtOrderKey Key for store and read last event order number.
     */
    public void setEventsOrderKey(String evtOrderKey) {
        this.evtOrderKey = evtOrderKey;
    }

    /**
     * @return Key for store and read events throttle counter.
     */
    public String getEventsThrottleCounterKey() {
        return evtThrottleCntrKey;
    }

    /**
     * @param evtThrottleCntrKey Key for store and read events throttle counter.
     */
    public void setEventsThrottleCounterKey(String evtThrottleCntrKey) {
        this.evtThrottleCntrKey = evtThrottleCntrKey;
    }

    /**
     * @return {@code true} if Visor should collect information about system caches.
     */
    public boolean getSystemCaches() {
        return sysCaches;
    }

    /**
     * @param sysCaches {@code true} if Visor should collect information about system caches.
     */
    public void setSystemCaches(boolean sysCaches) {
        this.sysCaches = sysCaches;
    }

    /**
     * @return If {@code false} then cache metrics will not be collected.
     */
    public boolean isCollectCacheMetrics() {
        return collectCacheMetrics;
    }

    /**
     * @param collectCacheMetrics If {@code false} then cache metrics will not be collected.
     */
    public void setCollectCacheMetrics(boolean collectCacheMetrics) {
        this.collectCacheMetrics = collectCacheMetrics;
    }

    /**
     * @return Optional cache group, if provided, then caches only from that group will be collected.
     */
    public Set<String> getCacheGroups() {
        return cacheGrps;
    }

    /**
     * @param cacheGrps Optional Set of cache groups, if provided, then caches only from that groups will be collected.
     */
    public void setCacheGroups(Set<String> cacheGrps) {
        this.cacheGrps = cacheGrps;
    }

    /** {@inheritDoc} */
    @Override public byte getProtocolVersion() {
        return V3;
    }

    /** {@inheritDoc} */
    @Override protected void writeExternalData(ObjectOutput out) throws IOException {
        // NOTE: write order defines the wire format — keep in sync with readExternalData.
        out.writeBoolean(taskMonitoringEnabled);
        U.writeString(out, evtOrderKey);
        U.writeString(out, evtThrottleCntrKey);
        out.writeBoolean(sysCaches);
        out.writeBoolean(collectCacheMetrics);
        U.writeCollection(out, cacheGrps);
    }

    /** {@inheritDoc} */
    @Override protected void readExternalData(byte protoVer, ObjectInput in) throws IOException, ClassNotFoundException {
        taskMonitoringEnabled = in.readBoolean();
        evtOrderKey = U.readString(in);
        evtThrottleCntrKey = U.readString(in);
        sysCaches = in.readBoolean();
        // collectCacheMetrics was added in V2: older senders did not write it, default to true.
        collectCacheMetrics = protoVer < V2 || in.readBoolean();
        // cacheGrps filter was added in V3: older senders did not write it, default to no filter.
        cacheGrps = protoVer < V3 ? null : U.readSet(in);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(VisorNodeDataCollectorTaskArg.class, this);
    }
}
/* * Copyright 2017 Hewlett-Packard Development Company, L.P. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.hp.application.automation.tools.octane.tests; import com.hp.application.automation.tools.octane.ResultQueue; import com.hp.application.automation.tools.octane.tests.junit.TestResultStatus; import hudson.matrix.*; import hudson.maven.MavenModuleSet; import hudson.model.AbstractBuild; import hudson.model.FreeStyleProject; import hudson.tasks.Maven; import hudson.tasks.junit.JUnitResultArchiver; import org.junit.*; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.ToolInstallations; import java.io.File; import java.io.FileNotFoundException; import java.util.*; @SuppressWarnings({"squid:S2699","squid:S3658","squid:S2259","squid:S1872","squid:S2925","squid:S109","squid:S1607","squid:S2701","squid:S2698"}) public class JUnitResultsTest { private static Set<String> helloWorld2Tests = new HashSet<>(); static { helloWorld2Tests.add(TestUtils.testSignature("helloWorld2", "hello", "HelloWorld2Test", "testOnce", TestResultStatus.PASSED)); helloWorld2Tests.add(TestUtils.testSignature("helloWorld2", "hello", "HelloWorld2Test", "testDoce", TestResultStatus.PASSED)); } private static Set<String> subFolderHelloWorldTests = new HashSet<>(); static { subFolderHelloWorldTests.add(TestUtils.testSignature("subFolder/helloWorld", "hello", "HelloWorldTest", "testOne", TestResultStatus.PASSED)); 
subFolderHelloWorldTests.add(TestUtils.testSignature("subFolder/helloWorld", "hello", "HelloWorldTest", "testTwo", TestResultStatus.FAILED)); subFolderHelloWorldTests.add(TestUtils.testSignature("subFolder/helloWorld", "hello", "HelloWorldTest", "testThree", TestResultStatus.SKIPPED)); } @ClassRule public static final JenkinsRule rule = new JenkinsRule(); private static String mavenName; private TestQueue queue; @BeforeClass public static void prepareClass() throws Exception { rule.jenkins.setNumExecutors(10); Maven.MavenInstallation mavenInstallation = ToolInstallations.configureMaven3(); mavenName = mavenInstallation.getName(); } @Before public void prepareTest() { TestListener testListener = ExtensionUtil.getInstance(rule, TestListener.class); queue = new TestQueue(); testListener._setTestResultQueue(queue); } @Test public void testJUnitResults() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); FreeStyleProject project = rule.createFreeStyleProject(projectName); project.getBuildersList().add(new Maven("-s settings.xml clean test", mavenName, null, null, "-Dmaven.test.failure.ignore=true")); project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml")); project.setScm(new CopyResourceSCM("/helloWorldRoot")); AbstractBuild build = TestUtils.runAndCheckBuild(project); matchTests(build, projectName, TestUtils.helloWorldTests, helloWorld2Tests); Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems()); } @Test public void testJUnitResultsPom() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); FreeStyleProject project = rule.createFreeStyleProject(projectName); project.getBuildersList().add(new Maven("-s subFolder/settings.xml clean test", mavenName, "subFolder/helloWorld/pom.xml", null, "-Dmaven.test.failure.ignore=true")); project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml")); project.setScm(new 
CopyResourceSCM("/helloWorldRoot", "subFolder")); AbstractBuild build = TestUtils.runAndCheckBuild(project); matchTests(build, projectName, subFolderHelloWorldTests); Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems()); } @Test public void testJUnitResultsTwoPoms() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); FreeStyleProject project = rule.createFreeStyleProject(projectName); project.getBuildersList().add(new Maven("-s settings.xml clean test", mavenName, "helloWorld/pom.xml", null, "-Dmaven.test.failure.ignore=true")); project.getBuildersList().add(new Maven("-s settings.xml clean test", mavenName, "helloWorld2/pom.xml", null, "-Dmaven.test.failure.ignore=true")); project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml")); project.setScm(new CopyResourceSCM("/helloWorldRoot")); AbstractBuild build = TestUtils.runAndCheckBuild(project); matchTests(build, projectName, TestUtils.helloWorldTests, helloWorld2Tests); Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems()); } @Test public void testJUnitResultsLegacy() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); MavenModuleSet project = rule.createProject(MavenModuleSet.class, projectName); project.runHeadless(); project.setMaven(mavenName); project.setGoals("-s settings.xml clean test -Dmaven.test.failure.ignore=true"); project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml")); project.setScm(new CopyResourceSCM("/helloWorldRoot")); AbstractBuild build = TestUtils.runAndCheckBuild(project); matchTests(build, projectName, TestUtils.helloWorldTests, helloWorld2Tests); Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems()); } @Test public void testJUnitResultsLegacyWithoutJUnitArchiver() throws Exception { String projectName = "root-job-" + UUID.randomUUID().toString(); MavenModuleSet project = 
rule.createProject(MavenModuleSet.class, projectName);
        project.runHeadless();
        project.setMaven(mavenName);
        project.setGoals("-s settings.xml clean test -Dmaven.test.failure.ignore=true");
        project.setScm(new CopyResourceSCM("/helloWorldRoot"));
        AbstractBuild build = TestUtils.runAndCheckBuild(project);
        // Expect the JUnit results of both modules of the multi-module build to be reported.
        matchTests(build, projectName, TestUtils.helloWorldTests, helloWorld2Tests);
        // Exactly one result-queue item ("<job>#<buildNumber>") should have been enqueued.
        Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems());
    }

    // Verifies JUnit result collection when the maven root POM lives in a sub-folder
    // of the workspace (legacy checkout layout).
    @Test
    public void testJUnitResultsLegacySubfolder() throws Exception {
        String projectName = "root-job-" + UUID.randomUUID().toString();
        MavenModuleSet project = rule.createProject(MavenModuleSet.class, projectName);
        project.runHeadless();
        project.setMaven(mavenName);
        project.setRootPOM("subFolder/helloWorld/pom.xml");
        project.setGoals("-s settings.xml clean test -Dmaven.test.failure.ignore=true");
        project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml"));
        project.setScm(new CopyResourceSCM("/helloWorldRoot", "subFolder"));
        AbstractBuild build = TestUtils.runAndCheckBuild(project);
        matchTests(build, projectName, subFolderHelloWorldTests);
        Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems());
    }

    // Verifies that workspace-relative path prefixes are stripped from reported
    // test locations (UFT-style custom result file).
    @Test
    public void testJUnitResultsWorkspaceStripping() throws Exception {
        Set<String> uftTests = new HashSet<>();
        uftTests.add(TestUtils.testSignature("", "All-Tests", "<None>",
                "subfolder" + File.separator + "CalculatorPlusNextGen", TestResultStatus.FAILED));
        String projectName = "root-job-" + UUID.randomUUID().toString();
        FreeStyleProject project = rule.createFreeStyleProject(projectName);
        project.getPublishersList().add(new TestCustomJUnitArchiver("UFT_results.xml"));
        project.setScm(new CopyResourceSCM("/UFT"));
        AbstractBuild build = TestUtils.runAndCheckBuild(project);
        matchTests(build, projectName, uftTests);
        Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems());
    }

    @Test
    public void testJUnitResultsFreeStyleModule() throws Exception {
        // this scenario simulates FreeStyle project with maven executed via shell (by not using Maven builder directly)
        String projectName = "root-job-" + UUID.randomUUID().toString();
        FreeStyleProject project = rule.createFreeStyleProject(projectName);
        project.getBuildersList().add(new Maven("-s settings.xml clean test", mavenName, null, null,
                "-Dmaven.test.failure.ignore=true"));
        project.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml"));
        project.setScm(new CopyResourceSCM("/helloWorldRoot"));
        AbstractBuild build = TestUtils.runAndCheckBuild(project);
        matchTests(build, projectName, TestUtils.helloWorldTests, helloWorld2Tests);
        Assert.assertEquals(Collections.singleton(projectName + "#1"), getQueuedItems());
    }

    // Verifies per-axis result collection for a matrix (multi-configuration) project:
    // each matrix run reports its own tests and enqueues its own result item, and no
    // aggregate mqmTests.xml is written for the parent build.
    @Test
    public void testJUnitResultsMatrixProject() throws Exception {
        String projectName = "root-job-" + UUID.randomUUID().toString();
        MatrixProject matrixProject = rule.createProject(MatrixProject.class, projectName);
        matrixProject.setAxes(new AxisList(new Axis("osType", "Linux", "Windows")));
        matrixProject.getBuildersList().add(new Maven("-s settings.xml clean test -Dmaven.test.failure.ignore=true -X", mavenName));
        matrixProject.getPublishersList().add(new JUnitResultArchiver("**/target/surefire-reports/*.xml"));
        matrixProject.setScm(new CopyResourceSCM("/helloWorldRoot"));
        MatrixBuild build = (MatrixBuild) TestUtils.runAndCheckBuild(matrixProject);
        for (MatrixRun run : build.getExactRuns()) {
            matchTests(run, projectName, TestUtils.helloWorldTests, helloWorld2Tests);
        }
        Assert.assertEquals(new HashSet<>(Arrays.asList(projectName + "/osType=Windows#1",
                projectName + "/osType=Linux#1")), getQueuedItems());
        Assert.assertFalse(new File(build.getRootDir(), "mqmTests.xml").exists());
    }

    // Drains the result queue, returning "<projectName>#<buildNumber>" signatures of
    // all queued items (the queue is empty afterwards).
    private Set<String> getQueuedItems() {
        Set<String> ret = new HashSet<>();
        ResultQueue.QueueItem item;
        while ((item = queue.peekFirst()) != null) {
            ret.add(item.getProjectName() + "#" + item.getBuildNumber());
            queue.remove();
        }
        return ret;
    }

    // Asserts that the mqmTests.xml written into the build directory contains exactly
    // the union of the expected test signatures.
    private void matchTests(AbstractBuild build, String projectName, Set<String>... expectedTests) throws FileNotFoundException {
        File mqmTestsXml = new File(build.getRootDir(), "mqmTests.xml");
        TestUtils.matchTests(new TestResultIterable(mqmTestsXml), projectName, build.getStartTimeInMillis(), expectedTests);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.spi.observation;

import org.junit.Test;

import static com.google.common.collect.ImmutableSet.of;
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;

/**
 * Tests for merging {@code ChangeSet}s into a {@code ChangeSetBuilder}.
 *
 * <p>Convention used throughout: suffixes "-1" come from the builder under test
 * (via {@link #add(ChangeSetBuilder, String)}), suffixes "-2"/"-3" from the
 * ChangeSet merged into it. A {@code null} category in a ChangeSet means that
 * category has overflown; overflow must be sticky across merges.
 */
public class ChangeSetBuilderTest {

    @Test
    public void basicMerge() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(5, 2);
        add(cb1, "1");

        ChangeSetBuilder cb2 = new ChangeSetBuilder(5, 2);
        add(cb2, "2");

        // Merging cb2 into cb1 must union every category.
        ChangeSet cs = cb1.add(cb2.build()).build();
        assertThat(cs.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
        assertThat(cs.getParentPaths(), containsInAnyOrder("p-1", "p-2"));
        assertThat(cs.getParentNodeNames(), containsInAnyOrder("nn-1", "nn-2"));
        assertThat(cs.getParentNodeTypes(), containsInAnyOrder("pnt-1", "pnt-2"));
        assertThat(cs.getPropertyNames(), containsInAnyOrder("pn-1", "pn-2"));
    }

    @Test
    public void addedChangeSetAlreadyOverflown() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(5, 2);
        add(cb1, "1");

        // Incoming ChangeSet has a null (= overflown) parent-paths category.
        ChangeSet cs1 = new ChangeSet(2, null, of("nn-2"), of("nt-2"), of("pn-2"), of("nt-2"));
        ChangeSet mcs1 = cb1.add(cs1).build();
        // Overflow must propagate to the merged result ...
        assertNull(mcs1.getParentPaths());
        // ... while the other categories still union.
        assertThat(mcs1.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
    }

    @Test
    public void overflowPath() throws Exception{
        // maxItems = 2: the parent-paths category is already at capacity.
        ChangeSetBuilder cb1 = new ChangeSetBuilder(2, 2);
        add(cb1, "1");

        ChangeSet cs1 = new ChangeSet(2, null, of("nn-2"), of("pnt-2"), of("pn-2"), of("nt-2"));
        ChangeSet cs = cb1.add(cs1).build();
        assertNull(cs.getParentPaths());
        assertThat(cs.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
        assertThat(cs.getParentNodeNames(), containsInAnyOrder("nn-1", "nn-2"));
        assertThat(cs.getParentNodeTypes(), containsInAnyOrder("pnt-1", "pnt-2"));
        assertThat(cs.getPropertyNames(), containsInAnyOrder("pn-1", "pn-2"));

        // Merging more paths than maxItems allows must also overflow the category.
        ChangeSet cs2 = new ChangeSet(2, of("p-2", "p-3"), of("nn-2"), of("pnt-2"), of("pn-2"), of("nt-2"));
        cs = cb1.add(cs2).build();
        assertNull(cs.getParentPaths());
    }

    @Test
    public void overflowParentNodeName() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(2, 2);
        add(cb1, "1");

        // Incoming parent-node-names category is overflown (null).
        ChangeSet cs1 = new ChangeSet(2, of("p-2"), null, of("pnt-2"), of("pn-2"), of("nt-2"));
        ChangeSet cs = cb1.add(cs1).build();
        assertNull(cs.getParentNodeNames());
        assertThat(cs.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
        assertThat(cs.getParentNodeTypes(), containsInAnyOrder("pnt-1", "pnt-2"));
        assertThat(cs.getPropertyNames(), containsInAnyOrder("pn-1", "pn-2"));

        // Exceeding maxItems on merge overflows the category as well.
        ChangeSet cs2 = new ChangeSet(2, of("p-2"), of("nn-2", "nn-3"), of("pnt-2"), of("pn-2"), of("nt-2"));
        cs = cb1.add(cs2).build();
        assertNull(cs.getParentNodeNames());
    }

    @Test
    public void overflowParentNodeTypes() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(2, 2);
        add(cb1, "1");

        ChangeSet cs1 = new ChangeSet(2, of("p-2"), of("nn-2"), null, of("pn-2"), of("nt-2"));
        ChangeSet cs = cb1.add(cs1).build();
        assertNull(cs.getParentNodeTypes());
        assertThat(cs.getParentPaths(), containsInAnyOrder("p-1", "p-2"));
        assertThat(cs.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
        assertThat(cs.getParentNodeNames(), containsInAnyOrder("nn-1", "nn-2"));
        assertThat(cs.getPropertyNames(), containsInAnyOrder("pn-1", "pn-2"));
    }

    @Test
    public void overflowPropertyNames() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(2, 2);
        add(cb1, "1");

        ChangeSet cs1 = new ChangeSet(2, of("p-2"), of("nn-2"), of("pnt-2"), null, of("nt-2"));
        ChangeSet cs = cb1.add(cs1).build();
        assertNull(cs.getPropertyNames());
        assertThat(cs.getParentPaths(), containsInAnyOrder("p-1", "p-2"));
        assertThat(cs.getAllNodeTypes(), containsInAnyOrder("nt-1", "nt-2"));
        assertThat(cs.getParentNodeNames(), containsInAnyOrder("nn-1", "nn-2"));
        assertThat(cs.getParentNodeTypes(), containsInAnyOrder("pnt-1", "pnt-2"));
    }

    @Test
    public void overflowAllNodeTypes() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(2, 2);
        add(cb1, "1");

        ChangeSet cs1 = new ChangeSet(2, of("p-2"), of("nn-2"), of("pnt-2"), of("pn-2"), null);
        ChangeSet cs = cb1.add(cs1).build();
        assertNull(cs.getAllNodeTypes());
        assertThat(cs.getParentPaths(), containsInAnyOrder("p-1", "p-2"));
        assertThat(cs.getParentNodeNames(), containsInAnyOrder("nn-1", "nn-2"));
        assertThat(cs.getParentNodeTypes(), containsInAnyOrder("pnt-1", "pnt-2"));
        assertThat(cs.getPropertyNames(), containsInAnyOrder("pn-1", "pn-2"));
    }

    @Test
    public void pathDepth() throws Exception{
        // maxPathDepth = 2: deeper paths are truncated to their depth-2 ancestor.
        ChangeSetBuilder cb = new ChangeSetBuilder(10, 2);
        cb.addParentPath("/a/b");
        cb.addParentPath("/x");
        cb.addParentPath("/p/q/r");

        ChangeSet cs = cb.build();
        assertThat(cs.getParentPaths(), containsInAnyOrder("/a/b", "/x", "/p/q"));
    }

    @Test
    public void changeSetDepthMoreThanBuilder() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(10, 3);
        cb1.addParentPath("/x");
        cb1.addParentPath("/x/y");
        cb1.addParentPath("/x/y/z");

        // cb2 collected paths at a greater depth; on merge they must be chopped
        // down to cb1's (smaller) depth.
        ChangeSetBuilder cb2 = new ChangeSetBuilder(10, 8);
        cb2.addParentPath("/p");
        cb2.addParentPath("/p/q");
        cb2.addParentPath("/p/q/r");
        cb2.addParentPath("/a/b/c/d");
        cb2.addParentPath("/a/b/x/y/z");

        cb1.add(cb2.build());
        ChangeSet cs = cb1.build();
        assertThat(cs.getParentPaths(), containsInAnyOrder(
                "/x", "/x/y", "/x/y/z",
                "/p", "/p/q", "/p/q/r",
                "/a/b/c", "/a/b/x" //Chopped paths
        ));
        // The resulting depth is the smaller of the two (cb1's).
        assertEquals(cb1.getMaxPrefilterPathDepth(), cs.getMaxPrefilterPathDepth());
    }

    @Test
    public void builderDepthMoreThanChangeSet() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(10, 8);
        cb1.addParentPath("/p");
        cb1.addParentPath("/p/q");
        cb1.addParentPath("/p/q/r");
        cb1.addParentPath("/a/b/c/d");
        cb1.addParentPath("/a/b/x/y/z");

        // Merging a shallower ChangeSet lowers the builder's effective depth,
        // chopping the builder's own deeper paths.
        ChangeSetBuilder cb2 = new ChangeSetBuilder(10, 2);
        cb2.addParentPath("/x");
        cb2.addParentPath("/x/y");

        cb1.add(cb2.build());
        ChangeSet cs = cb1.build();
        assertThat(cs.getParentPaths(), containsInAnyOrder(
                "/x", "/x/y",
                "/p", "/p/q",
                "/a/b" //Chopped paths
        ));
        assertEquals(cb2.getMaxPrefilterPathDepth(), cs.getMaxPrefilterPathDepth());
    }

    @Test
    public void nullChangeSet() throws Exception{
        ChangeSetBuilder cb1 = new ChangeSetBuilder(10, 8);
        add(cb1, "1");
        // A null ChangeSet represents "unknown changes": every category overflows.
        cb1.add(null);

        ChangeSet cs = cb1.build();
        assertNull(cs.getParentNodeTypes());
        assertNull(cs.getParentNodeNames());
        assertNull(cs.getParentPaths());
        assertNull(cs.getAllNodeTypes());
        assertNull(cs.getPropertyNames());
    }

    // Populates one entry per category, each tagged with the given suffix.
    private static void add(ChangeSetBuilder cb, String suffix){
        cb.addNodeType("nt-"+suffix)
          .addParentPath("p-"+suffix)
          .addParentNodeName("nn-"+suffix)
          .addParentNodeType("pnt-"+suffix)
          .addPropertyName("pn-"+suffix);
    }
}
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.inferred.freebuilder.processor; import static org.inferred.freebuilder.processor.NamePicker.pickName; import static org.inferred.freebuilder.processor.model.ModelUtils.asElement; import static org.inferred.freebuilder.processor.model.ModelUtils.findAnnotationMirror; import static org.inferred.freebuilder.processor.model.ModelUtils.needsSafeVarargs; import static java.util.stream.Collectors.toList; import static javax.lang.model.element.Modifier.PUBLIC; import org.inferred.freebuilder.FreeBuilder; import org.inferred.freebuilder.processor.Datatype.Visibility; import org.inferred.freebuilder.processor.source.Excerpt; import org.inferred.freebuilder.processor.source.Excerpts; import org.inferred.freebuilder.processor.source.Type; import java.util.List; import java.util.Optional; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.ExecutableType; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; /** * Metadata about a buildable user type. 
 *
 * <p>A <b>buildable</b> type is one with a Builder class providing a similar
 * API to proto or &#64;FreeBuilder:<ul>
 * <li> a public constructor, or static builder()/newBuilder() method;
 * <li> build(), buildPartial() and clear() methods; and
 * <li> a mergeFrom(Value) method.
 * </ul>
 */
@FreeBuilder
public abstract class BuildableType {

  /** How to merge the values from one Builder into another. */
  public enum MergeBuilderMethod {
    MERGE_DIRECTLY, BUILD_PARTIAL_AND_MERGE
  }

  /** How to convert a partial value into a Builder. */
  public enum PartialToBuilderMethod {
    MERGE_DIRECTLY, TO_BUILDER_AND_MERGE
  }

  /**
   * Returns the parameterized buildable type.
   *
   * <p>This may be parameterized with any compatible types, including concrete types, wildcards,
   * type variables, or generic types containing any combination of the above.
   */
  public abstract Type type();

  /** Returns the builder type that will build instances of {@link #type()}. */
  public abstract Type builderType();

  /** How values from another Builder are merged into this type's Builder. */
  public abstract MergeBuilderMethod mergeBuilder();

  /** How a partial value of this type is converted back into a Builder. */
  public abstract PartialToBuilderMethod partialToBuilder();

  /** Strategy for instantiating the Builder (constructor vs static factory). */
  public abstract BuilderFactory builderFactory();

  /** Excerpt emitting {@code @SuppressWarnings("unchecked")} when the type needs it. */
  public abstract Excerpt suppressUnchecked();

  /** Returns an excerpt calling the Builder factory method. */
  public Excerpt newBuilder(BuilderFactory.TypeInference typeInference) {
    return builderFactory().newBuilder(builderType(), typeInference);
  }

  public static class Builder extends BuildableType_Builder {}

  /**
   * Returns the parameterized Builder type nested in {@code type}, if {@code type} is buildable
   * (see the class javadoc for the required Builder API); otherwise {@link Optional#empty()}.
   */
  public static Optional<DeclaredType> maybeBuilder(
      DeclaredType type, Elements elements, Types types) {
    TypeElement element = asElement(type);

    // Find the builder
    TypeElement builder = element.getEnclosedElements()
        .stream()
        .flatMap(TYPES)
        .filter(BuildableType::isBuilderType)
        .findAny()
        .orElse(null);
    if (builder == null) {
      return Optional.empty();
    }

    // Parameterize the builder to match the element
    if (builder.getTypeParameters().size() != type.getTypeArguments().size()) {
      return Optional.empty();
    }
    DeclaredType builderMirror =
        types.getDeclaredType(builder, type.getTypeArguments().toArray(new TypeMirror[0]));

    // Verify the builder can be constructed
    BuilderFactory builderFactory = BuilderFactory.from(builder).orElse(null);
    if (builderFactory == null) {
      return Optional.empty();
    }

    /*
     * Verify essential methods are available.
     *
     * If the element is annotated @FreeBuilder, assume the necessary methods will be added. We
     * can't check directly as the builder superclass may not have been generated yet. To be
     * strictly correct, we should delay a round if an error type leaves us unsure about this kind
     * of API-changing decision, and then we would work with _any_ Builder-generating API. We
     * would need to drop out part of our own builder superclass, to prevent chains of dependent
     * buildable types leading to quadratic compilation times (not to mention cycles), and leave a
     * dangling super-superclass to pick up next round. As an optimization, though, we would
     * probably skip this for @FreeBuilder-types anyway, to avoid extra types whenever possible,
     * which leaves a lot of complicated code supporting a currently non-existent edge case.
     */
    if (findAnnotationMirror(element, FreeBuilder.class).isPresent()) {
      // Make sure the user isn't preventing us generating required methods.
      if (methodIsObscured(builderMirror, elements, types, type, "build")
          || methodIsObscured(builderMirror, elements, types, type, "buildPartial")
          || methodIsObscured(builderMirror, elements, types, builderMirror, "clear")
          || methodIsObscured(
              builderMirror, elements, types, builderMirror, "mergeFrom", builderMirror)
          || methodIsObscured(builderMirror, elements, types, builderMirror, "mergeFrom", type)) {
        return Optional.empty();
      }
    } else {
      List<ExecutableElement> methods = elements.getAllMembers(builder)
          .stream()
          .flatMap(METHODS)
          .filter(BuildableType::isCallableMethod)
          .collect(toList());

      // Check there is a build() method
      if (!methods.stream().anyMatch(new IsBuildMethod("build", type, types))) {
        return Optional.empty();
      }

      // Check there is a buildPartial() method
      if (!methods.stream().anyMatch(new IsBuildMethod("buildPartial", type, types))) {
        return Optional.empty();
      }

      // Check there is a clear() method
      if (!methods.stream().anyMatch(BuildableType::isClearMethod)) {
        return Optional.empty();
      }

      // Check there is a mergeFrom(Value) method
      if (!methods.stream().anyMatch(new IsMergeFromMethod(type, builderMirror, types))) {
        return Optional.empty();
      }
    }

    return Optional.of(builderMirror);
  }

  private static boolean methodIsObscured(
      DeclaredType targetType,
      Elements elements,
      Types types,
      DeclaredType returnType,
      String methodName,
      DeclaredType...
parameterTypes) {
    // Ask NamePicker what this method would have to be named and how visible it could
    // be; if either differs from what we need, a user-declared member is in the way.
    NameAndVisibility buildMethod =
        pickName(targetType, elements, types, returnType, methodName, parameterTypes);
    // Fixed: compare names by value. The original used `buildMethod.name() != methodName`,
    // a String reference comparison, so an equal-but-distinct String instance returned by
    // pickName would wrongly report the method as obscured.
    return !buildMethod.name().equals(methodName)
        || buildMethod.visibility() != Visibility.PUBLIC;
  }

  /**
   * Creates the BuildableType metadata for {@code datatype} and its already-verified
   * {@code builder} type (callers must have validated it, e.g. via {@link #maybeBuilder}).
   */
  public static BuildableType create(
      DeclaredType datatype, DeclaredType builder, Elements elements, Types types) {
    // Safe: maybeBuilder already established a factory exists for this builder.
    BuilderFactory builderFactory = BuilderFactory.from(asElement(builder)).get();
    MergeBuilderMethod mergeFromBuilderMethod =
        detectMergeFromBuilderMethod(builder, elements, types, asElement(datatype));
    PartialToBuilderMethod partialToBuilderMethod =
        detectPartialToBuilderMethod(datatype, builder, elements, types);
    Excerpt suppressUnchecked = suppressUncheckedExcerptFor(datatype);
    return new Builder()
        .type(Type.from(datatype))
        .builderType(Type.from(builder))
        .mergeBuilder(mergeFromBuilderMethod)
        .partialToBuilder(partialToBuilderMethod)
        .builderFactory(builderFactory)
        .suppressUnchecked(suppressUnchecked)
        .build();
  }

  /**
   * Determines how to merge one Builder into another: directly via mergeFrom(Builder) if such a
   * method exists (always the case for &#64;FreeBuilder types), otherwise by building a partial
   * value and merging that.
   */
  private static MergeBuilderMethod detectMergeFromBuilderMethod(
      DeclaredType builder, Elements elements, Types types, TypeElement datatypeElement) {
    if (findAnnotationMirror(datatypeElement, FreeBuilder.class).isPresent()) {
      return MergeBuilderMethod.MERGE_DIRECTLY;
    } else {
      List<ExecutableElement> methods = elements.getAllMembers(asElement(builder))
          .stream()
          .flatMap(METHODS)
          .filter(BuildableType::isCallableMethod)
          .collect(toList());

      // Check whether there is a mergeFrom(Builder) method
      if (methods.stream().anyMatch(new IsMergeFromMethod(builder, builder, types))) {
        return MergeBuilderMethod.MERGE_DIRECTLY;
      } else {
        return MergeBuilderMethod.BUILD_PARTIAL_AND_MERGE;
      }
    }
  }

  /**
   * Determines how to convert a partial value into a Builder: via toBuilder() if the value type
   * declares one, otherwise by merging the value into a fresh Builder.
   */
  private static PartialToBuilderMethod detectPartialToBuilderMethod(
      DeclaredType datatype, DeclaredType builder, Elements elements, Types types) {
    List<ExecutableElement> valueMethods = elements.getAllMembers(asElement(datatype))
        .stream()
        .flatMap(METHODS)
        .filter(BuildableType::isCallableMethod)
        .collect(toList());

    // Check whether there is a toBuilder() method
    if (valueMethods.stream().anyMatch(new IsToBuilderMethod(datatype, builder, types))) {
      return PartialToBuilderMethod.TO_BUILDER_AND_MERGE;
    } else {
      return PartialToBuilderMethod.MERGE_DIRECTLY;
    }
  }

  /** Returns an unchecked-suppression excerpt if {@code datatype} requires one, else empty. */
  private static Excerpt suppressUncheckedExcerptFor(DeclaredType datatype) {
    if (needsSafeVarargs(datatype)) {
      return Excerpts.add("@SuppressWarnings(\"unchecked\")");
    } else {
      return Excerpts.EMPTY;
    }
  }

  /** True for public, non-static, no-throws instance methods — the only ones we can call. */
  private static boolean isCallableMethod(ExecutableElement element) {
    boolean isMethod = (element.getKind() == ElementKind.METHOD);
    boolean isPublic = element.getModifiers().contains(Modifier.PUBLIC);
    boolean isNotStatic = !element.getModifiers().contains(Modifier.STATIC);
    boolean declaresNoExceptions = element.getThrownTypes().isEmpty();
    return isMethod && isPublic && isNotStatic && declaresNoExceptions;
  }

  /** Matches a no-arg method of the given name whose return type is a subtype of builtType. */
  private static final class IsBuildMethod implements Predicate<ExecutableElement> {
    final String methodName;
    final TypeMirror builtType;
    final Types types;

    IsBuildMethod(String methodName, TypeMirror builtType, Types types) {
      this.methodName = methodName;
      this.builtType = builtType;
      this.types = types;
    }

    @Override
    public boolean test(ExecutableElement element) {
      if (!element.getParameters().isEmpty()) {
        return false;
      }
      if (!element.getSimpleName().contentEquals(methodName)) {
        return false;
      }
      return types.isSubtype(element.getReturnType(), builtType);
    }
  }

  /** Matches a public nested type named "Builder". */
  private static boolean isBuilderType(TypeElement element) {
    return element.getSimpleName().contentEquals("Builder")
        && element.getModifiers().contains(PUBLIC);
  }

  /** Matches a no-arg method named "clear". */
  private static boolean isClearMethod(ExecutableElement element) {
    if (!element.getParameters().isEmpty()) {
      return false;
    }
    return element.getSimpleName().contentEquals("clear");
  }

  /** Matches mergeFrom(T) on {@code builder} where {@code parameter} is assignable to T. */
  private static final class IsMergeFromMethod implements Predicate<ExecutableElement> {
    final DeclaredType parameter;
    final DeclaredType builder;
    final Types types;

    IsMergeFromMethod(DeclaredType parameter, DeclaredType builder, Types types) {
      this.parameter = parameter;
      this.builder = builder;
      this.types = types;
    }

    @Override
    public boolean test(ExecutableElement element) {
      if (element.getParameters().size() != 1) {
        return false;
      }
      if (!element.getSimpleName().contentEquals("mergeFrom")) {
        return false;
      }
      // Resolve the parameter type as seen on the (possibly parameterized) builder.
      ExecutableType method = (ExecutableType) types.asMemberOf(builder, element);
      return types.isSubtype(parameter, method.getParameterTypes().get(0));
    }
  }

  /** Matches a no-arg toBuilder() on {@code datatype} returning a subtype of {@code builder}. */
  private static final class IsToBuilderMethod implements Predicate<ExecutableElement> {
    final DeclaredType datatype;
    final TypeMirror builder;
    final Types types;

    IsToBuilderMethod(DeclaredType datatype, TypeMirror builder, Types types) {
      this.datatype = datatype;
      this.builder = builder;
      this.types = types;
    }

    @Override
    public boolean test(ExecutableElement element) {
      if (!element.getParameters().isEmpty()) {
        return false;
      }
      if (!element.getSimpleName().contentEquals("toBuilder")) {
        return false;
      }
      // Resolve the return type as seen on the (possibly parameterized) datatype.
      ExecutableType method = (ExecutableType) types.asMemberOf(datatype, element);
      return types.isSubtype(method.getReturnType(), builder);
    }
  }

  /** Narrows an Element stream to class/interface elements. */
  private static final Function<Element, Stream<TypeElement>> TYPES =
      element -> (element.getKind().isClass() || element.getKind().isInterface())
          ? Stream.of((TypeElement) element)
          : Stream.of();

  /** Narrows an Element stream to method elements. */
  private static final Function<Element, Stream<ExecutableElement>> METHODS =
      element -> (element.getKind() == ElementKind.METHOD)
          ? Stream.of((ExecutableElement) element)
          : Stream.of();
}
/** * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.rest.endpoint; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import org.jboss.pnc.auth.AuthenticationProvider; import org.jboss.pnc.core.exception.CoreException; import org.jboss.pnc.model.BuildConfigurationSet; import org.jboss.pnc.model.User; import org.jboss.pnc.rest.provider.BuildConfigurationProvider; import org.jboss.pnc.rest.provider.BuildConfigurationSetProvider; import org.jboss.pnc.rest.provider.BuildRecordProvider; import org.jboss.pnc.rest.provider.collection.CollectionInfo; import org.jboss.pnc.rest.restmodel.BuildConfigurationRest; import org.jboss.pnc.rest.restmodel.BuildConfigurationSetRest; import org.jboss.pnc.rest.restmodel.BuildRecordRest; import org.jboss.pnc.rest.restmodel.response.Page; import org.jboss.pnc.rest.restmodel.response.error.ErrorResponseRest; import org.jboss.pnc.rest.swagger.response.BuildConfigurationPage; import org.jboss.pnc.rest.swagger.response.BuildConfigurationSetPage; import org.jboss.pnc.rest.swagger.response.BuildConfigurationSetSingleton; import org.jboss.pnc.rest.swagger.response.BuildRecordPage; import org.jboss.pnc.rest.trigger.BuildTriggerer; import 
org.jboss.pnc.rest.validation.exceptions.ConflictedEntryException; import org.jboss.pnc.rest.validation.exceptions.ValidationException; import org.jboss.pnc.spi.builddriver.exception.BuildDriverException; import org.jboss.pnc.spi.datastore.Datastore; import org.jboss.pnc.spi.datastore.DatastoreException; import org.jboss.pnc.spi.repositorymanager.RepositoryManagerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.validation.Valid; import javax.validation.constraints.NotNull; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; import java.lang.invoke.MethodHandles; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.Objects; import java.util.stream.Collectors; import static org.jboss.pnc.rest.configuration.SwaggerConstants.CONFLICTED_CODE; import static org.jboss.pnc.rest.configuration.SwaggerConstants.CONFLICTED_DESCRIPTION; import static org.jboss.pnc.rest.configuration.SwaggerConstants.INVALID_DESCRIPTION; import static org.jboss.pnc.rest.configuration.SwaggerConstants.INVLID_CODE; import static org.jboss.pnc.rest.configuration.SwaggerConstants.NOT_FOUND_CODE; import static org.jboss.pnc.rest.configuration.SwaggerConstants.NOT_FOUND_DESCRIPTION; import static org.jboss.pnc.rest.configuration.SwaggerConstants.NO_CONTENT_CODE; import static org.jboss.pnc.rest.configuration.SwaggerConstants.NO_CONTENT_DESCRIPTION; import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_DEFAULT_VALUE; import static 
org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_DEFAULT_VALUE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.QUERY_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.QUERY_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SERVER_ERROR_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SERVER_ERROR_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SORTING_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SORTING_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SUCCESS_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SUCCESS_DESCRIPTION;

/**
 * REST endpoint for managing build configuration sets: CRUD on the sets themselves,
 * membership of build configurations, and triggering builds of a whole set.
 * Generic CRUD behavior is delegated to {@code AbstractEndpoint}.
 */
@Api(value = "/build-configuration-sets", description = "Set of related build configurations")
@Path("/build-configuration-sets")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class BuildConfigurationSetEndpoint extends AbstractEndpoint<BuildConfigurationSet, BuildConfigurationSetRest> {

    private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    private BuildTriggerer buildTriggerer;

    @Context
    private HttpServletRequest httpServletRequest;

    @Inject
    private Datastore datastore;

    private BuildConfigurationSetProvider buildConfigurationSetProvider;
    private BuildConfigurationProvider buildConfigurationProvider;
    private BuildRecordProvider buildRecordProvider;

    // No-arg constructor required for CDI proxying.
    public BuildConfigurationSetEndpoint() {
    }

    @Inject
    public BuildConfigurationSetEndpoint(BuildConfigurationSetProvider buildConfigurationSetProvider,
            BuildTriggerer buildTriggerer,
            BuildConfigurationProvider buildConfigurationProvider,
            BuildRecordProvider buildRecordProvider) {
        super(buildConfigurationSetProvider);
        this.buildConfigurationSetProvider = buildConfigurationSetProvider;
        this.buildTriggerer = buildTriggerer;
        this.buildConfigurationProvider = buildConfigurationProvider;
        this.buildRecordProvider = buildRecordProvider;
    }

    /** Paged listing of all build configuration sets; delegates to AbstractEndpoint. */
    @ApiOperation(value = "Gets all Build Configuration Sets")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetPage.class),
            @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildConfigurationSetPage.class),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    public Response getAll(@ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex,
            @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize,
            @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort,
            @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q) {
        return super.getAll(pageIndex, pageSize, sort, q);
    }

    /** Creates a new set; validation errors surface as ValidationException. */
    @ApiOperation(value = "Creates a new Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = CONFLICTED_CODE, message = CONFLICTED_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @POST
    public Response createNew(@NotNull @Valid BuildConfigurationSetRest buildConfigurationSetRest, @Context UriInfo uriInfo)
            throws ValidationException {
        return super.createNew(buildConfigurationSetRest, uriInfo);
    }

    /** Fetches one set by id. */
    @ApiOperation(value = "Gets a specific Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = NOT_FOUND_CODE, message = NOT_FOUND_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    @Path("/{id}")
    public Response getSpecific(
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id) {
        return super.getSpecific(id);
    }

    /** Full update of an existing set. */
    @ApiOperation(value = "Updates an existing Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = CONFLICTED_CODE, message = CONFLICTED_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @PUT
    @Path("/{id}")
    public Response update(@ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id,
            @NotNull @Valid BuildConfigurationSetRest buildConfigurationSetRest) throws ValidationException {
        return super.update(id, buildConfigurationSetRest);
    }

    /** Deletes a set by id. */
    @ApiOperation(value = "Removes a specific Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @DELETE
    @Path("/{id}")
    public Response deleteSpecific(@ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id)
            throws ValidationException {
        return super.delete(id);
    }

    /** Paged listing of the build configurations that belong to the set. */
    @ApiOperation(value = "Gets the Configurations for the Specified Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationPage.class),
            @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildConfigurationPage.class),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    @Path("/{id}/build-configurations")
    public Response getConfigurations(@ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex,
            @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize,
            @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort,
            @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q,
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id) {
        return fromCollection(buildConfigurationProvider.getAllForBuildConfigurationSet(pageIndex, pageSize, sort, q, id));
    }

    /** Adds an existing build configuration to the set (only its id is used from the body). */
    @ApiOperation(value = "Adds a configuration to the Specified Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @POST
    @Path("/{id}/build-configurations")
    public Response addConfiguration(
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id,
            BuildConfigurationRest buildConfig) throws ConflictedEntryException {
        buildConfigurationSetProvider.addConfiguration(id, buildConfig.getId());
        return fromEmpty();
    }

    /** Removes a build configuration from the set. */
    @ApiOperation(value = "Removes a configuration from the specified config set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @DELETE
    @Path("/{id}/build-configurations/{configId}")
    public Response removeConfiguration(
            @ApiParam(value = "Build configuration set id", required = true) @PathParam("id") Integer id,
            @ApiParam(value = "Build configuration id", required = true) @PathParam("configId") Integer configId) {
        buildConfigurationSetProvider.removeConfiguration(id, configId);
        return fromEmpty();
    }

    /** Paged listing of build records produced by the configurations in the set. */
    @ApiOperation(value = "Gets all build records associated with the contained build configurations")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildRecordPage.class),
            @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildRecordPage.class),
            @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    @Path("/{id}/build-records")
    public Response getBuildRecords(
            @ApiParam(value = "Build configuration set id", required = true) @PathParam("id") Integer id,
            @ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex,
            @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize,
            @ApiParam(value = SORTING_DESCRIPTION)
@QueryParam(SORTING_QUERY_PARAM) String sort, @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q) { return fromCollection(buildRecordProvider.getAllForBuildConfigSetRecord(pageIndex, pageSize, sort, q, id)); } @ApiOperation(value = "Builds the Configurations for the Specified Set") @ApiResponses(value = { @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildRecordPage.class), @ApiResponse(code = INVLID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class), @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class) }) @POST @Path("/{id}/build") @Consumes(MediaType.WILDCARD) public Response build( @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id, @ApiParam(value = "Optional Callback URL", required = false) @QueryParam("callbackUrl") String callbackUrl, @ApiParam(value = "Rebuild all dependencies") @QueryParam("rebuildAll") @DefaultValue("false") boolean rebuildAll, @Context UriInfo uriInfo) throws InterruptedException, CoreException, DatastoreException, BuildDriverException, RepositoryManagerException, MalformedURLException { logger.info("Executing build configuration set id: " + id ); AuthenticationProvider authProvider = new AuthenticationProvider(httpServletRequest); String loggedUser = authProvider.getUserName(); User currentUser = null; if(loggedUser != null && loggedUser != "") { currentUser = datastore.retrieveUserByUsername(loggedUser); if(currentUser != null) { currentUser.setLoginToken(authProvider.getTokenString()); } } if(currentUser == null) { //TODO remove user creation currentUser = User.Builder.newBuilder() .username(loggedUser) .firstName(authProvider.getFirstName()) .lastName(authProvider.getLastName()) .email(authProvider.getEmail()).build(); datastore.createNewUser(currentUser); } BuildTriggerer.BuildConfigurationSetTriggerResult result = null; // if callbackUrl 
is provided trigger build accordingly if (callbackUrl == null || callbackUrl.isEmpty()) { result = buildTriggerer.triggerBuildConfigurationSet(id, currentUser, rebuildAll); } else { result = buildTriggerer.triggerBuildConfigurationSet(id, currentUser, rebuildAll, new URL(callbackUrl)); } UriBuilder uriBuilder = UriBuilder.fromUri(uriInfo.getBaseUri()).path("/build-config-set-records/{id}"); URI uri = uriBuilder.build(result.getBuildRecordSetId()); Page<BuildRecordRest> resultsToBeReturned = new Page<>(new CollectionInfo<>(0, result.getBuildRecordsIds().size(), 1, result.getBuildRecordsIds().stream() .map(runningBuildRecordId -> buildRecordProvider.getSpecificRunning(runningBuildRecordId)) .filter(Objects::nonNull) .collect(Collectors.toList()))); return Response.ok(uri).header("location", uri).entity(resultsToBeReturned).build(); } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * @author max
 */
package org.jetbrains.java.generate;

import com.intellij.codeInsight.hint.HintManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.VisualPosition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.generate.config.*;
import org.jetbrains.java.generate.exception.GenerateCodeException;
import org.jetbrains.java.generate.psi.PsiAdapter;
import org.jetbrains.java.generate.template.TemplateResource;
import org.jetbrains.java.generate.view.MethodExistsDialog;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;

/**
 * Drives the generation of a {@code toString()} method for a class: renders the configured
 * Velocity template, resolves conflicts with an existing method, and applies javadoc.
 */
public class GenerateToStringWorker {
  private static final Logger logger = Logger.getInstance("#" + GenerateToStringWorker.class.getName());

  private final Editor editor;
  private final PsiClass clazz;
  private final Config config;
  private final boolean hasOverrideAnnotation;

  public GenerateToStringWorker(PsiClass clazz, Editor editor, boolean insertAtOverride) {
    this.clazz = clazz;
    this.editor = editor;
    this.config = GenerateToStringContext.getConfig();
    this.hasOverrideAnnotation = insertAtOverride;
  }

  /**
   * Creates the <code>toString</code> method.
   *
   * @param selectedMembers the selected members as both {@link com.intellij.psi.PsiField} and {@link com.intellij.psi.PsiMethod}.
   * @param policy          conflict resolution policy
   * @param params          additional parameters stored with key/value in the map.
   * @param template        the template to use
   * @return the created method, null if the method is not created due the user cancels this operation
   * @throws GenerateCodeException       is thrown when there is an error generating the javacode.
   * @throws IncorrectOperationException is thrown by IDEA.
   */
  @Nullable
  private PsiMethod createToStringMethod(Collection<PsiMember> selectedMembers,
                                         ConflictResolutionPolicy policy,
                                         Map<String, String> params,
                                         TemplateResource template) throws IncorrectOperationException, GenerateCodeException {
    // generate code using velocity
    String body = GenerationUtil.velocityGenerateCode(clazz, selectedMembers, params, template.getMethodBody(),
                                                      config.getSortElements(), config.isUseFullyQualifiedName());
    if (logger.isDebugEnabled()) logger.debug("Method body generated from Velocity:\n" + body);

    // fix weird linebreak problem in IDEA #3296 and later
    body = StringUtil.convertLineSeparators(body);

    // create psi newMethod named toString()
    final JVMElementFactory topLevelFactory = JVMElementFactories.getFactory(clazz.getLanguage(), clazz.getProject());
    if (topLevelFactory == null) {
      return null;
    }
    PsiMethod newMethod;
    try {
      newMethod = topLevelFactory.createMethodFromText(template.getMethodSignature() + " { " + body + " }", clazz);
      CodeStyleManager.getInstance(clazz.getProject()).reformat(newMethod);
    }
    catch (IncorrectOperationException ignore) {
      // the template produced something the PSI parser rejects; tell the user which template failed
      HintManager.getInstance().showErrorHint(editor, "'toString()' method could not be created from template '" +
                                                      template.getFileName() + '\'');
      return null;
    }

    // insertNewMethod conflict resolution policy (add/replace, duplicate, cancel)
    PsiMethod existingMethod = clazz.findMethodBySignature(newMethod, false);
    PsiMethod toStringMethod = policy.applyMethod(clazz, existingMethod, newMethod, editor);
    if (toStringMethod == null) {
      return null; // user cancelled so return null
    }

    if (hasOverrideAnnotation) {
      toStringMethod.getModifierList().addAnnotation("java.lang.Override");
    }

    // applyJavaDoc conflict resolution policy (add or keep existing)
    String existingJavaDoc = params.get("existingJavaDoc");
    String newJavaDoc = template.getJavaDoc();
    if (existingJavaDoc != null || newJavaDoc != null) {
      // generate javadoc using velocity
      newJavaDoc = GenerationUtil.velocityGenerateCode(clazz, selectedMembers, params, newJavaDoc,
                                                       config.getSortElements(), config.isUseFullyQualifiedName());
      if (logger.isDebugEnabled()) logger.debug("JavaDoc body generated from Velocity:\n" + newJavaDoc);
      GenerationUtil.applyJavaDoc(toStringMethod, existingJavaDoc, newJavaDoc);
    }

    // return the created method
    return toStringMethod;
  }

  /**
   * Runs the full generation pipeline: conflict resolution, pre-hook, method creation, post-hook.
   */
  public void execute(Collection<PsiMember> members, TemplateResource template) throws IncorrectOperationException,
                                                                                       GenerateCodeException {
    // decide what to do if the method already exists
    ConflictResolutionPolicy resolutionPolicy = exitsMethodDialog(template);
    // what insert policy should we use?
    resolutionPolicy.setNewMethodStrategy(getStrategy(config.getInsertNewMethodInitialOption()));

    // user didn't click cancel so go on
    Map<String, String> params = new HashMap<>();

    // before
    beforeCreateToStringMethod(params, template);

    // generate method
    PsiMethod method = createToStringMethod(members, resolutionPolicy, params, template);

    // after, if method was generated (not cancel policy)
    if (method != null) {
      afterCreateToStringMethod(method, params, template);
    }
  }

  // Maps the configured insert-position option onto the matching strategy singleton;
  // AT_THE_END_OF_A_CLASS is also the fallback for unknown options.
  private static InsertNewMethodStrategy getStrategy(InsertWhere option) {
    switch (option) {
      case AFTER_EQUALS_AND_HASHCODE:
        return InsertAfterEqualsHashCodeStrategy.getInstance();
      case AT_CARET:
        return InsertAtCaretStrategy.getInstance();
      case AT_THE_END_OF_A_CLASS:
        return InsertLastStrategy.getInstance();
    }
    return InsertLastStrategy.getInstance();
  }

  /**
   * This method gets the choice if there is an existing <code>toString</code> method.
   * <br/> 1) If there is a settings to always override use this.
   * <br/> 2) Prompt a dialog and let the user decide.
   *
   * @param template the chosen template to use
   * @return the policy the user selected (never null)
   */
  protected ConflictResolutionPolicy exitsMethodDialog(TemplateResource template) {
    final DuplicationPolicy dupPolicy = config.getReplaceDialogInitialOption();
    if (dupPolicy == DuplicationPolicy.ASK) {
      PsiMethod existingMethod = PsiAdapter.findMethodByName(clazz, template.getTargetMethodName());
      if (existingMethod != null) {
        return MethodExistsDialog.showDialog(template.getTargetMethodName());
      }
    }
    else if (dupPolicy == DuplicationPolicy.REPLACE) {
      return ReplacePolicy.getInstance();
    }

    // If there is no conflict, duplicate policy will do the trick
    return DuplicatePolicy.getInstance();
  }

  /**
   * This method is executed just before the <code>toString</code> method is created or updated.
   * Captures the existing method's javadoc (if any) under the "existingJavaDoc" key so the
   * javadoc conflict policy can merge it later.
   *
   * @param params   additional parameters stored with key/value in the map.
   * @param template the template to use
   */
  private void beforeCreateToStringMethod(Map<String, String> params, TemplateResource template) {
    PsiMethod existingMethod = PsiAdapter.findMethodByName(clazz, template.getTargetMethodName()); // find the existing method
    if (existingMethod != null) {
      // FIX: the original fetched getDocComment() twice and null-checked it twice; one
      // fetch and one check is sufficient and behaviorally identical.
      PsiDocComment doc = existingMethod.getDocComment();
      if (doc != null) {
        params.put("existingJavaDoc", doc.getText());
      }
    }
  }

  /**
   * This method is executed just after the <code>toString</code> method is created or updated.
   *
   * @param method   the newly created/updated <code>toString</code> method.
   * @param params   additional parameters stored with key/value in the map.
   * @param template the template to use
   * @throws IncorrectOperationException is thrown by IDEA
   */
  private void afterCreateToStringMethod(PsiMethod method, Map<String, String> params, TemplateResource template) {
    PsiFile containingFile = clazz.getContainingFile();
    if (containingFile instanceof PsiJavaFile) {
      final PsiJavaFile javaFile = (PsiJavaFile)containingFile;
      if (params.get("autoImportPackages") != null) {
        // keep this for old user templates
        autoImportPackages(javaFile, params.get("autoImportPackages"));
      }
    }
    method = (PsiMethod)JavaCodeStyleManager.getInstance(clazz.getProject()).shortenClassReferences(method);

    // jump to method
    if (!config.isJumpToMethod() || editor == null) {
      return;
    }
    int offset = method.getTextOffset();
    // NOTE(review): offsets at or below 2 are treated as "no sensible position" — presumably a
    // guard against an unpositioned PSI element; confirm before changing.
    if (offset <= 2) {
      return;
    }
    VisualPosition vp = editor.offsetToVisualPosition(offset);
    if (logger.isDebugEnabled()) logger.debug("Moving/Scrolling caret to " + vp + " (offset=" + offset + ")");
    editor.getCaretModel().moveToVisualPosition(vp);
    editor.getScrollingModel().scrollToCaret(ScrollType.CENTER_DOWN);
  }

  /**
   * Automatic import the packages.
   *
   * @param packageNames names of packages (must end with .* and be separated by ,)
   * @throws IncorrectOperationException error adding imported package
   */
  private static void autoImportPackages(PsiJavaFile psiJavaFile, String packageNames) throws IncorrectOperationException {
    // NOTE: the tokenizer splits on "," only; the old javadoc also mentioned ";" but that was
    // never honored by the code.
    StringTokenizer tok = new StringTokenizer(packageNames, ",");
    while (tok.hasMoreTokens()) {
      String packageName = tok.nextToken().trim(); // trim in case of space
      if (logger.isDebugEnabled()) logger.debug("Auto importing package: " + packageName);
      PsiAdapter.addImportStatement(psiJavaFile, packageName);
    }
  }
}
package play.test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.nio.channels.Channels; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.regex.Pattern; import org.apache.commons.lang.ArrayUtils; import org.junit.Before; import com.ning.http.client.FluentCaseInsensitiveStringsMap; import com.ning.http.client.multipart.FilePart; import com.ning.http.client.multipart.MultipartBody; import com.ning.http.client.multipart.MultipartUtils; import com.ning.http.client.multipart.Part; import com.ning.http.client.multipart.StringPart; import play.Invoker; import play.Invoker.InvocationContext; import play.classloading.enhancers.ControllersEnhancer.ControllerInstrumentation; import play.mvc.ActionInvoker; import play.mvc.Controller; import play.mvc.Http; import play.mvc.Http.Request; import play.mvc.Http.Response; import play.mvc.Router.ActionDefinition; import play.mvc.Scope.RenderArgs; /** * Application tests support */ public abstract class FunctionalTest extends BaseTest { public static final String APPLICATION_X_WWW_FORM_URLENCODED = "application/x-www-form-urlencoded"; public static final String MULTIPART_FORM_DATA = "multipart/form-data"; private static Map<String, Http.Cookie> savedCookies; // cookies stored // between calls private static Map<String, Object> renderArgs = new HashMap<>(); @Before public void clearCookies() { savedCookies = null; } // Requests public static Response GET(Object url) { return GET(newRequest(), url); } /** * sends a GET request to the application under tests. 
* * @param url * relative url such as <em>"/products/1234"</em> * @param followRedirect * indicates if request have to follow redirection (status 302) * @return the response */ public static Response GET(Object url, boolean followRedirect) { Response response = GET(url); if (Http.StatusCode.FOUND == response.status && followRedirect) { Http.Header redirectedTo = response.headers.get("Location"); String location = redirectedTo.value(); if (location.contains("http")) { java.net.URL redirectedUrl = null; try { redirectedUrl = new java.net.URL(redirectedTo.value()); } catch (MalformedURLException e) { throw new RuntimeException(e); } response = GET(redirectedUrl.getPath()); } else { response = GET(location); } } return response; } /** * sends a GET request to the application under tests. * * @param request * @param url * relative url such as <em>"/products/1234"</em> * @return the response */ public static Response GET(Request request, Object url) { String path = ""; String queryString = ""; String turl = url.toString(); if (turl.contains("?")) { path = turl.substring(0, turl.indexOf("?")); queryString = turl.substring(turl.indexOf("?") + 1); } else { path = turl; } request.method = "GET"; request.url = turl; request.path = path; request.querystring = queryString; request.body = new ByteArrayInputStream(new byte[0]); if (savedCookies != null) request.cookies = savedCookies; return makeRequest(request); } // convenience methods public static Response POST(Object url) { return POST(url, APPLICATION_X_WWW_FORM_URLENCODED, ""); } public static Response POST(Request request, Object url) { return POST(request, url, APPLICATION_X_WWW_FORM_URLENCODED, ""); } public static Response POST(Object url, String contenttype, String body) { return POST(newRequest(), url, contenttype, body); } public static Response POST(Request request, Object url, String contenttype, String body) { return POST(request, url, contenttype, new ByteArrayInputStream(body.getBytes())); } public static 
    Response POST(Object url, String contenttype, InputStream body) {
        return POST(newRequest(), url, contenttype, body);
    }

    /**
     * Sends a POST request to the application under tests.
     *
     * @param request
     * @param url
     *            relative url such as <em>"/products/1234"</em>
     * @param contenttype
     *            content-type of the request
     * @param body
     *            posted data
     * @return the response
     */
    public static Response POST(Request request, Object url, String contenttype, InputStream body) {
        // split the url into path and query string before dispatching
        String path = "";
        String queryString = "";
        String turl = url.toString();
        if (turl.contains("?")) {
            path = turl.substring(0, turl.indexOf("?"));
            queryString = turl.substring(turl.indexOf("?") + 1);
        } else {
            path = turl;
        }
        request.method = "POST";
        request.contentType = contenttype;
        request.url = turl;
        request.path = path;
        request.querystring = queryString;
        request.body = body;
        if (savedCookies != null)
            request.cookies = savedCookies;
        return makeRequest(request);
    }

    /**
     * Sends a POST request to the application under tests as a multipart form.
     * Designed for file upload testing.
     *
     * @param url
     *            relative url such as <em>"/products/1234"</em>
     * @param parameters
     *            map of parameters to be posted
     * @param files
     *            map containing files to be uploaded
     * @return the response
     */
    public static Response POST(Object url, Map<String, String> parameters, Map<String, File> files) {
        return POST(newRequest(), url, parameters, files);
    }

    public static Response POST(Object url, Map<String, String> parameters) {
        return POST(newRequest(), url, parameters, new HashMap<String, File>());
    }

    public static Response POST(Request request, Object url, Map<String, String> parameters, Map<String, File> files) {
        // Build one multipart Part per form parameter and per (non-null) file.
        List<Part> parts = new ArrayList<>();

        for (String key : parameters.keySet()) {
            StringPart stringPart = new StringPart(key, parameters.get(key), request.contentType, Charset.forName(request.encoding));
            parts.add(stringPart);
        }

        for (Map.Entry<String, File> entry : files.entrySet()) {
            File file = entry.getValue();
            if (file != null) {
                Part filePart = new FilePart(entry.getKey(), entry.getValue());
                parts.add(filePart);
            }
        }

        MultipartBody requestEntity = null;
        /*
         * ^1 MultipartBody::read is not working (if parts.isEmpty() == true)
         * byte[] array = null;
         **/
        _ByteArrayOutputStream baos = null;
        try {
            requestEntity = MultipartUtils.newMultipartBody(parts, new FluentCaseInsensitiveStringsMap());
            // Propagate the generated multipart boundary via the content-type header.
            request.headers.putAll(ArrayUtils
                    .toMap(new Object[][] { { "content-type", new Http.Header("content-type", requestEntity.getContentType()) } }));
            long contentLength = requestEntity.getContentLength();
            // Guard the long -> int narrowing used for the buffer size below.
            if (contentLength < Integer.MIN_VALUE || contentLength > Integer.MAX_VALUE) {
                throw new IllegalArgumentException(contentLength + " cannot be cast to int without changing its value.");
            }
            // array = new byte[(int) contentLength]; // ^1
            // requestEntity.read(ByteBuffer.wrap(array)); // ^1
            // Serialize the multipart body into an in-memory buffer (see ^1 for why
            // MultipartBody::read is not used directly).
            baos = new _ByteArrayOutputStream((int) contentLength);
            requestEntity.transferTo(0, Channels.newChannel(baos));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // InputStream body = new ByteArrayInputStream(array != null ? array :
        // new byte[0]); // ^1
        InputStream body = new ByteArrayInputStream(baos != null ? baos.getByteArray() : new byte[0]);
        return POST(request, url, MULTIPART_FORM_DATA, body);
    }

    public static Response PUT(Object url, String contenttype, String body) {
        return PUT(newRequest(), url, contenttype, body);
    }

    /**
     * Sends a PUT request to the application under tests.
     *
     * @param request
     * @param url
     *            relative url such as <em>"/products/1234"</em>
     * @param contenttype
     *            content-type of the request
     * @param body
     *            data to send
     * @return the response
     */
    public static Response PUT(Request request, Object url, String contenttype, String body) {
        // split the url into path and query string before dispatching
        String path = "";
        String queryString = "";
        String turl = url.toString();
        if (turl.contains("?")) {
            path = turl.substring(0, turl.indexOf("?"));
            queryString = turl.substring(turl.indexOf("?") + 1);
        } else {
            path = turl;
        }
        request.method = "PUT";
        request.contentType = contenttype;
        request.url = turl;
        request.path = path;
        request.querystring = queryString;
        request.body = new ByteArrayInputStream(body.getBytes());
        if (savedCookies != null)
            request.cookies = savedCookies;
        return makeRequest(request);
    }

    public static Response DELETE(String url) {
        return DELETE(newRequest(), url);
    }

    /**
     * Sends a DELETE request to the application under tests.
     *
     * @param request
     * @param url
     *            relative url eg.
               <em>"/products/1234"</em>
     * @return the response
     */
    public static Response DELETE(Request request, Object url) {
        // split the url into path and query string before dispatching
        String path = "";
        String queryString = "";
        String turl = url.toString();
        if (turl.contains("?")) {
            path = turl.substring(0, turl.indexOf("?"));
            queryString = turl.substring(turl.indexOf("?") + 1);
        } else {
            path = turl;
        }
        request.method = "DELETE";
        request.url = turl;
        request.path = path;
        request.querystring = queryString;
        if (savedCookies != null)
            request.cookies = savedCookies;
        request.body = new ByteArrayInputStream(new byte[0]);
        return makeRequest(request);
    }

    /**
     * Dispatches the request through the functional-test executor, waits (up to 30s) for the
     * action to complete, captures render args and cookies, then flushes the response.
     */
    public static void makeRequest(final Request request, final Response response) {
        final CountDownLatch actionCompleted = new CountDownLatch(1);
        TestEngine.functionalTestsExecutor.submit(new Invoker.Invocation() {

            @Override
            public void execute() throws Exception {
                renderArgs.clear();
                ActionInvoker.invoke(request, response);

                if (RenderArgs.current().data != null) {
                    renderArgs.putAll(RenderArgs.current().data);
                }
            }

            @Override
            public void onSuccess() throws Exception {
                try {
                    super.onSuccess();
                } finally {
                    onActionCompleted();
                }
            }

            @Override
            public void onException(Throwable e) {
                try {
                    super.onException(e);
                } finally {
                    onActionCompleted();
                }
            }

            // Releases the latch exactly once per invocation, on success or failure.
            private void onActionCompleted() {
                actionCompleted.countDown();
            }

            @Override
            public InvocationContext getInvocationContext() {
                ActionInvoker.resolve(request, response);
                return new InvocationContext(Http.invocationType, request.invokedMethod.getAnnotations(),
                        request.invokedMethod.getDeclaringClass().getAnnotations());
            }

        });
        try {
            if (!actionCompleted.await(30, TimeUnit.SECONDS)) {
                throw new TimeoutException("Request did not complete in time");
            }
            if (savedCookies == null) {
                savedCookies = new HashMap<>();
            }
            for (Map.Entry<String, Http.Cookie> e : response.cookies.entrySet()) {
                // If Max-Age is unset, browsers discard on exit; if
                // 0, they discard immediately.
                if (e.getValue().maxAge == null || e.getValue().maxAge > 0) {
                    savedCookies.put(e.getKey(), e.getValue());
                } else {
                    // cookies with maxAge zero still remove a previously
                    // existing cookie,
                    // like PLAY_FLASH.
                    savedCookies.remove(e.getKey());
                }
            }
            response.out.flush();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    public static Response makeRequest(Request request) {
        Response response = newResponse();
        makeRequest(request, response);
        if (response.status == 302) { // redirect
            // if Location-header is pressent, fix it to "look like" a
            // functional-test-url
            Http.Header locationHeader = response.headers.get("Location");
            if (locationHeader != null) {
                String locationUrl = locationHeader.value();
                if (locationUrl.startsWith("http://localhost/")) {
                    locationHeader.values.clear();
                    locationHeader.values.add(locationUrl.substring(16));// skip
                                                                         // 'http://localhost'
                }
            }
        }
        return response;
    }

    // Creates a fresh Response whose body is buffered in memory.
    public static Response newResponse() {
        Response response = new Response();
        response.out = new ByteArrayOutputStream();
        return response;
    }

    // Creates a default GET / request against localhost:80.
    public static Request newRequest() {
        Request request = Request.createRequest(null, "GET", "/", "", null, null, null, null, false, 80, "localhost", false, null,
                null);
        return request;
    }

    // Assertions

    /**
     * Asserts a <em>2OO Success</em> response
     *
     * @param response
     *            server response
     */
    public static void assertIsOk(Response response) {
        assertStatus(200, response);
    }

    /**
     * Asserts a <em>404 (not found)</em> response
     *
     * @param response
     *            server response
     */
    public static void assertIsNotFound(Response response) {
        assertStatus(404, response);
    }

    /**
     * Asserts response status code
     *
     * @param status
     *            expected HTTP response code
     * @param response
     *            server response
     */
    public static void assertStatus(int status, Response response) {
        assertEquals("Response status ", (Object) status, response.status);
    }

    /**
     * Exact equality assertion on response body
     *
     * @param content
     *            expected body content
     * @param response
     *            server response
     */
    public static void
assertContentEquals(String content, Response response) { assertEquals(content, getContent(response)); } /** * Asserts response body matched a pattern or contains some text. * * @param pattern * a regular expression pattern or a regular text, ( which must * be escaped using Pattern.quote) * @param response * server response */ public static void assertContentMatch(String pattern, Response response) { Pattern ptn = Pattern.compile(pattern); boolean ok = ptn.matcher(getContent(response)).find(); assertTrue("Response content does not match '" + pattern + "'", ok); } /** * Verify response charset encoding, as returned by the server in the * Content-Type header. Be aware that if no charset is returned, assertion * will fail. * * @param charset * expected charset encoding such as "utf-8" or "iso8859-1". * @param response * server response */ public static void assertCharset(String charset, Response response) { int pos = response.contentType.indexOf("charset=") + 8; String responseCharset = (pos > 7) ? response.contentType.substring(pos).toLowerCase() : ""; assertEquals("Response charset", charset.toLowerCase(), responseCharset); } /** * Verify the response content-type * * @param contentType * expected content-type without any charset extension, such as * "text/html" * @param response * server response */ public static void assertContentType(String contentType, Response response) { String responseContentType = response.contentType; assertNotNull("Response contentType missing", responseContentType); assertTrue("Response contentType unmatched : '" + contentType + "' !~ '" + responseContentType + "'", responseContentType.startsWith(contentType)); } /** * Exact equality assertion on a response header value * * @param headerName * header to verify. 
case-insensitive * @param value * expected header value * @param response * server response */ public static void assertHeaderEquals(String headerName, String value, Response response) { assertNotNull("Response header " + headerName + " missing", response.headers.get(headerName)); assertEquals("Response header " + headerName + " mismatch", value, response.headers.get(headerName).value()); } /** * obtains the response body as a string * * @param response * server response * @return the response body as an <em>utf-8 string</em> */ public static String getContent(Response response) { byte[] data = response.out.toByteArray(); try { return new String(data, response.encoding); } catch (UnsupportedEncodingException ex) { throw new RuntimeException(ex); } } public static Object renderArgs(String name) { return renderArgs.get(name); } // Utils public void sleep(int seconds) { try { Thread.sleep(seconds * 1000); } catch (Exception e) { throw new RuntimeException(e); } } protected static URL reverse() { ControllerInstrumentation.stopActionCall(); ActionDefinition actionDefinition = new ActionDefinition(); Controller._currentReverse.set(actionDefinition); return new URL(actionDefinition); } public static class URL { ActionDefinition actionDefinition; URL(ActionDefinition actionDefinition) { this.actionDefinition = actionDefinition; } @Override public String toString() { return actionDefinition.url; } } public static final class _ByteArrayOutputStream extends ByteArrayOutputStream { public _ByteArrayOutputStream(int size) { super(size); } public byte[] getByteArray() { return this.buf; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.arrow.vector.complex.writer; import static org.junit.Assert.*; import java.util.HashSet; import java.util.List; import java.util.Set; import io.netty.buffer.ArrowBuf; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.SchemaChangeCallBack; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.MapVector; import org.apache.arrow.vector.complex.NullableMapVector; import org.apache.arrow.vector.complex.UnionVector; import org.apache.arrow.vector.complex.impl.ComplexWriterImpl; import org.apache.arrow.vector.complex.impl.SingleMapReaderImpl; import org.apache.arrow.vector.complex.impl.UnionListReader; import org.apache.arrow.vector.complex.impl.UnionListWriter; import org.apache.arrow.vector.complex.impl.UnionReader; import org.apache.arrow.vector.complex.impl.UnionWriter; import org.apache.arrow.vector.complex.reader.BaseReader.MapReader; import org.apache.arrow.vector.complex.reader.FieldReader; import org.apache.arrow.vector.complex.writer.BaseWriter.ComplexWriter; import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; import 
org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter;
import org.apache.arrow.vector.holders.IntHolder;
import org.apache.arrow.vector.holders.NullableTimeStampNanoTZHolder;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.ArrowType.ArrowTypeID;
import org.apache.arrow.vector.types.pojo.ArrowType.Int;
import org.apache.arrow.vector.types.pojo.ArrowType.Struct;
import org.apache.arrow.vector.types.pojo.ArrowType.Timestamp;
import org.apache.arrow.vector.types.pojo.ArrowType.Union;
import org.apache.arrow.vector.types.pojo.ArrowType.Utf8;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.CallBack;
import org.apache.arrow.vector.util.DateUtility;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.JsonStringHashMap;
import org.apache.arrow.vector.util.Text;
import org.apache.arrow.vector.util.TransferPair;
import org.joda.time.LocalDateTime;
import org.junit.Assert;
import org.junit.Test;

// Exercises the complex (nested) vector writers/readers: maps, lists, unions.
public class TestComplexWriter {

  // Shared allocator for all tests; individual vectors are closed per test.
  private static final BufferAllocator allocator = new RootAllocator(Integer.MAX_VALUE);

  // Number of rows written/read in each scenario.
  private static final int COUNT = 100;

  // Writes COUNT rows of {int, bigInt} into a map vector and reads them back by name.
  @Test
  public void simpleNestedTypes() {
    MapVector parent = populateMapVector(null);
    MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
    for (int i = 0; i < COUNT; i++) {
      rootReader.setPosition(i);
      Assert.assertEquals(i, rootReader.reader("int").readInteger().intValue());
      Assert.assertEquals(i, rootReader.reader("bigInt").readLong().longValue());
    }

    parent.close();
  }

  // Verifies that writing through a vector created with callBack1 fires that callback,
  // while the transfer-pair target (callBack2) stays untouched.
  @Test
  public void transferPairSchemaChange() {
    SchemaChangeCallBack callBack1 = new SchemaChangeCallBack();
    SchemaChangeCallBack callBack2 = new SchemaChangeCallBack();
    MapVector parent = populateMapVector(callBack1);

    TransferPair tp = parent.getTransferPair("newVector", allocator, callBack2);

    ComplexWriter writer = new ComplexWriterImpl("newWriter", parent);
    MapWriter
rootWriter = writer.rootAsMap(); IntWriter intWriter = rootWriter.integer("newInt"); intWriter.writeInt(1); writer.setValueCount(1); assertTrue(callBack1.getSchemaChangedAndReset()); // The second vector should not have registered a schema change assertFalse(callBack1.getSchemaChangedAndReset()); } private MapVector populateMapVector(CallBack callBack) { MapVector parent = new MapVector("parent", allocator, new FieldType(false, Struct.INSTANCE, null, null), callBack); ComplexWriter writer = new ComplexWriterImpl("root", parent); MapWriter rootWriter = writer.rootAsMap(); IntWriter intWriter = rootWriter.integer("int"); BigIntWriter bigIntWriter = rootWriter.bigInt("bigInt"); for (int i = 0; i < COUNT; i++) { rootWriter.start(); intWriter.writeInt(i); bigIntWriter.writeBigInt(i); rootWriter.end(); } writer.setValueCount(COUNT); return parent; } @Test public void nullableMap() { try (MapVector mapVector = MapVector.empty("parent", allocator)) { ComplexWriter writer = new ComplexWriterImpl("root", mapVector); MapWriter rootWriter = writer.rootAsMap(); for (int i = 0; i < COUNT; i++) { rootWriter.start(); if (i % 2 == 0) { MapWriter mapWriter = rootWriter.map("map"); mapWriter.setPosition(i); mapWriter.start(); mapWriter.bigInt("nested").writeBigInt(i); mapWriter.end(); } rootWriter.end(); } writer.setValueCount(COUNT); checkNullableMap(mapVector); } } /** * This test is similar to {@link #nullableMap()} ()} but we get the inner map writer once at the beginning */ @Test public void nullableMap2() { try (MapVector mapVector = MapVector.empty("parent", allocator)) { ComplexWriter writer = new ComplexWriterImpl("root", mapVector); MapWriter rootWriter = writer.rootAsMap(); MapWriter mapWriter = rootWriter.map("map"); for (int i = 0; i < COUNT; i++) { rootWriter.start(); if (i % 2 == 0) { mapWriter.setPosition(i); mapWriter.start(); mapWriter.bigInt("nested").writeBigInt(i); mapWriter.end(); } rootWriter.end(); } writer.setValueCount(COUNT); checkNullableMap(mapVector); } 
// Closes the test method whose body begins on the previous line.
}

// Shared assertions for the nullableMap tests: even rows hold a map whose
// "nested" value equals the row index; odd rows read back unset/null.
private void checkNullableMap(MapVector mapVector) {
  MapReader rootReader = new SingleMapReaderImpl(mapVector).reader("root");
  for (int i = 0; i < COUNT; i++) {
    rootReader.setPosition(i);
    assertTrue("index is set: " + i, rootReader.isSet());
    FieldReader map = rootReader.reader("map");
    if (i % 2 == 0) {
      assertTrue("index is set: " + i, map.isSet());
      assertNotNull("index is set: " + i, map.readObject());
      assertEquals(i, map.reader("nested").readLong().longValue());
    } else {
      assertFalse("index is not set: " + i, map.isSet());
      assertNull("index is not set: " + i, map.readObject());
    }
  }
}

// Row 0 writes a one-element list, row 1 writes no list; verifies set/unset on read.
@Test
public void testList() {
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  rootWriter.start();
  rootWriter.bigInt("int").writeBigInt(0);
  rootWriter.list("list").startList();
  rootWriter.list("list").bigInt().writeBigInt(0);
  rootWriter.list("list").endList();
  rootWriter.end();
  rootWriter.start();
  rootWriter.bigInt("int").writeBigInt(1);
  rootWriter.end();
  writer.setValueCount(2);
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  rootReader.setPosition(0);
  assertTrue("row 0 list is not set", rootReader.reader("list").isSet());
  assertEquals(Long.valueOf(0), rootReader.reader("list").reader().readLong());
  rootReader.setPosition(1);
  assertFalse("row 1 list is set", rootReader.reader("list").isSet());
}

// Lists of scalar ints, written both directly and via IntHolder.
@Test
public void listScalarType() {
  ListVector listVector = ListVector.empty("list", allocator);
  listVector.allocateNew();
  UnionListWriter listWriter = new UnionListWriter(listVector);
  for (int i = 0; i < COUNT; i++) {
    listWriter.startList();
    for (int j = 0; j < i % 7; j++) {
      if (j%2 == 0) {
        listWriter.writeInt(j);
      } else {
        IntHolder holder = new IntHolder();
        holder.value = j;
        listWriter.write(holder);
      }
    }
    listWriter.endList();
  }
  listWriter.setValueCount(COUNT);
  UnionListReader listReader = new UnionListReader(listVector);
  for (int i = 0; i < COUNT; i++) {
// Read-back half of listScalarType (its loop opens on the previous line).
    listReader.setPosition(i);
    for (int j = 0; j < i % 7; j++) {
      listReader.next();
      assertEquals(j, listReader.reader().readInteger().intValue());
    }
  }
}

// Lists written only on even indices; odd indices must read back unset/null.
@Test
public void listScalarTypeNullable() {
  ListVector listVector = ListVector.empty("list", allocator);
  listVector.allocateNew();
  UnionListWriter listWriter = new UnionListWriter(listVector);
  for (int i = 0; i < COUNT; i++) {
    if (i % 2 == 0) {
      listWriter.setPosition(i);
      listWriter.startList();
      for (int j = 0; j < i % 7; j++) {
        listWriter.writeInt(j);
      }
      listWriter.endList();
    }
  }
  listWriter.setValueCount(COUNT);
  UnionListReader listReader = new UnionListReader(listVector);
  for (int i = 0; i < COUNT; i++) {
    listReader.setPosition(i);
    if (i % 2 == 0) {
      assertTrue("index is set: " + i, listReader.isSet());
      assertEquals("correct length at: " + i, i % 7, ((List<?>)listReader.readObject()).size());
    } else {
      assertFalse("index is not set: " + i, listReader.isSet());
      assertNull("index is not set: " + i, listReader.readObject());
    }
  }
}

// Lists whose elements are maps carrying an "int" and a "bigInt" child.
@Test
public void listMapType() {
  ListVector listVector = ListVector.empty("list", allocator);
  listVector.allocateNew();
  UnionListWriter listWriter = new UnionListWriter(listVector);
  MapWriter mapWriter = listWriter.map();
  for (int i = 0; i < COUNT; i++) {
    listWriter.startList();
    for (int j = 0; j < i % 7; j++) {
      mapWriter.start();
      mapWriter.integer("int").writeInt(j);
      mapWriter.bigInt("bigInt").writeBigInt(j);
      mapWriter.end();
    }
    listWriter.endList();
  }
  listWriter.setValueCount(COUNT);
  UnionListReader listReader = new UnionListReader(listVector);
  for (int i = 0; i < COUNT; i++) {
    listReader.setPosition(i);
    for (int j = 0; j < i % 7; j++) {
      listReader.next();
      Assert.assertEquals("record: " + i, j, listReader.reader().reader("int").readInteger().intValue());
      Assert.assertEquals(j, listReader.reader().reader("bigInt").readLong().longValue());
    }
  }
}

// Lists of lists of ints; inner list writer fetched fresh each iteration.
@Test
public void listListType() {
  try (ListVector listVector = ListVector.empty("list", allocator)) {
    listVector.allocateNew();
    UnionListWriter
// Continuation of listListType (declaration begun on the previous line).
        listWriter = new UnionListWriter(listVector);
    for (int i = 0; i < COUNT; i++) {
      listWriter.startList();
      for (int j = 0; j < i % 7; j++) {
        ListWriter innerListWriter = listWriter.list();
        innerListWriter.startList();
        for (int k = 0; k < i % 13; k++) {
          innerListWriter.integer().writeInt(k);
        }
        innerListWriter.endList();
      }
      listWriter.endList();
    }
    listWriter.setValueCount(COUNT);
    checkListOfLists(listVector);
  }
}

/**
 * This test is similar to {@link #listListType()} but we get the inner list writer once at the beginning.
 */
@Test
public void listListType2() {
  try (ListVector listVector = ListVector.empty("list", allocator)) {
    listVector.allocateNew();
    UnionListWriter listWriter = new UnionListWriter(listVector);
    ListWriter innerListWriter = listWriter.list();
    for (int i = 0; i < COUNT; i++) {
      listWriter.startList();
      for (int j = 0; j < i % 7; j++) {
        innerListWriter.startList();
        for (int k = 0; k < i % 13; k++) {
          innerListWriter.integer().writeInt(k);
        }
        innerListWriter.endList();
      }
      listWriter.endList();
    }
    listWriter.setValueCount(COUNT);
    checkListOfLists(listVector);
  }
}

// Shared assertions for listListType / listListType2.
private void checkListOfLists(final ListVector listVector) {
  UnionListReader listReader = new UnionListReader(listVector);
  for (int i = 0; i < COUNT; i++) {
    listReader.setPosition(i);
    for (int j = 0; j < i % 7; j++) {
      listReader.next();
      FieldReader innerListReader = listReader.reader();
      for (int k = 0; k < i % 13; k++) {
        innerListReader.next();
        Assert.assertEquals("record: " + i, k, innerListReader.reader().readInteger().intValue());
      }
    }
  }
}

// Lists of lists whose inner elements alternate int / bigInt (union element type).
@Test
public void unionListListType() {
  try (ListVector listVector = ListVector.empty("list", allocator)) {
    listVector.allocateNew();
    UnionListWriter listWriter = new UnionListWriter(listVector);
    for (int i = 0; i < COUNT; i++) {
      listWriter.startList();
      for (int j = 0; j < i % 7; j++) {
        ListWriter innerListWriter = listWriter.list();
        innerListWriter.startList();
        for (int k = 0; k < i % 13; k++) {
          if (k % 2 == 0) {
            innerListWriter.integer().writeInt(k);
          } else {
// Continuation of unionListListType: odd inner elements are written as bigInt.
            innerListWriter.bigInt().writeBigInt(k);
          }
        }
        innerListWriter.endList();
      }
      listWriter.endList();
    }
    listWriter.setValueCount(COUNT);
    checkUnionList(listVector);
  }
}

/**
 * This test is similar to {@link #unionListListType()} but we get the inner list writer once at the beginning.
 */
@Test
public void unionListListType2() {
  try (ListVector listVector = ListVector.empty("list", allocator)) {
    listVector.allocateNew();
    UnionListWriter listWriter = new UnionListWriter(listVector);
    ListWriter innerListWriter = listWriter.list();
    for (int i = 0; i < COUNT; i++) {
      listWriter.startList();
      for (int j = 0; j < i % 7; j++) {
        innerListWriter.startList();
        for (int k = 0; k < i % 13; k++) {
          if (k % 2 == 0) {
            innerListWriter.integer().writeInt(k);
          } else {
            innerListWriter.bigInt().writeBigInt(k);
          }
        }
        innerListWriter.endList();
      }
      listWriter.endList();
    }
    listWriter.setValueCount(COUNT);
    checkUnionList(listVector);
  }
}

// Shared assertions for unionListListType / unionListListType2.
private void checkUnionList(ListVector listVector) {
  UnionListReader listReader = new UnionListReader(listVector);
  for (int i = 0; i < COUNT; i++) {
    listReader.setPosition(i);
    for (int j = 0; j < i % 7; j++) {
      listReader.next();
      FieldReader innerListReader = listReader.reader();
      for (int k = 0; k < i % 13; k++) {
        innerListReader.next();
        if (k % 2 == 0) {
          Assert.assertEquals("record: " + i, k, innerListReader.reader().readInteger().intValue());
        } else {
          Assert.assertEquals("record: " + i, k, innerListReader.reader().readLong().longValue());
        }
      }
    }
  }
}

// Union vector alternating int / float4 per row.
@Test
public void simpleUnion() {
  UnionVector vector = new UnionVector("union", allocator, null);
  UnionWriter unionWriter = new UnionWriter(vector);
  unionWriter.allocate();
  for (int i = 0; i < COUNT; i++) {
    unionWriter.setPosition(i);
    if (i % 2 == 0) {
      unionWriter.writeInt(i);
    } else {
      unionWriter.writeFloat4((float) i);
    }
  }
  vector.getMutator().setValueCount(COUNT);
  UnionReader unionReader = new UnionReader(vector);
  for (int i = 0; i < COUNT; i++) {
    unionReader.setPosition(i);
    if (i % 2 == 0) {
      // NOTE(review): this resolves to assertEquals(double, double, delta) with
      // readInteger() landing in the delta slot, so it compares i with i and always
      // passes; likely meant assertEquals(i, unionReader.readInteger().intValue()).
      // Confirm against upstream before changing.
      Assert.assertEquals(i, i,
// Continuation of simpleUnion's read loop (assert opened on the previous line).
          unionReader.readInteger());
    } else {
      Assert.assertEquals((float) i, unionReader.readFloat(), 1e-12);
    }
  }
  vector.close();
}

// Writing bigInt then varChar under the same field name "a" must promote the
// field to a union of both types.
@Test
public void promotableWriter() {
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  for (int i = 0; i < 100; i++) {
    BigIntWriter bigIntWriter = rootWriter.bigInt("a");
    bigIntWriter.setPosition(i);
    bigIntWriter.writeBigInt(i);
  }
  // Before promotion: "a" is a signed 64-bit int field.
  Field field = parent.getField().getChildren().get(0).getChildren().get(0);
  Assert.assertEquals("a", field.getName());
  Assert.assertEquals(Int.TYPE_TYPE, field.getType().getTypeID());
  Int intType = (Int) field.getType();
  Assert.assertEquals(64, intType.getBitWidth());
  Assert.assertTrue(intType.getIsSigned());
  for (int i = 100; i < 200; i++) {
    VarCharWriter varCharWriter = rootWriter.varChar("a");
    varCharWriter.setPosition(i);
    byte[] bytes = Integer.toString(i).getBytes();
    ArrowBuf tempBuf = allocator.buffer(bytes.length);
    tempBuf.setBytes(0, bytes);
    varCharWriter.writeVarChar(0, bytes.length, tempBuf);
  }
  // After promotion: "a" is a union of Int and Utf8.
  field = parent.getField().getChildren().get(0).getChildren().get(0);
  Assert.assertEquals("a", field.getName());
  Assert.assertEquals(Union.TYPE_TYPE, field.getType().getTypeID());
  Assert.assertEquals(Int.TYPE_TYPE, field.getChildren().get(0).getType().getTypeID());
  Assert.assertEquals(Utf8.TYPE_TYPE, field.getChildren().get(1).getType().getTypeID());
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  for (int i = 0; i < 100; i++) {
    rootReader.setPosition(i);
    FieldReader reader = rootReader.reader("a");
    Long value = reader.readLong();
    Assert.assertNotNull("index: " + i, value);
    Assert.assertEquals(i, value.intValue());
  }
  for (int i = 100; i < 200; i++) {
    rootReader.setPosition(i);
    FieldReader reader = rootReader.reader("a");
    Text value = reader.readText();
    Assert.assertEquals(Integer.toString(i), value.toString());
  }
}

/**
 * Even without writing to the writer, the union schema is created correctly.
 */
@Test
public void promotableWriterSchema() {
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  rootWriter.bigInt("a");
  rootWriter.varChar("a");
  Field field = parent.getField().getChildren().get(0).getChildren().get(0);
  Assert.assertEquals("a", field.getName());
  Assert.assertEquals(ArrowTypeID.Union, field.getType().getTypeID());
  Assert.assertEquals(ArrowTypeID.Int, field.getChildren().get(0).getType().getTypeID());
  Int intType = (Int) field.getChildren().get(0).getType();
  Assert.assertEquals(64, intType.getBitWidth());
  Assert.assertTrue(intType.getIsSigned());
  Assert.assertEquals(ArrowTypeID.Utf8, field.getChildren().get(1).getType().getTypeID());
}

// Recursively collects field names, joining nested names with "::".
private Set<String> getFieldNames(List<Field> fields) {
  Set<String> fieldNames = new HashSet<>();
  for (Field field: fields) {
    fieldNames.add(field.getName());
    if (!field.getChildren().isEmpty()) {
      for (String name: getFieldNames(field.getChildren())) {
        fieldNames.add(field.getName() + "::" + name);
      }
    }
  }
  return fieldNames;
}

// Case-sensitive vs case-insensitive field-name handling in MapWriter.
@Test
public void mapWriterMixedCaseFieldNames() {
  // test case-sensitive MapWriter
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("rootCaseSensitive", parent, false, true);
  MapWriter rootWriterCaseSensitive = writer.rootAsMap();
  rootWriterCaseSensitive.bigInt("int_field");
  rootWriterCaseSensitive.bigInt("Int_Field");
  rootWriterCaseSensitive.float4("float_field");
  rootWriterCaseSensitive.float4("Float_Field");
  MapWriter mapFieldWriterCaseSensitive = rootWriterCaseSensitive.map("map_field");
  mapFieldWriterCaseSensitive.varChar("char_field");
  mapFieldWriterCaseSensitive.varChar("Char_Field");
  ListWriter listFieldWriterCaseSensitive = rootWriterCaseSensitive.list("list_field");
  MapWriter listMapFieldWriterCaseSensitive = listFieldWriterCaseSensitive.map();
  listMapFieldWriterCaseSensitive.bit("bit_field");
// Continuation of mapWriterMixedCaseFieldNames: case-sensitive assertions, then
// the case-insensitive writer setup.
  listMapFieldWriterCaseSensitive.bit("Bit_Field");
  List<Field> fieldsCaseSensitive = parent.getField().getChildren().get(0).getChildren();
  Set<String> fieldNamesCaseSensitive = getFieldNames(fieldsCaseSensitive);
  // Case-sensitive: both casings survive as distinct fields.
  Assert.assertEquals(11, fieldNamesCaseSensitive.size());
  Assert.assertTrue(fieldNamesCaseSensitive.contains("int_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("Int_Field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("float_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("Float_Field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("map_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("map_field::char_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("map_field::Char_Field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field::$data$"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field::$data$::bit_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field::$data$::Bit_Field"));

  // test case-insensitive MapWriter
  ComplexWriter writerCaseInsensitive = new ComplexWriterImpl("rootCaseInsensitive", parent, false, false);
  MapWriter rootWriterCaseInsensitive = writerCaseInsensitive.rootAsMap();
  rootWriterCaseInsensitive.bigInt("int_field");
  rootWriterCaseInsensitive.bigInt("Int_Field");
  rootWriterCaseInsensitive.float4("float_field");
  rootWriterCaseInsensitive.float4("Float_Field");
  MapWriter mapFieldWriterCaseInsensitive = rootWriterCaseInsensitive.map("map_field");
  mapFieldWriterCaseInsensitive.varChar("char_field");
  mapFieldWriterCaseInsensitive.varChar("Char_Field");
  ListWriter listFieldWriterCaseInsensitive = rootWriterCaseInsensitive.list("list_field");
  MapWriter listMapFieldWriterCaseInsensitive = listFieldWriterCaseInsensitive.map();
  listMapFieldWriterCaseInsensitive.bit("bit_field");
  listMapFieldWriterCaseInsensitive.bit("Bit_Field");
  List<Field> fieldsCaseInsensitive =
// Continuation of mapWriterMixedCaseFieldNames (RHS of the declaration on the previous line).
      parent.getField().getChildren().get(1).getChildren();
  Set<String> fieldNamesCaseInsensitive = getFieldNames(fieldsCaseInsensitive);
  // Case-insensitive: mixed-case duplicates collapse into one field each.
  Assert.assertEquals(7, fieldNamesCaseInsensitive.size());
  Assert.assertTrue(fieldNamesCaseInsensitive.contains("int_field"));
  Assert.assertTrue(fieldNamesCaseInsensitive.contains("float_field"));
  Assert.assertTrue(fieldNamesCaseInsensitive.contains("map_field"));
  Assert.assertTrue(fieldNamesCaseInsensitive.contains("map_field::char_field"));
  // NOTE(review): the next three assertions check fieldNamesCaseSensitive, not
  // fieldNamesCaseInsensitive — looks like a copy-paste slip (they still pass
  // because the case-sensitive set also contains these names); confirm upstream.
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field::$data$"));
  Assert.assertTrue(fieldNamesCaseSensitive.contains("list_field::$data$::bit_field"));
}

// Seconds-precision timestamp writers, with and without a timezone.
@Test
public void timeStampSecWriter() throws Exception {
  // test values
  final long expectedSecs = 981173106L;
  final LocalDateTime expectedSecDateTime = new LocalDateTime(2001, 2, 3, 4, 5, 6, 0);
  // write
  MapVector parent = new MapVector("parent", allocator, null);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  {
    TimeStampSecWriter timeStampSecWriter = rootWriter.timeStampSec("sec");
    timeStampSecWriter.setPosition(0);
    timeStampSecWriter.writeTimeStampSec(expectedSecs);
  }
  {
    TimeStampSecTZWriter timeStampSecTZWriter = rootWriter.timeStampSecTZ("secTZ", "UTC");
    timeStampSecTZWriter.setPosition(1);
    timeStampSecTZWriter.writeTimeStampSecTZ(expectedSecs);
  }
  // schema
  List<Field> children = parent.getField().getChildren().get(0).getChildren();
  checkTimestampField(children.get(0), "sec");
  checkTimestampTZField(children.get(1), "secTZ", "UTC");
  // read
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  {
    FieldReader secReader = rootReader.reader("sec");
    secReader.setPosition(0);
    LocalDateTime secDateTime = secReader.readLocalDateTime();
    Assert.assertEquals(expectedSecDateTime, secDateTime);
    long secLong = secReader.readLong();
    Assert.assertEquals(expectedSecs, secLong);
  }
  {
    FieldReader secTZReader =
// Continuation of timeStampSecWriter's timezone read block.
        rootReader.reader("secTZ");
    secTZReader.setPosition(1);
    long secTZLong = secTZReader.readLong();
    Assert.assertEquals(expectedSecs, secTZLong);
  }
}

// Millisecond-precision timestamp writers, with and without a timezone.
@Test
public void timeStampMilliWriters() throws Exception {
  // test values
  final long expectedMillis = 981173106123L;
  final LocalDateTime expectedMilliDateTime = new LocalDateTime(2001, 2, 3, 4, 5, 6, 123);
  // write
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  {
    TimeStampMilliWriter timeStampWriter = rootWriter.timeStampMilli("milli");
    timeStampWriter.setPosition(0);
    timeStampWriter.writeTimeStampMilli(expectedMillis);
  }
  String tz = DateUtility.getTimeZone(10);
  {
    TimeStampMilliTZWriter timeStampTZWriter = rootWriter.timeStampMilliTZ("milliTZ", tz);
    timeStampTZWriter.setPosition(0);
    timeStampTZWriter.writeTimeStampMilliTZ(expectedMillis);
  }
  // schema
  List<Field> children = parent.getField().getChildren().get(0).getChildren();
  checkTimestampField(children.get(0), "milli");
  checkTimestampTZField(children.get(1), "milliTZ", tz);
  // read
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  {
    FieldReader milliReader = rootReader.reader("milli");
    milliReader.setPosition(0);
    LocalDateTime milliDateTime = milliReader.readLocalDateTime();
    Assert.assertEquals(expectedMilliDateTime, milliDateTime);
    long milliLong = milliReader.readLong();
    Assert.assertEquals(expectedMillis, milliLong);
  }
  {
    FieldReader milliTZReader = rootReader.reader("milliTZ");
    milliTZReader.setPosition(0);
    long milliTZLong = milliTZReader.readLong();
    Assert.assertEquals(expectedMillis, milliTZLong);
  }
}

// Asserts the field has the given name and a Timestamp type.
private void checkTimestampField(Field field, String name) {
  Assert.assertEquals(name, field.getName());
  Assert.assertEquals(ArrowType.Timestamp.TYPE_TYPE, field.getType().getTypeID());
}

// Additionally asserts the timestamp field carries the expected timezone.
private void checkTimestampTZField(Field field, String name, String tz) {
  checkTimestampField(field, name);
  Assert.assertEquals(tz,
// Continuation of checkTimestampTZField.
      ((Timestamp)field.getType()).getTimezone());
}

// Microsecond-precision timestamp writers, with and without a timezone.
@Test
public void timeStampMicroWriters() throws Exception {
  // test values
  final long expectedMicros = 981173106123456L;
  final LocalDateTime expectedMicroDateTime = new LocalDateTime(2001, 2, 3, 4, 5, 6, 123);
  // write
  MapVector parent = new MapVector("parent", allocator, null);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  {
    TimeStampMicroWriter timeStampMicroWriter = rootWriter.timeStampMicro("micro");
    timeStampMicroWriter.setPosition(0);
    timeStampMicroWriter.writeTimeStampMicro(expectedMicros);
  }
  String tz = DateUtility.getTimeZone(5);
  {
    TimeStampMicroTZWriter timeStampMicroWriter = rootWriter.timeStampMicroTZ("microTZ", tz);
    timeStampMicroWriter.setPosition(1);
    timeStampMicroWriter.writeTimeStampMicroTZ(expectedMicros);
  }
  // schema
  List<Field> children = parent.getField().getChildren().get(0).getChildren();
  checkTimestampField(children.get(0), "micro");
  checkTimestampTZField(children.get(1), "microTZ", tz);
  // read
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  {
    FieldReader microReader = rootReader.reader("micro");
    microReader.setPosition(0);
    LocalDateTime microDateTime = microReader.readLocalDateTime();
    Assert.assertEquals(expectedMicroDateTime, microDateTime);
    long microLong = microReader.readLong();
    Assert.assertEquals(expectedMicros, microLong);
  }
  {
    FieldReader microReader = rootReader.reader("microTZ");
    microReader.setPosition(1);
    long microLong = microReader.readLong();
    Assert.assertEquals(expectedMicros, microLong);
  }
}

// Nanosecond-precision timestamp writers, with and without a timezone.
@Test
public void timeStampNanoWriters() throws Exception {
  // test values
  final long expectedNanos = 981173106123456789L;
  final LocalDateTime expectedNanoDateTime = new LocalDateTime(2001, 2, 3, 4, 5, 6, 123);
  // write
  MapVector parent = new MapVector("parent", allocator, null);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  {
    TimeStampNanoWriter
// Continuation of timeStampNanoWriters (declaration begun on the previous line).
        timeStampNanoWriter = rootWriter.timeStampNano("nano");
    timeStampNanoWriter.setPosition(0);
    timeStampNanoWriter.writeTimeStampNano(expectedNanos);
  }
  String tz = DateUtility.getTimeZone(3);
  {
    TimeStampNanoTZWriter timeStampNanoWriter = rootWriter.timeStampNanoTZ("nanoTZ", tz);
    timeStampNanoWriter.setPosition(0);
    timeStampNanoWriter.writeTimeStampNanoTZ(expectedNanos);
  }
  // schema
  List<Field> children = parent.getField().getChildren().get(0).getChildren();
  checkTimestampField(children.get(0), "nano");
  checkTimestampTZField(children.get(1), "nanoTZ", tz);
  // read
  MapReader rootReader = new SingleMapReaderImpl(parent).reader("root");
  {
    FieldReader nanoReader = rootReader.reader("nano");
    nanoReader.setPosition(0);
    LocalDateTime nanoDateTime = nanoReader.readLocalDateTime();
    Assert.assertEquals(expectedNanoDateTime, nanoDateTime);
    long nanoLong = nanoReader.readLong();
    Assert.assertEquals(expectedNanos, nanoLong);
  }
  {
    FieldReader nanoReader = rootReader.reader("nanoTZ");
    nanoReader.setPosition(0);
    long nanoLong = nanoReader.readLong();
    Assert.assertEquals(expectedNanos, nanoLong);
    // Holder-based read path must see the same value.
    NullableTimeStampNanoTZHolder h = new NullableTimeStampNanoTZHolder();
    nanoReader.read(h);
    Assert.assertEquals(expectedNanos, h.value);
  }
}

// Writes a list mixing scalar ints and maps, split-and-transfers the vector,
// then verifies the copied values survive the transfer.
@Test
public void complexCopierWithList() {
  MapVector parent = MapVector.empty("parent", allocator);
  ComplexWriter writer = new ComplexWriterImpl("root", parent);
  MapWriter rootWriter = writer.rootAsMap();
  ListWriter listWriter = rootWriter.list("list");
  MapWriter innerMapWriter = listWriter.map();
  IntWriter outerIntWriter = listWriter.integer();
  rootWriter.start();
  listWriter.startList();
  outerIntWriter.writeInt(1);
  outerIntWriter.writeInt(2);
  innerMapWriter.start();
  IntWriter intWriter = innerMapWriter.integer("a");
  intWriter.writeInt(1);
  innerMapWriter.end();
  innerMapWriter.start();
  intWriter = innerMapWriter.integer("a");
  intWriter.writeInt(2);
  innerMapWriter.end();
  listWriter.endList();
  rootWriter.end();
  writer.setValueCount(1);
  NullableMapVector
// Continuation of complexCopierWithList (declaration begun on the previous line).
      mapVector = (NullableMapVector) parent.getChild("root");
  TransferPair tp = mapVector.getTransferPair(allocator);
  tp.splitAndTransfer(0, 1);
  MapVector toMapVector = (MapVector) tp.getTo();
  JsonStringHashMap<?,?> toMapValue = (JsonStringHashMap<?,?>) toMapVector.getAccessor().getObject(0);
  JsonStringArrayList<?> object = (JsonStringArrayList<?>) toMapValue.get("list");
  // First two list elements are the scalar ints, elements 2 and 3 the maps.
  assertEquals(1, object.get(0));
  assertEquals(2, object.get(1));
  JsonStringHashMap<?,?> innerMap = (JsonStringHashMap<?,?>) object.get(2);
  assertEquals(1, innerMap.get("a"));
  innerMap = (JsonStringHashMap<?,?>) object.get(3);
  assertEquals(2, innerMap.get("a"));
}
// End of the enclosing test class (its header lies before this chunk).
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ecr.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The details of a pull through cache rule. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecr-2015-09-21/PullThroughCacheRule" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class PullThroughCacheRule implements Serializable, Cloneable, StructuredPojo { /** * <p> * The Amazon ECR repository prefix associated with the pull through cache rule. * </p> */ private String ecrRepositoryPrefix; /** * <p> * The upstream registry URL associated with the pull through cache rule. * </p> */ private String upstreamRegistryUrl; /** * <p> * The date and time the pull through cache was created. * </p> */ private java.util.Date createdAt; /** * <p> * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated with. * </p> */ private String registryId; /** * <p> * The Amazon ECR repository prefix associated with the pull through cache rule. * </p> * * @param ecrRepositoryPrefix * The Amazon ECR repository prefix associated with the pull through cache rule. 
*/ public void setEcrRepositoryPrefix(String ecrRepositoryPrefix) { this.ecrRepositoryPrefix = ecrRepositoryPrefix; } /** * <p> * The Amazon ECR repository prefix associated with the pull through cache rule. * </p> * * @return The Amazon ECR repository prefix associated with the pull through cache rule. */ public String getEcrRepositoryPrefix() { return this.ecrRepositoryPrefix; } /** * <p> * The Amazon ECR repository prefix associated with the pull through cache rule. * </p> * * @param ecrRepositoryPrefix * The Amazon ECR repository prefix associated with the pull through cache rule. * @return Returns a reference to this object so that method calls can be chained together. */ public PullThroughCacheRule withEcrRepositoryPrefix(String ecrRepositoryPrefix) { setEcrRepositoryPrefix(ecrRepositoryPrefix); return this; } /** * <p> * The upstream registry URL associated with the pull through cache rule. * </p> * * @param upstreamRegistryUrl * The upstream registry URL associated with the pull through cache rule. */ public void setUpstreamRegistryUrl(String upstreamRegistryUrl) { this.upstreamRegistryUrl = upstreamRegistryUrl; } /** * <p> * The upstream registry URL associated with the pull through cache rule. * </p> * * @return The upstream registry URL associated with the pull through cache rule. */ public String getUpstreamRegistryUrl() { return this.upstreamRegistryUrl; } /** * <p> * The upstream registry URL associated with the pull through cache rule. * </p> * * @param upstreamRegistryUrl * The upstream registry URL associated with the pull through cache rule. * @return Returns a reference to this object so that method calls can be chained together. */ public PullThroughCacheRule withUpstreamRegistryUrl(String upstreamRegistryUrl) { setUpstreamRegistryUrl(upstreamRegistryUrl); return this; } /** * <p> * The date and time the pull through cache was created. * </p> * * @param createdAt * The date and time the pull through cache was created. 
*/ public void setCreatedAt(java.util.Date createdAt) { this.createdAt = createdAt; } /** * <p> * The date and time the pull through cache was created. * </p> * * @return The date and time the pull through cache was created. */ public java.util.Date getCreatedAt() { return this.createdAt; } /** * <p> * The date and time the pull through cache was created. * </p> * * @param createdAt * The date and time the pull through cache was created. * @return Returns a reference to this object so that method calls can be chained together. */ public PullThroughCacheRule withCreatedAt(java.util.Date createdAt) { setCreatedAt(createdAt); return this; } /** * <p> * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated with. * </p> * * @param registryId * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated * with. */ public void setRegistryId(String registryId) { this.registryId = registryId; } /** * <p> * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated with. * </p> * * @return The Amazon Web Services account ID associated with the registry the pull through cache rule is associated * with. */ public String getRegistryId() { return this.registryId; } /** * <p> * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated with. * </p> * * @param registryId * The Amazon Web Services account ID associated with the registry the pull through cache rule is associated * with. * @return Returns a reference to this object so that method calls can be chained together. */ public PullThroughCacheRule withRegistryId(String registryId) { setRegistryId(registryId); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. 
* * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getEcrRepositoryPrefix() != null) sb.append("EcrRepositoryPrefix: ").append(getEcrRepositoryPrefix()).append(","); if (getUpstreamRegistryUrl() != null) sb.append("UpstreamRegistryUrl: ").append(getUpstreamRegistryUrl()).append(","); if (getCreatedAt() != null) sb.append("CreatedAt: ").append(getCreatedAt()).append(","); if (getRegistryId() != null) sb.append("RegistryId: ").append(getRegistryId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof PullThroughCacheRule == false) return false; PullThroughCacheRule other = (PullThroughCacheRule) obj; if (other.getEcrRepositoryPrefix() == null ^ this.getEcrRepositoryPrefix() == null) return false; if (other.getEcrRepositoryPrefix() != null && other.getEcrRepositoryPrefix().equals(this.getEcrRepositoryPrefix()) == false) return false; if (other.getUpstreamRegistryUrl() == null ^ this.getUpstreamRegistryUrl() == null) return false; if (other.getUpstreamRegistryUrl() != null && other.getUpstreamRegistryUrl().equals(this.getUpstreamRegistryUrl()) == false) return false; if (other.getCreatedAt() == null ^ this.getCreatedAt() == null) return false; if (other.getCreatedAt() != null && other.getCreatedAt().equals(this.getCreatedAt()) == false) return false; if (other.getRegistryId() == null ^ this.getRegistryId() == null) return false; if (other.getRegistryId() != null && other.getRegistryId().equals(this.getRegistryId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getEcrRepositoryPrefix() == null) ? 0 : getEcrRepositoryPrefix().hashCode()); hashCode = prime * hashCode + ((getUpstreamRegistryUrl() == null) ? 
0 : getUpstreamRegistryUrl().hashCode()); hashCode = prime * hashCode + ((getCreatedAt() == null) ? 0 : getCreatedAt().hashCode()); hashCode = prime * hashCode + ((getRegistryId() == null) ? 0 : getRegistryId().hashCode()); return hashCode; } @Override public PullThroughCacheRule clone() { try { return (PullThroughCacheRule) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.ecr.model.transform.PullThroughCacheRuleMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/** * Copyright (c) 2004-2005, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Los Angeles nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package avrora.sim.radio;

import avrora.sim.FiniteStateMachine;
import avrora.sim.Simulator;
import avrora.sim.output.SimPrinter;
import avrora.sim.clock.Clock;
import avrora.sim.clock.Synchronizer;
import avrora.sim.energy.Energy;
import avrora.sim.mcu.*;
import avrora.sim.mcu.Microcontroller.Pin.ListenableBooleanViewInput;
import cck.text.StringUtil;
import cck.util.Arithmetic;

/**
 * The <code>CC1000Radio</code> class is a simulation of the CC1000 radio for use with avrora. The CC1000
 * radio is used with the Mica2 platform in the real world. Verbose printers for this class include
 * "radio.cc1000", "radio.cc1000.pinconfig".
 *
 * @author Ben L. Titzer
 * @author Daniel Lee
 */
public class CC1000Radio implements Radio {

    // Crystal oscillator frequency of the CC1000 on the Mica2 (14.7456 MHz).
    private static final double FXOSC_FREQUENCY = 14745600.0;

    /**
     * Register addresses.
     */
    public static final int MAIN = 0x00;
    public static final int FREQ_2A = 0x01;
    public static final int FREQ_1A = 0x02;
    public static final int FREQ_0A = 0x03;
    public static final int FREQ_2B = 0x04;
    public static final int FREQ_1B = 0x05;
    public static final int FREQ_0B = 0x06;
    public static final int FSEP1 = 0x07;
    public static final int FSEP0 = 0x08;
    public static final int CURRENT = 0x09;
    public static final int FRONT_END = 0x0a;
    public static final int PA_POW = 0x0b;
    public static final int PLL = 0x0c;
    public static final int LOCK = 0x0d;
    public static final int CAL = 0x0e;
    public static final int MODEM2 = 0x0f;
    public static final int MODEM1 = 0x10;
    public static final int MODEM0 = 0x11;
    public static final int MATCH = 0x12;
    public static final int FSCTRL = 0x13;
    public static final int PRESCALER = 0x1c;
    public static final int TEST6 = 0x40;
    public static final int TEST5 = 0x41;
    public static final int TEST4 = 0x42;
    public static final int TEST3 = 0x43;
    public static final int TEST2 = 0x44;
    public static final int TEST1 = 0x45;
    public static final int TEST0 = 0x46;

    // Names of the energy-tracking states and the sparse transition-time matrix
    // used by the finite state machine below.
    protected static final String[] allModeNames = RadioEnergy.allModeNames();
    protected static final int[][] ttm = FiniteStateMachine.buildSparseTTM(allModeNames.length, 0);

    // Register file indexed by the 7-bit configuration address (0x00..0x46).
    protected RadioRegister[] registers = new RadioRegister[0x47];

    /**
     * Registers
     */
    protected final MainRegister MAIN_reg;
    protected final FrequencyRegister FREQ_A_reg;
    protected final FrequencyRegister FREQ_B_reg;
    protected final FrequencySeparationRegister FSEP_reg;
    protected final CurrentRegister CURRENT_reg;
    protected final FrontEndRegister FRONT_END_reg;
    protected final PA_POWRegister PA_POW_reg;
    protected final PLLRegister PLL_reg;
    protected final LockRegister LOCK_reg;
    protected final CALRegister CAL_reg;
    protected final Modem2Register MODEM_2_reg;
    protected final Modem1Register MODEM_1_reg;
    protected final Modem0Register MODEM_0_reg;
    protected final MatchRegister MATCH_reg;
    protected final FSCTRLRegister FSCTRL_reg;
    protected final PrescalerRegister PRESCALER_reg;

    protected final SimPrinter radioPrinter;

    // Actual crystal frequency supplied by the platform (may differ from FXOSC_FREQUENCY).
    protected final long xoscFrequency;

    // Frequency register (A or B) currently selected by MAIN.F_REG.
    protected FrequencyRegister currentFrequencyRegister;

    /**
     * Connected Microcontroller, Simulator and SimulatorThread should all correspond.
     */
    protected final Microcontroller mcu;
    protected final Simulator sim;
    protected final Clock clock;
    protected final FiniteStateMachine stateMachine;
    public final CC1000Radio.SerialConfigurationInterface config;

    /**
     * Creates a new CC1000 radio attached to the given microcontroller.
     * Builds the register file, wires the SPI ticker, the RSSI ADC input
     * and the 3-wire serial configuration interface.
     *
     * @param mcu the microcontroller the radio is connected to (must be an ATMegaFamily)
     * @param xfreq the crystal oscillator frequency in Hz
     */
    public CC1000Radio(Microcontroller mcu, long xfreq) {
        xoscFrequency = xfreq;
        this.mcu = mcu;
        this.sim = mcu.getSimulator();
        this.clock = sim.getClock();
        radioPrinter = sim.getPrinter("radio.cc1000");

        // fill the unimplemented address range with placeholder registers
        for (int i = 0x14; i < registers.length; i++) {
            registers[i] = new DummyRegister(i);
        }

        registers[MAIN] = MAIN_reg = new MainRegister();

        FREQ_A_reg = new FrequencyRegister("A");
        registers[FREQ_2A] = FREQ_A_reg.reg2;
        registers[FREQ_1A] = FREQ_A_reg.reg1;
        registers[FREQ_0A] = FREQ_A_reg.reg0;

        FREQ_B_reg = new FrequencyRegister("B");
        registers[FREQ_2B] = FREQ_B_reg.reg2;
        registers[FREQ_1B] = FREQ_B_reg.reg1;
        registers[FREQ_0B] = FREQ_B_reg.reg0;

        FSEP_reg = new FrequencySeparationRegister();
        registers[FSEP1] = FSEP_reg.reg1;
        registers[FSEP0] = FSEP_reg.reg0;

        registers[CURRENT] = CURRENT_reg = new CurrentRegister();
        registers[FRONT_END] = FRONT_END_reg = new FrontEndRegister();
        registers[PA_POW] = PA_POW_reg = new PA_POWRegister();
        registers[PLL] = PLL_reg = new PLLRegister();
        registers[LOCK] = LOCK_reg = new LockRegister();
        registers[CAL] = CAL_reg = new CALRegister();
        registers[MODEM2] = MODEM_2_reg = new Modem2Register();
        registers[MODEM1] = MODEM_1_reg = new Modem1Register();
        registers[MODEM0] = MODEM_0_reg = new Modem0Register();
        registers[MATCH] = MATCH_reg = new MatchRegister();
        registers[FSCTRL] = FSCTRL_reg = new FSCTRLRegister();
        registers[PRESCALER] = PRESCALER_reg = new PrescalerRegister();

        //setup energy recording
        Simulator simulator = mcu.getSimulator();
        stateMachine = new FiniteStateMachine(simulator.getClock(), RadioEnergy.startMode, allModeNames, ttm);
        new Energy("Radio", RadioEnergy.modeAmpere, stateMachine, sim.getEnergyControl());

        ATMegaFamily amcu = (ATMegaFamily) mcu;
        ticker = new SPITicker();
        ticker.spiDevice = (SPIDevice)amcu.getDevice("spi");
        setMedium(createMedium(null, null));
        rssiOutput = new RSSIOutput();
        // RSSI is sampled by the MCU on ADC channel 0
        ADC adc = ((ADC) amcu.getDevice("adc"));
        adc.connectADCInput(rssiOutput, 0);
        config = new SerialConfigurationInterface();
    }

    /**
     * The <code>getFiniteStateMachine()</code> method gets a reference to the finite state
     * machine that represents this radio's state. For example, there are states corresponding
     * to "on", "off", "transmitting", and "receiving". The state names and numbers will vary
     * by radio implementation. The <code>FiniteStateMachine</code> instance allows the user
     * to instrument the state transitions in order to gather information during simulation.
     * @return a reference to the finite state machine for this radio
     */
    public FiniteStateMachine getFiniteStateMachine() {
        return stateMachine;
    }

    /**
     * The <code>RadioRegister</code> is an abstract register grouping together registers on the CC1000
     * radio.
     */
    protected abstract class RadioRegister {
        protected final String id; // name of this register
        protected byte value; // current value of this register

        RadioRegister(String id, byte def) {
            this.id = id;
            this.value = def;
        }

        // Store the new value, decode its bit fields, and emit a status line if verbose.
        public void write(byte val) {
            value = val;
            decode(value);
            if (radioPrinter != null) {
                printStatus();
            }
        }

        // Decode the bit fields of the given value into this register's state.
        protected abstract void decode(byte val);

        protected void printStatus() {
            // default: do nothing
        }
    }

    /**
     * The <code>DummyRegister</code> is a filler class for registers within the 7-bit address space of the
     * radio registers, but do not actually exist/do anything in the real radio.
     */
    protected class DummyRegister extends RadioRegister {
        DummyRegister(int i) {
            super("Dummy " + Integer.toHexString(i), (byte)0);
        }

        protected void decode(byte val) {
        }
    }

    /**
     * The main register on the CC1000.
     */
    protected class MainRegister extends RadioRegister {
        public static final int RXTX = 7;    // 0: RX, 1: TX
        public static final int F_REG = 6;   // 0: A, 1: B
        public static final int RX_PD = 5;   // Power down of RX part of interface
        public static final int TX_PD = 4;   // Power down of TX part of interface
        public static final int FS_PD = 3;   // Power down of Frequency Synthesizer
        public static final int CORE_PD = 2; // Power down of Crystal Oscillator Core
        public static final int BIAS_PD = 1; // Power down of BIAS and Crystal Oscillator Buffer
        public static final int RESET_N = 0; // Reset other registers to default value

        boolean rxtx;
        boolean fReg;
        boolean rxPd;
        boolean txPd;
        boolean fsPd;
        boolean corePd;
        boolean biasPd;
        boolean resetN;

        byte oldVal;                 // previous register value, for edge detection
        boolean transmit_activated;  // NOTE(review): written nowhere in this view — possibly vestigial
        boolean receive_activated;   // NOTE(review): written nowhere in this view — possibly vestigial

        MainRegister() {
            super("MAIN", (byte)0x3e);
        }

        protected void decode(byte val) {
            rxtx = Arithmetic.getBit(val, RXTX);
            fReg = Arithmetic.getBit(val, F_REG);
            rxPd = Arithmetic.getBit(val, RX_PD);
            txPd = Arithmetic.getBit(val, TX_PD);
            fsPd = Arithmetic.getBit(val, FS_PD);
            corePd = Arithmetic.getBit(val, CORE_PD);
            biasPd = Arithmetic.getBit(val, BIAS_PD);
            resetN = Arithmetic.getBit(val, RESET_N);

            // drive the medium receiver/transmitter from the power-down bits
            if (rxPd) receiver.endReceive();
            else receiver.beginReceive(2.4);
            if (txPd) transmitter.endTransmit();
            else transmitter.beginTransmit(getPower(),getFrequency());
            // keep the SPI ticker running whenever either half is powered up
            if (!rxPd || !txPd) {
                ticker.activate();
            } else {
                ticker.deactivate();
            }
            boolean oldrxtx = Arithmetic.getBit(oldVal, RXTX);
            if (rxtx && !oldrxtx) {
                // switch from receive to transmit
                if (radioPrinter != null) {
                    radioPrinter.println("CC1000: RX end receiving -> begin transmitting");
                }
            } else if (!rxtx && oldrxtx) {
                // switch from transmit to receive
                if (radioPrinter != null) {
                    radioPrinter.println("CC1000: TX end transmitting -> begin receiving");
                }
            }
            currentFrequencyRegister = fReg ? FREQ_B_reg : FREQ_A_reg;
            if (resetN && !Arithmetic.getBit(oldVal, RESET_N)) {
                oldVal = val;
                // TODO: reset the radio.
                return;
            }
            if (val != oldVal) {
                // TODO: reduce this code to compute state more easily
                int state;
                if (corePd) state = 1; //power down state
                else state = 2; // core, e.g. crystal on state
                if (!corePd && !biasPd) state = 3; // crystal and bias on state
                if (!corePd && !biasPd && !fsPd) state = 4; // crystal, bias and synth. on
                if (!corePd && !biasPd && !fsPd && !rxtx && !rxPd) state = 5; // receive state
                if (!corePd && !biasPd && !fsPd && rxtx && !txPd) state = PA_POW_reg.getPower() + 6;
                stateMachine.transition(state);
            }
            oldVal = val;
        }

        protected void printStatus() {
            String rxtxS = rxtx ? "TX" : "RX";
            String fRegS = fReg ? "B" : "A";
            StringBuffer buf = new StringBuffer(100);
            buf.append("CC1000[MAIN]: ");
            buf.append(rxtxS);
            buf.append(", freg: ");
            buf.append(fRegS);
            buf.append(", rx pd: ");
            buf.append(StringUtil.toBit(rxPd));
            buf.append(", tx pd: ");
            buf.append(StringUtil.toBit(txPd));
            buf.append(", fs pd: ");
            buf.append(StringUtil.toBit(fsPd));
            buf.append(", core pd: ");
            buf.append(StringUtil.toBit(corePd));
            buf.append(", bias pd: ");
            buf.append(StringUtil.toBit(biasPd));
            buf.append(", reset: ");
            buf.append(StringUtil.toBit(resetN));
            radioPrinter.println(buf.toString());
        }
    }

    /**
     * A frequency register on the CC1000. It is divided into three 8-bit registers.
     */
    protected class FrequencyRegister {
        // TODO: use stacked register view.
        protected final FrequencySubRegister reg2;
        protected final FrequencySubRegister reg1;
        protected final FrequencySubRegister reg0;

        int frequency; // 24-bit combined value of reg2:reg1:reg0

        // subId should be either A or b
        FrequencyRegister(String subId) {
            reg2 = new FrequencySubRegister("FREQ2" + subId);
            reg1 = new FrequencySubRegister("FREQ1" + subId);
            reg0 = new FrequencySubRegister("FREQ0" + subId);
            setFrequency(0x75a0cb); // default frequency is 0b 01111 0101 1010 0000 1100 1011,
        }

        // Recompute the 24-bit frequency from the three byte registers.
        protected void updateFrequency() {
            frequency = 0x00ff0000 & (reg2.value << 16);
            frequency |= 0x0000ff00 & (reg1.value << 8);
            frequency |= 0x000000ff & reg0.value;
        }

        // Split a 24-bit frequency value into the three byte registers.
        protected void setFrequency(int frequency) {
            reg2.write((byte)((0x00ff0000 & frequency) >> 16));
            reg1.write((byte)((0x0000ff00 & frequency) >> 8));
            reg0.write((byte)((0x000000ff & frequency)));
        }

        /**
         * One of the three sub-registers in the 24-bit frequency register.
         */
        protected class FrequencySubRegister extends RadioRegister {

            FrequencySubRegister(String id) {
                super(id, (byte)0);
            }

            protected void decode(byte val) {
                updateFrequency();
            }
        }
    }

    /**
     * The frequency separation register on the CC1000. It is divided into two 8-bit registers.
     */
    protected class FrequencySeparationRegister {
        // TODO: use stacked register view.
        protected final SubRegister reg1 = new SubRegister("FSEP1");
        protected final SubRegister reg0 = new SubRegister("FSEP0");

        FrequencySeparationRegister() {
            setFrequencySeparation(0x59); // default frequency separation is 0b 0000 0000 0101 1001
        }

        int frequencySeparation; // combined value: low 4 bits of reg1, all of reg0

        protected void updateFrequencySeparation() {
            frequencySeparation = (reg1.value & 0x0f) << 8;
            frequencySeparation |= reg0.value;
        }

        protected void setFrequencySeparation(int val) {
            reg1.write((byte)((0x0f00 & val) >> 8));
            reg0.write((byte)(0xff & val));
        }

        /**
         * One of the two sub-registers in the 18-bit frequency separation register.
         */
        protected class SubRegister extends RadioRegister {

            SubRegister(String id) {
                super(id, (byte)0);
            }

            protected void decode(byte val) {
                updateFrequencySeparation();
            }
        }
    }

    static final int[] VCO_CURRENT = {150, 250, 350, 450, 950, 1050, 1150, 1250,
            1450, 1550, 1650, 1750, 2250, 2350, 2450, 2550}; // in microamperes

    static final double[] LO_DRIVE = {0.5, 1.0, 1.5, 2.0}; // in milliamperes
    static final int[] PA_DRIVE = {1, 2, 3, 4}; // in milliamperes

    /**
     * The <code>CurrentRegister</code> controls various currents running through the CC1000 wiring.
     */
    protected class CurrentRegister extends RadioRegister {

        int vcoCurrent = 150;
        double loDrive = 0.5;
        int paDrive = 1;

        CurrentRegister() {
            super("CURRENT", (byte)0xca);
            // default value 0b 1100 1010
        }

        protected void decode(byte val) {
            vcoCurrent = VCO_CURRENT[(val & 0xf0) >> 4];
            loDrive = LO_DRIVE[(val & 0x0c) >> 2];
            paDrive = PA_DRIVE[(val & 0x3)];
        }

        protected void printStatus() {
            radioPrinter.println("CC1000[CURRENT]: vco current: " + vcoCurrent + ", LO drive: " + loDrive
                    + ", PA drive: " + paDrive);
        }
    }

    static final int[] BUF_CURRENT = {520, 690}; // in microamperes
    static final double[] LNA_CURRENT = {0.8, 1.4, 1.8, 2.2}; // in milliamperes

    // Front-end configuration: buffer/LNA currents, RSSI mode and oscillator bypass.
    protected class FrontEndRegister extends RadioRegister {

        int bufCurrent = 520;
        double lnaCurrent = 0.8;

        static final int IF_RSSI_INACTIVE = 0;
        static final int IF_RSSI_ACTIVE = 1;
        static final int IF_RSSI_MIXER = 2;
        int ifRSSI;

        boolean xoscBypassExternal;

        FrontEndRegister() {
            super("FRONT_END", (byte)0);
        }

        protected void decode(byte val) {
            bufCurrent = BUF_CURRENT[(val & 0x20) >> 5];
            lnaCurrent = LNA_CURRENT[(val & 0x18) >> 3];
            ifRSSI = (val & 0x06) >> 1;
            xoscBypassExternal = Arithmetic.getBit(val, 0);
        }
    }

    // Power amplifier output power setting (high/low nibbles).
    protected class PA_POWRegister extends RadioRegister {

        int paHighPower;
        int paLowPower;

        PA_POWRegister() {
            super("PA_POW", (byte)0x0f);
            // default value 0b 0000 1111
        }

        protected void decode(byte val) {
            paHighPower = (value & 0xf0) >> 4;
            paLowPower = (value & 0x0f);
            // TODO: probes.fireAtPowerChange(CC1000Radio.this, getPower());
            //start energy tracking
            //check for transmission mode enabled
            if (!MAIN_reg.corePd && !MAIN_reg.biasPd && !MAIN_reg.fsPd && MAIN_reg.rxtx && !MAIN_reg.txPd)
                stateMachine.transition(getPower() + 6);
        }

        // raw 8-bit power setting
        protected int getPower() {
            return value & 0xff;
        }

        protected void printStatus() {
            radioPrinter.println("CC1000[PA_POW]: PA high power: " + paHighPower + ", PA low power: " + paLowPower);
        }
    }

    // PLL configuration: reference divider and lock alarms.
    protected class PLLRegister extends RadioRegister {
        boolean extFilter;
        int refDiv;
        boolean alarmDisable;
        boolean alarmHigh;
        boolean alarmLow;

        PLLRegister() {
            super("PLL", (byte)0x10);
            // default value 0b 00010000
        }

        protected void decode(byte val) {
            extFilter = Arithmetic.getBit(val, 7);
            refDiv = (value & 0x78) >> 3;
            alarmDisable = Arithmetic.getBit(val, 2);
            alarmHigh = Arithmetic.getBit(val, 1);
            alarmLow = Arithmetic.getBit(val, 0);
        }
    }

    //PLL_LOCK_ACCURACY
    static final int[] SETS_LOCK_THRESHOLD = {127, 31};
    static final int[] RESET_LOCK_THRESHOLD = {111, 15};

    // Lock status/configuration register; read() exposes the low status bits.
    protected class LockRegister extends RadioRegister {

        static final int LOCK_NORMAL = 0;
        static final int LOCK_CONTINUOUS = 1;
        static final int LOCK_INSTANT = 2;
        static final int ALARM_H = 3;
        static final int ALARM_L = 4;
        static final int CAL_COMPLETE = 5;
        static final int IF_OUT = 6;
        static final int REFERENCE_DIVIDER = 7;
        static final int TX_DPB = 8;
        static final int MANCHESTER_VIOLATION = 9;
        static final int RX_PDB = 10;
        // 11 undefined
        // 12 undefined
        static final int LOCK_AVG_FILTER = 13;
        static final int N_DIVIDER = 14;
        static final int F_COMP = 15;

        final String[] LOCK_SELECT = {"LOCK NORMAL", "LOCK CONTINUOUS", "LOCK INSTANT", "ALARM HIGH", "ALARM LOW",
                "CAL COMPLETE", "IF OUT", "REFERENCE DIVIDER", "TX DPB", "MANCHESTER VIOLATION", "RX PDB",
                "NOT DEFINED (11)", "NOT DEFINED (12)", "LOCK AVG FILTER", "N DIVIDER", "F COMP"};

        int lockSelect;
        boolean pllLockLength;
        int setsLockThreshold = 127;
        int resetLockThreshold = 111;
        boolean lockInstant;
        boolean lockContinuous;

        LockRegister() {
            super("LOCK", (byte)0);
        }

        protected void decode(byte val) {
            lockSelect = (val & 0xf0) >> 4;
            int pllLockAccuracy = (val & 0x0c) >> 2;
            setsLockThreshold = SETS_LOCK_THRESHOLD[pllLockAccuracy];
            resetLockThreshold = RESET_LOCK_THRESHOLD[pllLockAccuracy];
            pllLockLength = Arithmetic.getBit(val, 2);
            lockInstant = Arithmetic.getBit(val, 1);
            lockContinuous = Arithmetic.getBit(val, 0);
        }

        protected void printStatus() {
            StringBuffer buf = new StringBuffer(100);
            buf.append("CC1000[LOCK]: lock select: ");
            buf.append(LOCK_SELECT[lockSelect]);
            buf.append(", set thr: ");
            buf.append(setsLockThreshold);
            buf.append(", reset thr: ");
            buf.append(resetLockThreshold);
            buf.append(", inst: ");
            buf.append(StringUtil.toBit(lockInstant));
            buf.append(", contin: ");
            buf.append(StringUtil.toBit(lockContinuous));
            radioPrinter.println(buf.toString());
        }

        public byte read() {
            return (byte)(value & 0x03);
        }
    }

    // Calibration control register; starting a calibration schedules a
    // completion event after the datasheet-derived calibration delay.
    protected class CALRegister extends RadioRegister {

        static final int CAL_START = 7;
        static final int CAL_DUAL = 6;
        static final int CAL_WAIT = 5;
        static final int CAL_CURRENT = 4;
        static final int CAL_COMPLETE = 3;

        boolean calStart;
        boolean calDual;
        boolean calWait;
        boolean calCurrent;
        boolean calComplete;

        static final int CAL_ITERATE_NORMAL = 0x6;

        int calIterate;

        Calibrate calibrate = new Calibrate();

        CALRegister() {
            super("CAL", (byte)0x05);
            // default value 0b 00000101
        }

        boolean calibrating; // true while a calibration event is pending

        protected void decode(byte val) {
            boolean oldCalStart = calStart;
            calStart = Arithmetic.getBit(val, 7);
            calDual = Arithmetic.getBit(val, 6);
            calWait = Arithmetic.getBit(val, 5);
            calCurrent = Arithmetic.getBit(val, 4);
            calComplete = Arithmetic.getBit(val, 3);
            calIterate = (value & 0x7);
            if (!oldCalStart && calStart && !calibrating) {
                calibrating = true;
                //OL: calibration time depends on the reference frequency
                //worst case is 34ms
                //it is determined with: 34ms * 1MHz / (Fxosc / REFDIV)
                //with Fxosc is 14.7456 MHz for CC1000 on Mica2
                //and REFDIV is set in the PLL register
                //in the current TinyOS version (1.1.7) REFDIV seems to be 14
                //resulting in a delay of a little more than 32ms
                //Reference: CC1000 datasheet (rev 2.1) pages 20 and 22
                double calMs = (34.0 * 1000000.0 / FXOSC_FREQUENCY) * PLL_reg.refDiv;
                clock.insertEvent(calibrate, clock.millisToCycles(calMs));
            }
        }

        protected void printStatus() {
            StringBuffer buf = new StringBuffer(100);
            buf.append("CC1000[CAL]: cal start: ");
            buf.append(StringUtil.toBit(calStart));
            buf.append(", dual: ");
            buf.append(StringUtil.toBit(calDual));
            buf.append(", wait: ");
            buf.append(StringUtil.toBit(calWait));
            buf.append(", current: ");
            buf.append(StringUtil.toBit(calCurrent));
            buf.append(", complete: ");
            buf.append(StringUtil.toBit(calComplete));
            buf.append(", iterate: ");
            buf.append(calIterate);
            radioPrinter.println(buf.toString());
        }

        /**
         * Event fired when the simulated calibration delay elapses: clears
         * CAL_START, sets CAL_COMPLETE, and reflects completion in LOCK.
         */
        protected class Calibrate implements Simulator.Event {

            public void fire() {
                // TODO: multiple calls to decode()
                value = Arithmetic.setBit(value, CAL_START, false);
                decode(value);
                value = Arithmetic.setBit(value, CAL_COMPLETE, true);
                decode(value);
                LOCK_reg.write((byte)((LOCK_reg.read() & 0x0f) | 0x50)); // LOCK = CAL_COMPLETE
                if (radioPrinter != null) {
                    radioPrinter.println("CC1000: Calibration complete ");
                }
                calibrating = false;
            }
        }
    }

    // Modem control register 2: peak detector configuration.
    protected class Modem2Register extends RadioRegister {

        boolean peakDetect;
        int peakLevelOffset;

        Modem2Register() {
            super("MODEM2", (byte)0x96);
            // default value 0b 1001 0110
        }

        protected void decode(byte val) {
            peakDetect = Arithmetic.getBit(val, 7);
            peakLevelOffset = val & 0x7f;
        }
    }

    static final int[] SETTLING = {11, 22, 43, 86};

    // Modem control register 1: lock averaging and settling time.
    protected class Modem1Register extends RadioRegister {

        int mlimit;
        boolean lockAvgN;
        boolean lockAvgMode;
        int settling = 11;
        boolean modemResetN;

        Modem1Register() {
            super("MODEM1", (byte)0x67);
            // default value 0b 0110 0111
        }

        protected void decode(byte val) {
            mlimit = (val & 0xe0) >> 5;
            lockAvgN = Arithmetic.getBit(val, 4);
            lockAvgMode = Arithmetic.getBit(val, 3);
            settling = SETTLING[(val & 0x06) >> 1];
            modemResetN = Arithmetic.getBit(val, 0);
        }
    }

    static final int[] BAUDRATE = {600, 1200, 2400, 4800, 9600, 19200, 0, 0};
    static final int[] XOSC_FREQ = {3686400, // 3-4 Mhz
            7372800, // 6-8 Mhz
            1105920, // 9-12 Mhz
            1474560};// 12-16 Mhz

    /**
     * The baud rate of the system is determined by values on the MODEM0 register. TinyOS uses a baud rate of
     * 19.2 kBaud with manchester encoding, which translates into 9.6 kbps of data.
     */
    protected class Modem0Register extends RadioRegister {
        int baudrate = 2400;
        int bitrate = 1200;

        static final int DATA_FORMAT_NRZ = 0;
        static final int DATA_FORMAT_MANCHESTER = 1;
        static final int DATA_FORMAT_UART = 2;
        int dataFormat = DATA_FORMAT_MANCHESTER;

        int xoscFreqRange = XOSC_FREQ[0];

        Modem0Register() {
            super("MODEM0", (byte)0x24);
            decode(value);
            // default value 0b 0010 0100
        }

        protected void decode(byte val) {
            int baudIndex = (val & 0x70) >> 4;
            int xoscIndex = (val & 0x3);
            dataFormat = (val & 0x0c) >> 2;
            xoscFreqRange = XOSC_FREQ[xoscIndex];
            calculateBaudRate(baudIndex, xoscIndex);
            // Manchester encoding carries one data bit per two baud
            bitrate = baudrate / (dataFormat == DATA_FORMAT_MANCHESTER ? 2 : 1);
            // TODO: probes.fireAtBitRateChange(CC1000Radio.this, bitrate);
        }

        private void calculateBaudRate(int baudIndex, int xoscIndex) {
            // the 76.8k/38.4k rates are only reachable with a fast enough crystal
            if ( baudIndex == 5 && xoscFrequency > XOSC_FREQ[2]) {
                if ( xoscIndex == 0 ) baudrate = 76800;
                else if ( xoscIndex == 1 ) baudrate = 38400;
                else baudrate = BAUDRATE[baudIndex];
            } else {
                baudrate = BAUDRATE[baudIndex];
            }
        }

        protected void printStatus() {
            radioPrinter.println("CC1000[MODEM0]: "+baudrate+" baud, "+bitrate+" bit rate, manchester: "+
                    (dataFormat == DATA_FORMAT_MANCHESTER));
        }
    }

    // RX/TX preamble match configuration.
    protected class MatchRegister extends RadioRegister {
        int rxMatch;
        int txMatch;

        MatchRegister() {
            super("MATCH", (byte)0);
        }

        protected void decode(byte val) {
            rxMatch = (val & 0xf0) >> 4;
            txMatch = (val & 0x0f);
        }
    }

    // Frequency synthesizer control register.
    protected class FSCTRLRegister extends RadioRegister {

        boolean fsResetN;

        FSCTRLRegister() {
            super("FSCTRL", (byte)0x01);
            // default value 0b 0000 0001
        }

        protected void decode(byte val) {
            fsResetN = Arithmetic.getBit(val, 0);
        }
    }

    static final double[] PRE_SWING = {1.0, 2.0 / 3, 7.0 / 3, 5.0 / 3};
    static final double[] PRE_CURRENT = {1.0, 2.0 / 3, 1.0 / 2, 2.0 / 5};

    // Prescaler swing/current configuration.
    protected class PrescalerRegister extends RadioRegister {

        double preSwing = 1.0;
        double preCurrent = 1.0;
        boolean ifInput;
        boolean ifFront;

        PrescalerRegister() {
            super("PRESCALER", (byte)0);
        }

        protected void decode(byte val) {
            preSwing = PRE_SWING[(val & 0xc0) >> 6];
            preCurrent = PRE_CURRENT[(val & 0x30) >> 4];
            ifInput = Arithmetic.getBit(val, 3);
            ifFront = Arithmetic.getBit(val, 4);
        }
    }

    /**
     * Reads the three pins used in the three wire serial configuration interface. Microcontrollers can
     * program this radio by communication over this interface. Debug output for communication over this
     * interface is available on "radio.cc1000.pinconfig"
     */
    public class SerialConfigurationInterface {

        public final PCLKOutput PCLK_in = new PCLKOutput();
        public final PDATAOutput PDATA_in = new PDATAOutput();
        public final PDATAInput PDATA_out = new PDATAInput();
        public final PALEOutput PALE_in = new PALEOutput();

        byte address;        // 7-bit register address being clocked in
        boolean writeCommand; // 8th bit: true = write, false = read
        int writeValue;      // value being shifted in on a write
        boolean inputPin;    // last level seen on PDATA
        byte readData;       // register value being shifted out on a read
        int bitsRead;        // number of bits clocked so far in this transaction

        SimPrinter readerPrinter;

        SerialConfigurationInterface() {
            readerPrinter = sim.getPrinter("radio.cc1000.pinconfig");
        }

        /**
         * Clocking the PCLK pin is what drives the action of the configuration interface. One bit of data on
         * PDATA per clock.
         */
        protected class PCLKOutput implements Microcontroller.Pin.Output {
            protected boolean last;

            public void write(boolean level) {
                // only trigger on level changes
                if (level != last) {
                    // for address or write command on falling edge
                    // when the address is over and we have a read command on rising edge
                    if (((!PALE_in.last || writeCommand) && !level)
                            || (PALE_in.last && !writeCommand && level)) clockInBit();
                    last = level;
                }
            }
        }

        protected class PDATAInput extends ListenableBooleanViewInput {
        }

        protected class PDATAOutput implements Microcontroller.Pin.Output {

            public void write(boolean level) {
                inputPin = level;
            }
        }

        protected class PALEOutput implements Microcontroller.Pin.Output {
            protected boolean last;

            public void write(boolean level) {
                if ( level == last ) return;
                if (!level) {
                    // falling edge: start of a new transaction
                    if (bitsRead != 0 && readerPrinter != null) {
                        readerPrinter.println("Unexpected falling edge on CC1000.PALE when bitsRead is " + bitsRead);
                    }
                    bitsRead = 0;
                    address = 0;
                    writeCommand = false;
                    writeValue = 0;
                } else {
                    // rising edge: address phase finished
                    if (bitsRead != 8 && readerPrinter != null) {
                        readerPrinter.println("Unexpected rising edge on CC1000.PALE when bitsRead is " + bitsRead);
                    }
                    bitsRead = 8;
                    // set the first output bit on a read command (see CC1000 data sheet Fig. 5 page 14)
                    if (!writeCommand) outputReadBit();
                }
                last = level;
            }
        }

        private void clockInBit() {
            if (bitsRead < 7) {
                // the first 7 bits are the address
                address <<= 1;
                address |= inputPin ? 0x1 : 0x0;
            } else if (bitsRead == 7) {
                // the 8th bit is the read/write bit
                writeCommand = inputPin;
                if (!writeCommand) {
                    readData = registers[address].value;
                    // the first bit is set on PDATA when PALE goes high
                }
            } else if ( bitsRead < 16 ) {
                // the 9-16th bits are either the value to write or the value of the register
                if (writeCommand) {
                    // shift in the new bit into the value to write
                    inputWriteBit();
                } else {
                    // shift out another bit from the register value
                    outputReadBit();
                }
            }
            bitsRead++;
            if (bitsRead == 16) {
                // complete the command.
                if (writeCommand) {
                    registers[address].write((byte)writeValue);
                    if (readerPrinter != null)
                        readerPrinter.println("CC1000.Reg[" + StringUtil.toHex(address, 2) + "] <= "
                                + StringUtil.toMultirepString(writeValue, 8));
                } else {
                    if (readerPrinter != null)
                        readerPrinter.println("CC1000.Reg[" + StringUtil.toHex(address, 2) + "] -> "
                                + StringUtil.toMultirepString(readData, 8));
                }
                // reset the state
                bitsRead = 0;
                address = 0;
            }
        }

        private void inputWriteBit() {
            writeValue = writeValue << 1 | (inputPin ? 0x1 : 0x0);
        }

        private void outputReadBit() {
            PDATA_out.setLevel(Arithmetic.getBit(readData, 14 - bitsRead));
        }
    }

    /**
     * get the transmission power (dB)
     */
    public double getPower() {
        double powerSet = (double)PA_POW_reg.getPower();
        double power;
        //convert to dB (by linearization) we distinguish values less than 16
        //and higher ones. Probably a lookup table and Spline
        //interpolation would be nice here
        if (powerSet < 16) power = 0.12 * powerSet - 1.8;
        else power = 0.00431 * powerSet - 0.06459;
        return power;
    }

    /**
     * get transmission frequency (Mhz)
     */
    public double getFrequency() {
        double fref = FXOSC_FREQUENCY / PLL_reg.refDiv;
        int freq = !MAIN_reg.fReg ? FREQ_A_reg.frequency : FREQ_B_reg.frequency;
        return fref * (freq + 8192) / (16384*1E6);
    }

    // Periodic event that exchanges one byte with the MCU's SPI device
    // every byte-time while the radio is active.
    private class SPITicker implements Simulator.Event {
        protected SPIDevice spiDevice;
        protected boolean activated;

        public void fire() {
            // exchange a byte with the SPI device.
            spiTick = clock.getCount();
            SPI.Frame frame = spiDevice.exchange(SPI.newFrame((byte)(rxBuffer >> 8)));
            txBuffer = frame.data;
            clock.insertEvent(this, receiver.cyclesPerByte);
        }

        protected void activate() {
            if (!activated) {
                activated = true;
                clock.insertEvent(this, receiver.cyclesPerByte);
            }
        }

        protected void deactivate() {
            if (activated) {
                activated = false;
                clock.removeEvent(this);
            }
        }
    }

    // Medium transmitter: sends the byte most recently received over SPI.
    private class Transmitter extends Medium.Transmitter {

        Transmitter(Medium m) {
            super(m, sim.getClock());
        }

        public byte nextByte() {
            if (radioPrinter != null) {
                radioPrinter.println("CC1000 "+StringUtil.to0xHex(txBuffer, 2)+" --------> ");
            }
            return txBuffer;
        }
    }

    // Medium receiver: shifts incoming bytes into rxBuffer, aligned to the SPI tick.
    private class Receiver extends Medium.Receiver {

        Receiver(Medium m) {
            super(m, sim.getClock());
        }

        //cc2420 functions that have to be written by radio
        public void setRSSI (double PRec){
            //do nothing
        }

        public void setBER (double BER){
            //do nothing
        }

        public byte nextByte(boolean lock, byte val) {
            if (lock) {
                // compute the bit offset between radio byte boundaries and SPI byte boundaries
                int delta = (int)(clock.getCount() - spiTick);
                int offset = (int)(8 * (delta % cyclesPerByte) / cyclesPerByte);
                // shift in the new bits
                rxBuffer = (rxBuffer << 8) | (~val & 0xff) << offset;
                if (radioPrinter != null) {
                    radioPrinter.println("CC1000 <======== "+StringUtil.to0xHex(val, 2));
                }
            } else {
                rxBuffer = 0;
                if (radioPrinter != null) {
                    radioPrinter.println("CC1000 lock lost");
                }
            }
            return val;
        }
    }

    // ADC input modelling the RSSI signal: band-gap level when the channel is clear.
    private class RSSIOutput implements ADC.ADCInput {
        public float getVoltage() {
            if (receiver.isChannelClear(0,0)) {
                return ADC.VBG_LEVEL;
            }
            else return 0.000f;
        }
    }

    protected Medium medium;
    protected Transmitter transmitter;
    protected Receiver receiver;
    protected SPITicker ticker;
    protected RSSIOutput rssiOutput;
    long spiTick;   // clock count of the last SPI exchange
    byte txBuffer;  // next byte to transmit, as received over SPI
    int rxBuffer;   // shift register of received bytes

    public static Medium createMedium(Synchronizer synch, Medium.Arbitrator arb) {
        // TODO: we only support 19200 kbit/s
        return new Medium(synch, arb, 19200, 4, 8, 128 * 8);
    }

    public Simulator getSimulator() {
        return sim;
    }

    public Medium.Transmitter getTransmitter() {
        return transmitter;
    }

    public Medium.Receiver getReceiver() {
        return receiver;
    }

    public void setMedium(Medium m) {
        medium = m;
        transmitter = new Transmitter(m);
        receiver = new Receiver(m);
    }

    public Medium getMedium() {
        return medium;
    }
}
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.jnigen; import java.io.InputStream; import java.nio.Buffer; import java.util.ArrayList; import com.badlogic.gdx.jnigen.parsing.CMethodParser; import com.badlogic.gdx.jnigen.parsing.CMethodParser.CMethod; import com.badlogic.gdx.jnigen.parsing.CMethodParser.CMethodParserResult; import com.badlogic.gdx.jnigen.parsing.JavaMethodParser; import com.badlogic.gdx.jnigen.parsing.JavaMethodParser.Argument; import com.badlogic.gdx.jnigen.parsing.JavaMethodParser.JavaMethod; import com.badlogic.gdx.jnigen.parsing.JavaMethodParser.JavaSegment; import com.badlogic.gdx.jnigen.parsing.JavaMethodParser.JniSection; import com.badlogic.gdx.jnigen.parsing.JniHeaderCMethodParser; import com.badlogic.gdx.jnigen.parsing.RobustJavaMethodParser; /** Goes through a Java source directory, checks each .java file for native methods and emits C/C++ code accordingly, both .h and * .cpp files. * * <h2>Augmenting Java Files with C/C++</h2> C/C++ code can be directly added to native methods in the Java file as block comments * starting at the same line as the method signature. 
Custom JNI code that is not associated with a native method can be added via * a special block comment as shown below.</p> * * All arguments can be accessed by the name specified in the Java native method signature (unless you use $ in your identifier * which is allowed in Java). * * <pre> * package com.badlogic.jnigen; * * public class MyJniClass { * /*JNI * #include &lt;math.h&gt; * *<i>/</i> * * public native void addToArray(float[] array, int len, float value); /* * for(int i = 0; i < len; i++) { * array[i] = value; * } * *<i>/</i> * } * </pre> * * The generated header file is automatically included in the .cpp file. Methods and custom JNI code can be mixed throughout the * Java file, their order is preserved in the generated .cpp file. Method overloading is supported but not recommended as the * overloading detection is very basic.</p> * * If a native method has strings, one dimensional primitive arrays or direct {@link Buffer} instances as arguments, JNI setup and * cleanup code is automatically generated.</p> * * The following list gives the mapping from Java to C/C++ types for arguments: * * <table border="1"> * <tr> * <td>Java</td> * <td>C/C++</td> * </tr> * <tr> * <td>String</td> * <td>char* (UTF-8)</td> * </tr> * <tr> * <td>boolean[]</td> * <td>bool*</td> * </tr> * <tr> * <td>byte[]</td> * <td>char*</td> * </tr> * <tr> * <td>char[]</td> * <td>unsigned short*</td> * </tr> * <tr> * <td>short[]</td> * <td>short*</td> * </tr> * <tr> * <td>int[]</td> * <td>int*</td> * </tr> * <tr> * <td>long[]</td> * <td>long long*</td> * </tr> * <tr> * <td>float[]</td> * <td>float*</td> * </tr> * <tr> * <td>double[]</td> * <td>double*</td> * </tr> * <tr> * <td>Buffer</td> * <td>unsigned char*</td> * </tr> * <tr> * <td>ByteBuffer</td> * <td>char*</td> * </tr> * <tr> * <td>CharBuffer</td> * <td>unsigned short*</td> * </tr> * <tr> * <td>ShortBuffer</td> * <td>short*</td> * </tr> * <tr> * <td>IntBuffer</td> * <td>int*</td> * </tr> * <tr> * <td>LongBuffer</td> * <td>long 
long*</td> * </tr> * <tr> * <td>FloatBuffer</td> * <td>float*</td> * </tr> * <tr> * <td>DoubleBuffer</td> * <td>double*</td> * </tr> * <tr> * <td>Anything else</td> * <td>jobject/jobjectArray</td> * </tr> * </table> * * If you need control over setting up and cleaning up arrays/strings and direct buffers you can tell the NativeCodeGenerator to * omit setup and cleanup code by starting the native code block comment with "/*MANUAL" instead of just "/*" to the method name. * See libgdx's Gdx2DPixmap load() method for an example. * * <h2>.h/.cpp File Generation</h2> The .h files are created via javah, which has to be on your path. The Java classes have to be * compiled and accessible to the javah tool. The name of the generated .h/.cpp files is the fully qualified name of the class, * e.g. com.badlogic.jnigen.MyJniClass.h/.cpp. The generator takes the following parameters as input: * * <ul> * <li>Java source directory, containing the .java files, e.g. src/ in an Eclipse project</li> * <li>Java class directory, containing the compiled .class files, e.g. bin/ in an Eclipse project</li> * <li>JNI output directory, where the resulting .h and .cpp files will be stored, e.g. jni/</li> * </ul> * * A default invocation of the generator looks like this: * * <pre> * new NativeCodeGenerator().generate(&quot;src&quot;, &quot;bin&quot;, &quot;jni&quot;); * </pre> * * To automatically compile and load the native code, see the classes {@link AntScriptGenerator}, {@link BuildExecutor} and * {@link JniGenSharedLibraryLoader} classes. 
</p> * * @author mzechner */ public class NativeCodeGenerator { private static final String JNI_METHOD_MARKER = "native"; private static final String JNI_ARG_PREFIX = "obj_"; private static final String JNI_RETURN_VALUE = "JNI_returnValue"; private static final String JNI_WRAPPER_PREFIX = "wrapped_"; FileDescriptor sourceDir; String classpath; FileDescriptor jniDir; String[] includes; String[] excludes; AntPathMatcher matcher = new AntPathMatcher(); JavaMethodParser javaMethodParser = new RobustJavaMethodParser(); CMethodParser cMethodParser = new JniHeaderCMethodParser(); CMethodParserResult cResult; /** Generates .h/.cpp files from the Java files found in "src/", with their .class files being in "bin/". The generated files * will be stored in "jni/". All paths are relative to the applications working directory. * @throws Exception */ public void generate () throws Exception { generate("src", "bin", "jni", null, null); } /** Generates .h/.cpp fiels from the Java files found in <code>sourceDir</code>, with their .class files being in * <code>classpath</code>. The generated files will be stored in <code>jniDir</code>. All paths are relative to the * applications working directory. * @param sourceDir the directory containing the Java files * @param classpath the directory containing the .class files * @param jniDir the output directory * @throws Exception */ public void generate (String sourceDir, String classpath, String jniDir) throws Exception { generate(sourceDir, classpath, jniDir, null, null); } /** Generates .h/.cpp fiels from the Java files found in <code>sourceDir</code>, with their .class files being in * <code>classpath</code>. The generated files will be stored in <code>jniDir</code>. The <code>includes</code> and * <code>excludes</code> parameters allow to specify directories and files that should be included/excluded from the * generation. These can be given in the Ant path format. All paths are relative to the applications working directory. 
* @param sourceDir the directory containing the Java files * @param classpath the directory containing the .class files * @param jniDir the output directory * @param includes files/directories to include, can be null (all files are used) * @param excludes files/directories to exclude, can be null (no files are excluded) * @throws Exception */ public void generate (String sourceDir, String classpath, String jniDir, String[] includes, String[] excludes) throws Exception { this.sourceDir = new FileDescriptor(sourceDir); this.jniDir = new FileDescriptor(jniDir); this.classpath = classpath; this.includes = includes; this.excludes = excludes; // check if source directory exists if (!this.sourceDir.exists()) { throw new Exception("Java source directory '" + sourceDir + "' does not exist"); } // generate jni directory if necessary if (!this.jniDir.exists()) { if (!this.jniDir.mkdirs()) { throw new Exception("Couldn't create JNI directory '" + jniDir + "'"); } } // process the source directory, emitting c/c++ files to jniDir processDirectory(this.sourceDir); } private void processDirectory (FileDescriptor dir) throws Exception { FileDescriptor[] files = dir.list(); for (FileDescriptor file : files) { if (file.isDirectory()) { if (file.path().contains(".svn")) continue; if (excludes != null && matcher.match(file.path(), excludes)) continue; processDirectory(file); } else { if (file.extension().equals("java")) { if (file.name().contains("NativeCodeGenerator")) continue; if (includes != null && !matcher.match(file.path(), includes)) continue; if (excludes != null && matcher.match(file.path(), excludes)) continue; String className = getFullyQualifiedClassName(file); FileDescriptor hFile = new FileDescriptor(jniDir.path() + "/" + className + ".h"); FileDescriptor cppFile = new FileDescriptor(jniDir + "/" + className + ".cpp"); if (file.lastModified() < cppFile.lastModified()) { System.out.println("C/C++ for '" + file.path() + "' up to date"); continue; } String javaContent = 
file.readString(); if (javaContent.contains(JNI_METHOD_MARKER)) { ArrayList<JavaSegment> javaSegments = javaMethodParser.parse(javaContent); if (javaSegments.size() == 0) { System.out.println("Skipping '" + file + "', no JNI code found."); continue; } System.out.print("Generating C/C++ for '" + file + "'..."); generateHFile(file); generateCppFile(javaSegments, hFile, cppFile); System.out.println("done"); } } } } } private String getFullyQualifiedClassName (FileDescriptor file) { String className = file.path().replace(sourceDir.path(), "").replace('\\', '.').replace('/', '.').replace(".java", ""); if (className.startsWith(".")) className = className.substring(1); return className; } private void generateHFile (FileDescriptor file) throws Exception { String className = getFullyQualifiedClassName(file); String command = "javah -classpath " + classpath + " -o " + jniDir.path() + "/" + className + ".h " + className; Process process = Runtime.getRuntime().exec(command); process.waitFor(); if (process.exitValue() != 0) { System.out.println(); InputStream errorStream = process.getErrorStream(); int c = 0; while ((c = errorStream.read()) != -1) { System.out.print((char)c); } System.out.println("Command: " + command); } } private void generateCppFile (ArrayList<JavaSegment> javaSegments, FileDescriptor hFile, FileDescriptor cppFile) throws Exception { String headerFileContent = hFile.readString(); ArrayList<CMethod> cMethods = cMethodParser.parse(headerFileContent).getMethods(); StringBuffer buffer = new StringBuffer(); buffer.append("#include <" + hFile.name() + ">\n"); for (JavaSegment segment : javaSegments) { if (segment instanceof JniSection) { emitJniSection(buffer, (JniSection)segment); } if (segment instanceof JavaMethod) { JavaMethod javaMethod = (JavaMethod)segment; if (javaMethod.getNativeCode() == null) { throw new RuntimeException("Method '" + javaMethod.getName() + "' has no body"); } CMethod cMethod = findCMethod(javaMethod, cMethods); if (cMethod == null) 
throw new RuntimeException("Couldn't find C method for Java method '" + javaMethod.getClassName() + "#" + javaMethod.getName() + "'"); emitJavaMethod(buffer, javaMethod, cMethod); } } cppFile.writeString(buffer.toString(), false, "UTF-8"); } private CMethod findCMethod (JavaMethod javaMethod, ArrayList<CMethod> cMethods) { for (CMethod cMethod : cMethods) { if (cMethod.getHead().endsWith(javaMethod.getClassName() + "_" + javaMethod.getName()) || cMethod.getHead().contains(javaMethod.getClassName() + "_" + javaMethod.getName() + "__")) { // FIXME poor man's overloaded method check... // FIXME float test[] won't work, needs to be float[] test. if (cMethod.getArgumentTypes().length - 2 == javaMethod.getArguments().size()) { boolean match = true; for (int i = 2; i < cMethod.getArgumentTypes().length; i++) { String cType = cMethod.getArgumentTypes()[i]; String javaType = javaMethod.getArguments().get(i - 2).getType().getJniType(); if (!cType.equals(javaType)) { match = false; break; } } if (match) { return cMethod; } } } } return null; } private void emitLineMarker (StringBuffer buffer, int line) { buffer.append("\n//@line:"); buffer.append(line); buffer.append("\n"); } private void emitJniSection (StringBuffer buffer, JniSection section) { emitLineMarker(buffer, section.getStartIndex()); buffer.append(section.getNativeCode().replace("\r", "")); } private void emitJavaMethod (StringBuffer buffer, JavaMethod javaMethod, CMethod cMethod) { // get the setup and cleanup code for arrays, buffers and strings StringBuffer jniSetupCode = new StringBuffer(); StringBuffer jniCleanupCode = new StringBuffer(); StringBuffer additionalArgs = new StringBuffer(); StringBuffer wrapperArgs = new StringBuffer(); emitJniSetupCode(jniSetupCode, javaMethod, additionalArgs, wrapperArgs); emitJniCleanupCode(jniCleanupCode, javaMethod, cMethod); // check if the user wants to do manual setup of JNI args boolean isManual = javaMethod.isManual(); // if we have disposable arguments (string, buffer, 
array) and if there is a return // in the native code (conservative, not syntactically checked), emit a wrapper method. if (javaMethod.hasDisposableArgument() && javaMethod.getNativeCode().contains("return")) { // if the method is marked as manual, we just emit the signature and let the // user do whatever she wants. if (isManual) { emitMethodSignature(buffer, javaMethod, cMethod, null, false); emitMethodBody(buffer, javaMethod); buffer.append("}\n\n"); } else { // emit the method containing the actual code, called by the wrapper // method with setup pointers to arrays, buffers and strings String wrappedMethodName = emitMethodSignature(buffer, javaMethod, cMethod, additionalArgs.toString()); emitMethodBody(buffer, javaMethod); buffer.append("}\n\n"); // emit the wrapper method, the one with the declaration in the header file emitMethodSignature(buffer, javaMethod, cMethod, null); if (!isManual) { buffer.append(jniSetupCode); } if (cMethod.getReturnType().equals("void")) { buffer.append("\t" + wrappedMethodName + "(" + wrapperArgs.toString() + ");\n\n"); if (!isManual) { buffer.append(jniCleanupCode); } buffer.append("\treturn;\n"); } else { buffer.append("\t" + cMethod.getReturnType() + " " + JNI_RETURN_VALUE + " = " + wrappedMethodName + "(" + wrapperArgs.toString() + ");\n\n"); if (!isManual) { buffer.append(jniCleanupCode); } buffer.append("\treturn " + JNI_RETURN_VALUE + ";\n"); } buffer.append("}\n\n"); } } else { emitMethodSignature(buffer, javaMethod, cMethod, null); if (!isManual) { buffer.append(jniSetupCode); } emitMethodBody(buffer, javaMethod); if (!isManual) { buffer.append(jniCleanupCode); } buffer.append("}\n\n"); } } private void emitMethodBody (StringBuffer buffer, JavaMethod javaMethod) { // emit a line marker emitLineMarker(buffer, javaMethod.getEndIndex()); // FIXME add tabs cleanup buffer.append(javaMethod.getNativeCode()); buffer.append("\n"); } private String emitMethodSignature (StringBuffer buffer, JavaMethod javaMethod, CMethod cMethod, 
String additionalArguments) { return emitMethodSignature(buffer, javaMethod, cMethod, additionalArguments, true); } private String emitMethodSignature (StringBuffer buffer, JavaMethod javaMethod, CMethod cMethod, String additionalArguments, boolean appendPrefix) { // emit head, consisting of JNIEXPORT,return type and method name // if this is a wrapped method, prefix the method name String wrappedMethodName = null; if (additionalArguments != null) { String[] tokens = cMethod.getHead().replace("\r\n", "").replace("\n", "").split(" "); wrappedMethodName = JNI_WRAPPER_PREFIX + tokens[3]; buffer.append("static inline "); buffer.append(tokens[1]); buffer.append(" "); buffer.append(wrappedMethodName); buffer.append("\n"); } else { buffer.append(cMethod.getHead()); } // construct argument list // Differentiate between static and instance method, then output each argument if (javaMethod.isStatic()) { buffer.append("(JNIEnv* env, jclass clazz"); } else { buffer.append("(JNIEnv* env, jobject object"); } if (javaMethod.getArguments().size() > 0) buffer.append(", "); for (int i = 0; i < javaMethod.getArguments().size(); i++) { // output the argument type as defined in the header buffer.append(cMethod.getArgumentTypes()[i + 2]); buffer.append(" "); // if this is not a POD or an object, we need to add a prefix // as we will output JNI code to get pointers to strings, arrays // and direct buffers. 
Argument javaArg = javaMethod.getArguments().get(i); if (!javaArg.getType().isPlainOldDataType() && !javaArg.getType().isObject() && appendPrefix) { buffer.append(JNI_ARG_PREFIX); } // output the name of the argument buffer.append(javaArg.getName()); // comma, if this is not the last argument if (i < javaMethod.getArguments().size() - 1) buffer.append(", "); } // if this is a wrapper method signature, add the additional arguments if (additionalArguments != null) { buffer.append(additionalArguments); } // close signature, open method body buffer.append(") {\n"); // return the wrapped method name if any return wrappedMethodName; } private void emitJniSetupCode (StringBuffer buffer, JavaMethod javaMethod, StringBuffer additionalArgs, StringBuffer wrapperArgs) { // add environment and class/object as the two first arguments for // wrapped method. if (javaMethod.isStatic()) { wrapperArgs.append("env, clazz, "); } else { wrapperArgs.append("env, object, "); } // arguments for wrapper method for (int i = 0; i < javaMethod.getArguments().size(); i++) { Argument arg = javaMethod.getArguments().get(i); if (!arg.getType().isPlainOldDataType() && !arg.getType().isObject()) { wrapperArgs.append(JNI_ARG_PREFIX); } // output the name of the argument wrapperArgs.append(arg.getName()); if (i < javaMethod.getArguments().size() - 1) wrapperArgs.append(", "); } // direct buffer pointers for (Argument arg : javaMethod.getArguments()) { if (arg.getType().isBuffer()) { String type = arg.getType().getBufferCType(); buffer.append("\t" + type + " " + arg.getName() + " = (" + type + ")(" + JNI_ARG_PREFIX + arg.getName() + "?env->GetDirectBufferAddress(" + JNI_ARG_PREFIX + arg.getName() + "):0);\n"); additionalArgs.append(", "); additionalArgs.append(type); additionalArgs.append(" "); additionalArgs.append(arg.getName()); wrapperArgs.append(", "); wrapperArgs.append(arg.getName()); } } // string pointers for (Argument arg : javaMethod.getArguments()) { if (arg.getType().isString()) { String 
type = "char*"; buffer.append("\t" + type + " " + arg.getName() + " = (" + type + ")env->GetStringUTFChars(" + JNI_ARG_PREFIX + arg.getName() + ", 0);\n"); additionalArgs.append(", "); additionalArgs.append(type); additionalArgs.append(" "); additionalArgs.append(arg.getName()); wrapperArgs.append(", "); wrapperArgs.append(arg.getName()); } } // Array pointers, we have to collect those last as GetPrimitiveArrayCritical // will explode into our face if we call another JNI method after that. for (Argument arg : javaMethod.getArguments()) { if (arg.getType().isPrimitiveArray()) { String type = arg.getType().getArrayCType(); buffer.append("\t" + type + " " + arg.getName() + " = (" + type + ")env->GetPrimitiveArrayCritical(" + JNI_ARG_PREFIX + arg.getName() + ", 0);\n"); additionalArgs.append(", "); additionalArgs.append(type); additionalArgs.append(" "); additionalArgs.append(arg.getName()); wrapperArgs.append(", "); wrapperArgs.append(arg.getName()); } } // new line for separation buffer.append("\n"); } private void emitJniCleanupCode (StringBuffer buffer, JavaMethod javaMethod, CMethod cMethod) { // emit cleanup code for arrays, must come first for (Argument arg : javaMethod.getArguments()) { if (arg.getType().isPrimitiveArray()) { buffer.append("\tenv->ReleasePrimitiveArrayCritical(" + JNI_ARG_PREFIX + arg.getName() + ", " + arg.getName() + ", 0);\n"); } } // emit cleanup code for strings for (Argument arg : javaMethod.getArguments()) { if (arg.getType().isString()) { buffer.append("\tenv->ReleaseStringUTFChars(" + JNI_ARG_PREFIX + arg.getName() + ", " + arg.getName() + ");\n"); } } // new line for separation buffer.append("\n"); } }
/*<license>
Copyright 2004 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.vernacular.semantics_VI.exception;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.ppwcode.util.reflect_I.PropertyHelpers.hasProperty;
import static org.ppwcode.vernacular.exception_III.ApplicationException.DEFAULT_MESSAGE_KEY;

import java.lang.reflect.InvocationTargetException;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.beanutils.PropertyUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import org.ppwcode.vernacular.semantics_VI.exception.PropertyExceptionTest.OriginStub;


/**
 * Exhaustive fixture-based tests for {@link ValuePropertyException}: the constructor
 * contract and {@code like()} are exercised over the cartesian product of origins,
 * property names, messages and causes built in {@link #setUp()}.
 */
public class ValuePropertyExceptionTest {

  public static final String EMPTY = "";

  // primary fixtures, used to build the subjects under test
  private Set<OriginStub> origins;
  private Set<String> propertyNames; // not empty
  private Set<Object> propertyValues;
  private Set<String> messages; // not empty
  private Set<Throwable> throwables;

  public Set<ValuePropertyException> subjects;

  // wider "2" fixtures, used as comparison inputs for like(); supersets of the above
  private Set<Object> origins2;
  private Set<Class<?>> originTypes2;
  private Set<String> propertyNames2; // not empty
  private Set<String> messages2; // not empty
  private Set<Throwable> throwables2;

  @Before
  public void setUp() throws Exception {
    // originMock = new OriginStub();
    messages = new HashSet<String>();
    messages.add(null);
    messages.add("stub message");
    propertyNames = new HashSet<String>();
    // null not allowed as propertyname
    propertyNames.add("stubProperty");
    propertyValues = new HashSet<Object>();
    propertyValues.add(null);
    propertyValues.add(new Object());
    propertyValues.add(""); // was "new String()": same value, no needless allocation
    propertyValues.add(new Date());
    throwables = new HashSet<Throwable>();
    throwables.add(null);
    throwables.add(new Throwable());
    // one origin per distinct property value
    origins = new HashSet<OriginStub>();
    for (Object propertyValue : propertyValues) {
      OriginStub origin = new OriginStub();
      origin.setStubProperty(propertyValue);
      origins.add(origin);
    }
    subjects = createSubjects();
    origins2 = new HashSet<Object>(origins);
    origins2.add(null);
    origins2.add(new Object());
    originTypes2 = new HashSet<Class<?>>();
    originTypes2.add(OriginStub.class);
    originTypes2.add(null);
    originTypes2.add(Object.class);
    originTypes2.add(PropertyException.class);
    propertyNames2 = new HashSet<String>(propertyNames);
    propertyNames2.add(EMPTY);
    propertyNames2.add("not a property");
    messages2 = new HashSet<String>(messages);
    messages2.add(EMPTY);
    messages2.add("another message");
    throwables2 = new HashSet<Throwable>(throwables);
    throwables2.add(new Exception());
  }

  /** Builds one subject for every combination of message, property name, origin and cause. */
  private Set<ValuePropertyException> createSubjects() {
    Set<ValuePropertyException> result = new HashSet<ValuePropertyException>();
    for (String message : messages) {
      for (String propertyName : propertyNames) {
        for (Object origin : origins) {
          for (Throwable t : throwables) {
            result.add(new ValuePropertyException(origin, propertyName, message, t));
          }
        }
      }
    }
    return result;
  }

  @After
  public void tearDown() throws Exception {
    origins = null;
    messages = null;
    propertyNames = null;
    propertyValues = null;
    throwables = null;
    subjects = null;
    origins2 = null;
    originTypes2 = null; // FIX: was leaked; release every fixture created in setUp()
    propertyNames2 = null;
    messages2 = null;
    throwables2 = null; // FIX: was leaked; release every fixture created in setUp()
  }

  /** Type invariants: super invariants hold, property name is set, and a null origin implies a null property value. */
  public static void assertTypeInvariants(ValuePropertyException subject) {
    PropertyExceptionTest.assertTypeInvariants(subject);
    assertNotNull(subject.getPropertyName());
    assertTrue(subject.getOrigin() == null ? subject.getPropertyValue() == null : true);
  }

  /** Checks the constructor contract for one (origin, propertyName, message, cause) combination:
   * each accessor reflects the argument, the property value is read from the origin bean, and
   * a null message falls back to {@code DEFAULT_MESSAGE_KEY}. */
  private void testValuePropertyExceptionObjectStringObjectStringThrowable(final Object origin,
                                                                           final String propertyName,
                                                                           final String message,
                                                                           final Throwable cause)
      throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    // execute
    ValuePropertyException subject = new ValuePropertyException(origin, propertyName, message, cause);
    // validate
    assertEquals(origin, subject.getOrigin());
    assertEquals(origin.getClass(), subject.getOriginType());
    assertEquals(propertyName, subject.getPropertyName());
    assertEquals(PropertyUtils.getProperty(origin, propertyName), subject.getPropertyValue());
    assertEquals(message == null ? DEFAULT_MESSAGE_KEY : message, subject.getMessage());
    assertEquals(cause, subject.getCause());
    PropertyExceptionTest.assertTypeInvariants(subject);
    assertTypeInvariants(subject);
  }

  @Test
  public void testValuePropertyExceptionObjectStringObjectStringThrowable()
      throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    for (Object origin : origins) {
      for (String message : messages) {
        for (String propertyName : propertyNames) {
          for (Throwable t : throwables) {
            testValuePropertyExceptionObjectStringObjectStringThrowable(origin, propertyName, message, t);
          }
        }
      }
    }
  }

  // NOTE(review): large commented-out tests for the (origin, inOriginInitialization, ...) and
  // (Class<?>, ...) constructors were removed here — they referenced constructors that no longer
  // exist and a nonexistent "booleans" fixture; recover from VCS history if ever needed.

  /** Checks {@code like()}: delegates to the super check, and when the result is true the
   * property values of subject and other must be equal (null-safe). */
  public static void testLike(ValuePropertyException subject, PropertyException other) {
    // execute
    boolean result = subject.like(other);
    // validate
    PropertyExceptionTest.testLike(subject, other);
    assertTrue(result ? eqn(subject.getPropertyValue(), ((ValuePropertyException)other).getPropertyValue()) : true);
    assertTypeInvariants(subject);
  }

  /** Null-safe equality. */
  private static boolean eqn(Object o1, Object o2) {
    return o1 == null ? o2 == null : o1.equals(o2);
  }

  @Test
  public void testLike() {
    for (ValuePropertyException subject : subjects) {
      for (Object origin : origins2) {
        if (origin != null) {
          for (String propertyName : propertyNames2) {
            // only names the origin actually has (or null) can build a comparison exception
            if ((propertyName == null) || hasProperty(origin.getClass(), propertyName)) {
              for (String message : messages2) {
                if ((message == null) || (!message.equals(""))) {
                  for (Throwable cause : throwables2) {
                    testLike(subject, new ValuePropertyException(origin, propertyName, message, cause));
                    testLike(subject, new PropertyException(origin, propertyName, message, cause));
                  }
                }
              }
            }
          }
        }
        // NOTE(review): executed once per element of origins2 (as in the original); could be hoisted
        testLike(subject, null);
      }
    }
  }

  // NOTE(review): commented-out testHasProperties*/testReportsOn* variants removed (dead code
  // referencing helpers that are no longer exercised); recover from VCS history if ever needed.
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.se_bastiaan.tvnl.exomedia.okhttp; import com.google.android.exoplayer.C; import com.google.android.exoplayer.upstream.DataSpec; import com.google.android.exoplayer.upstream.HttpDataSource; import com.google.android.exoplayer.upstream.TransferListener; import com.google.android.exoplayer.util.Assertions; import com.google.android.exoplayer.util.Predicate; import okhttp3.CacheControl; import okhttp3.HttpUrl; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; import okhttp3.internal.Util; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; /** * An {@link HttpDataSource} that delegates to Square's {@link OkHttpClient}. 
*/ public class OkHttpDataSource implements HttpDataSource { private static final AtomicReference<byte[]> skipBufferReference = new AtomicReference<>(); private final OkHttpClient okHttpClient; private final String userAgent; private final Predicate<String> contentTypePredicate; private final TransferListener listener; private final CacheControl cacheControl; private final HashMap<String, String> requestProperties; private DataSpec dataSpec; private Response response; private InputStream responseByteStream; private boolean opened; private long bytesToSkip; private long bytesToRead; private long bytesSkipped; private long bytesRead; /** * @param client An {@link OkHttpClient} for use by the source. * @param userAgent The User-Agent string that should be used. * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the * predicate then a * {@link com.google.android.exoplayer.upstream.HttpDataSource.InvalidContentTypeException} is * thrown from {@link #open(DataSpec)}. */ public OkHttpDataSource(OkHttpClient client, String userAgent, Predicate<String> contentTypePredicate) { this(client, userAgent, contentTypePredicate, null); } /** * @param client An {@link OkHttpClient} for use by the source. * @param userAgent The User-Agent string that should be used. * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the * predicate then a * {@link com.google.android.exoplayer.upstream.HttpDataSource.InvalidContentTypeException} is * thrown from {@link #open(DataSpec)}. * @param listener An optional listener. */ public OkHttpDataSource(OkHttpClient client, String userAgent, Predicate<String> contentTypePredicate, TransferListener listener) { this(client, userAgent, contentTypePredicate, listener, null); } /** * @param client An {@link OkHttpClient} for use by the source. * @param userAgent The User-Agent string that should be used. * @param contentTypePredicate An optional {@link Predicate}. 
If a content type is rejected by the * predicate then a * {@link com.google.android.exoplayer.upstream.HttpDataSource.InvalidContentTypeException} is * thrown from {@link #open(DataSpec)}. * @param listener An optional listener. * @param cacheControl An optional {@link CacheControl} which sets all requests' Cache-Control * header. For example, you could force the network response for all requests. * */ public OkHttpDataSource(OkHttpClient client, String userAgent, Predicate<String> contentTypePredicate, TransferListener listener, CacheControl cacheControl) { this.okHttpClient = Assertions.checkNotNull(client); this.userAgent = Assertions.checkNotEmpty(userAgent); this.contentTypePredicate = contentTypePredicate; this.listener = listener; this.cacheControl = cacheControl; this.requestProperties = new HashMap<>(); } @Override public String getUri() { return response == null ? null : response.request().url().toString(); } @Override public Map<String, List<String>> getResponseHeaders() { return response == null ? 
null : response.headers().toMultimap(); } // NOTE(review): tail of a method whose start precedes this chunk — appears to return the response headers, or null when no response exists; confirm against the full file.

// Sets a header that is attached to every subsequent request.
// Guarded by requestProperties because requests can be built concurrently with mutation.
@Override public void setRequestProperty(String name, String value) {
  Assertions.checkNotNull(name);
  Assertions.checkNotNull(value);
  synchronized (requestProperties) {
    requestProperties.put(name, value);
  }
}

// Removes a single previously set request header, if present.
@Override public void clearRequestProperty(String name) {
  Assertions.checkNotNull(name);
  synchronized (requestProperties) {
    requestProperties.remove(name);
  }
}

// Removes all previously set request headers.
@Override public void clearAllRequestProperties() {
  synchronized (requestProperties) {
    requestProperties.clear();
  }
}

/**
 * Opens the source for the given {@code dataSpec}: executes the HTTP request, validates the
 * response code and content type, and computes how many bytes must be skipped/read.
 *
 * @param dataSpec Defines the URI, byte range and flags of the request.
 * @return The number of bytes to be read, or C.LENGTH_UNBOUNDED if unknown.
 * @throws HttpDataSourceException On connection failure, invalid response code, or rejected
 *     content type.
 */
@Override public long open(DataSpec dataSpec) throws HttpDataSourceException {
  this.dataSpec = dataSpec;
  this.bytesRead = 0;
  this.bytesSkipped = 0;
  Request request = makeRequest(dataSpec);
  try {
    response = okHttpClient.newCall(request).execute();
    responseByteStream = response.body().byteStream();
  } catch (IOException e) {
    throw new HttpDataSourceException("Unable to connect to " + dataSpec.uri.toString(), e,
        dataSpec, HttpDataSourceException.TYPE_OPEN);
  }
  int responseCode = response.code();
  // Check for a valid response code.
  if (!response.isSuccessful()) {
    Map<String, List<String>> headers = request.headers().toMultimap();
    closeConnectionQuietly();
    throw new InvalidResponseCodeException(responseCode, headers, dataSpec);
  }
  // Check for a valid content type.
  String contentType = response.body().contentType().toString();
  if (contentTypePredicate != null && !contentTypePredicate.evaluate(contentType)) {
    closeConnectionQuietly();
    throw new InvalidContentTypeException(contentType, dataSpec);
  }
  // If we requested a range starting from a non-zero position and received a 200 rather than a
  // 206, then the server does not support partial requests. We'll need to manually skip to the
  // requested position.
  bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0;
  // Determine the length of the data to be read, after skipping.
  long contentLength = response.body().contentLength();
  bytesToRead = dataSpec.length != C.LENGTH_UNBOUNDED ? dataSpec.length
      : contentLength != -1 ? contentLength - bytesToSkip : C.LENGTH_UNBOUNDED;
  opened = true;
  if (listener != null) {
    listener.onTransferStart();
  }
  return bytesToRead;
}

/**
 * Reads up to {@code readLength} bytes into {@code buffer}, first completing any pending skip
 * required because the server ignored our range request.
 *
 * @throws HttpDataSourceException Wrapping any underlying IOException as a TYPE_READ error.
 */
@Override public int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException {
  try {
    skipInternal();
    return readInternal(buffer, offset, readLength);
  } catch (IOException e) {
    throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_READ);
  }
}

/**
 * Closes the source if it is open; notifies the listener and releases the connection.
 * Safe to call when already closed (no-op).
 */
@Override public void close() throws HttpDataSourceException {
  if (opened) {
    opened = false;
    if (listener != null) {
      listener.onTransferEnd();
    }
    closeConnectionQuietly();
  }
}

/**
 * Returns the number of bytes that have been skipped since the most recent call to
 * {@link #open(DataSpec)}.
 *
 * @return The number of bytes skipped.
 */
protected final long bytesSkipped() {
  return bytesSkipped;
}

/**
 * Returns the number of bytes that have been read since the most recent call to
 * {@link #open(DataSpec)}.
 *
 * @return The number of bytes read.
 */
protected final long bytesRead() {
  return bytesRead;
}

/**
 * Returns the number of bytes that are still to be read for the current {@link DataSpec}.
 * <p>
 * If the total length of the data being read is known, then this length minus {@code bytesRead()}
 * is returned. If the total length is unknown, {@link C#LENGTH_UNBOUNDED} is returned.
 *
 * @return The remaining length, or {@link C#LENGTH_UNBOUNDED}.
 */
protected final long bytesRemaining() {
  return bytesToRead == C.LENGTH_UNBOUNDED ? bytesToRead : bytesToRead - bytesRead;
}

/**
 * Establishes a connection.
 */
// Builds the OkHttp Request: applies cache control, shared request headers, an HTTP Range
// header for partial requests, the user agent, gzip opt-out, and an optional POST body.
private Request makeRequest(DataSpec dataSpec) {
  long position = dataSpec.position;
  long length = dataSpec.length;
  boolean allowGzip = (dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) != 0;
  HttpUrl url = HttpUrl.parse(dataSpec.uri.toString());
  Request.Builder builder = new Request.Builder().url(url);
  if (cacheControl != null) {
    builder.cacheControl(cacheControl);
  }
  synchronized (requestProperties) {
    for (Map.Entry<String, String> property : requestProperties.entrySet()) {
      builder.addHeader(property.getKey(), property.getValue());
    }
  }
  if (!(position == 0 && length == C.LENGTH_UNBOUNDED)) {
    // Range is inclusive; an open-ended range omits the end byte.
    String rangeRequest = "bytes=" + position + "-";
    if (length != C.LENGTH_UNBOUNDED) {
      rangeRequest += (position + length - 1);
    }
    builder.addHeader("Range", rangeRequest);
  }
  builder.addHeader("User-Agent", userAgent);
  if (!allowGzip) {
    builder.addHeader("Accept-Encoding", "identity");
  }
  if (dataSpec.postBody != null) {
    builder.post(RequestBody.create(null, dataSpec.postBody));
  }
  return builder.build();
}

/**
 * Skips any bytes that need skipping. Else does nothing.
 * <p>
 * This implementation is based roughly on {@code libcore.io.Streams.skipByReading()}.
 *
 * @throws InterruptedIOException If the thread is interrupted during the operation.
 * @throws EOFException If the end of the input stream is reached before the bytes are skipped.
 */
private void skipInternal() throws IOException {
  if (bytesSkipped == bytesToSkip) {
    return;
  }
  // Acquire the shared skip buffer.
  byte[] skipBuffer = skipBufferReference.getAndSet(null);
  if (skipBuffer == null) {
    skipBuffer = new byte[4096];
  }
  while (bytesSkipped != bytesToSkip) {
    int readLength = (int) Math.min(bytesToSkip - bytesSkipped, skipBuffer.length);
    int read = responseByteStream.read(skipBuffer, 0, readLength);
    if (Thread.interrupted()) {
      throw new InterruptedIOException();
    }
    if (read == -1) {
      throw new EOFException();
    }
    bytesSkipped += read;
    if (listener != null) {
      listener.onBytesTransferred(read);
    }
  }
  // Release the shared skip buffer.
  skipBufferReference.set(skipBuffer);
}

/**
 * Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at
 * index {@code offset}.
 * <p>
 * This method blocks until at least one byte of data can be read, the end of the opened range is
 * detected, or an exception is thrown.
 *
 * @param buffer The buffer into which the read data should be stored.
 * @param offset The start offset into {@code buffer} at which data should be written.
 * @param readLength The maximum number of bytes to read.
 * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if the end of the opened
 *     range is reached.
 * @throws IOException If an error occurs reading from the source.
 */
private int readInternal(byte[] buffer, int offset, int readLength) throws IOException {
  readLength = bytesToRead == C.LENGTH_UNBOUNDED ? readLength
      : (int) Math.min(readLength, bytesToRead - bytesRead);
  if (readLength == 0) {
    // We've read all of the requested data.
    return C.RESULT_END_OF_INPUT;
  }
  int read = responseByteStream.read(buffer, offset, readLength);
  if (read == -1) {
    if (bytesToRead != C.LENGTH_UNBOUNDED && bytesToRead != bytesRead) {
      // The server closed the connection having not sent sufficient data.
      throw new EOFException();
    }
    return C.RESULT_END_OF_INPUT;
  }
  bytesRead += read;
  if (listener != null) {
    listener.onBytesTransferred(read);
  }
  return read;
}

/**
 * Closes the current connection quietly, if there is one.
 */
private void closeConnectionQuietly() {
  Util.closeQuietly(response.body());
  response = null;
  responseByteStream = null;
}

}
/* * @(#)SVGEllipse.java * * Copyright (c) 1996-2010 by the original authors of JHotDraw * and all its contributors. * All rights reserved. * * The copyright of this software is owned by the authors and * contributors of the JHotDraw project ("the copyright holders"). * You may not use, copy or modify this software, except in * accordance with the license agreement you entered into with * the copyright holders. For details see accompanying license terms. */ package org.jhotdraw.samples.svg.figures; import org.jhotdraw.draw.handle.TransformHandleKit; import org.jhotdraw.draw.handle.ResizeHandleKit; import org.jhotdraw.draw.handle.Handle; import org.jhotdraw.draw.connector.Connector; import org.jhotdraw.draw.ConnectionFigure; import java.awt.*; import java.awt.geom.*; import java.util.*; import org.jhotdraw.draw.*; import org.jhotdraw.draw.handle.BoundsOutlineHandle; import static org.jhotdraw.samples.svg.SVGAttributeKeys.*; import org.jhotdraw.geom.*; import org.jhotdraw.samples.svg.*; /** * SVGEllipse represents a SVG ellipse and a SVG circle element. * * @author Werner Randelshofer * @version $Id: SVGEllipseFigure.java 604 2010-01-09 12:00:29Z rawcoder $ */ public class SVGEllipseFigure extends SVGAttributedFigure implements SVGFigure { private Ellipse2D.Double ellipse; /** * This is used to perform faster drawing and hit testing. */ private transient Shape cachedTransformedShape; /** * This is used to perform faster hit testing. */ private transient Shape cachedHitShape; /** Creates a new instance. 
*/ public SVGEllipseFigure() { this(0, 0, 0, 0); } public SVGEllipseFigure(double x, double y, double width, double height) { ellipse = new Ellipse2D.Double(x, y, width, height); SVGAttributeKeys.setDefaults(this); setConnectable(false); } // DRAWING protected void drawFill(Graphics2D g) { if (ellipse.width > 0 && ellipse.height > 0) { g.fill(ellipse); } } protected void drawStroke(Graphics2D g) { if (ellipse.width > 0 && ellipse.height > 0) { g.draw(ellipse); } } // SHAPE AND BOUNDS public double getX() { return ellipse.x; } public double getY() { return ellipse.y; } public double getWidth() { return ellipse.getWidth(); } public double getHeight() { return ellipse.getHeight(); } public Rectangle2D.Double getBounds() { return (Rectangle2D.Double) ellipse.getBounds2D(); } @Override public Rectangle2D.Double getDrawingArea() { Rectangle2D rx = getTransformedShape().getBounds2D(); Rectangle2D.Double r = (rx instanceof Rectangle2D.Double) ? (Rectangle2D.Double) rx : new Rectangle2D.Double(rx.getX(), rx.getY(), rx.getWidth(), rx.getHeight()); if (get(TRANSFORM) == null) { double g = SVGAttributeKeys.getPerpendicularHitGrowth(this) * 2d + 1; Geom.grow(r, g, g); } else { double strokeTotalWidth = AttributeKeys.getStrokeTotalWidth(this); double width = strokeTotalWidth / 2d; width *= Math.max(get(TRANSFORM).getScaleX(), get(TRANSFORM).getScaleY()) + 1; Geom.grow(r, width, width); } return r; } /** * Checks if a Point2D.Double is inside the figure. 
*/ public boolean contains(Point2D.Double p) { return getHitShape().contains(p); } private Shape getTransformedShape() { if (cachedTransformedShape == null) { if (get(TRANSFORM) == null) { cachedTransformedShape = ellipse; } else { cachedTransformedShape = get(TRANSFORM).createTransformedShape(ellipse); } } return cachedTransformedShape; } private Shape getHitShape() { if (cachedHitShape == null) { if (get(FILL_COLOR) != null || get(FILL_GRADIENT) != null) { cachedHitShape = new GrowStroke( (float) SVGAttributeKeys.getStrokeTotalWidth(this) / 2f, (float) SVGAttributeKeys.getStrokeTotalMiterLimit(this)).createStrokedShape(getTransformedShape()); } else { cachedHitShape = SVGAttributeKeys.getHitStroke(this).createStrokedShape(getTransformedShape()); } } return cachedHitShape; } @Override public void setBounds(Point2D.Double anchor, Point2D.Double lead) { ellipse.x = Math.min(anchor.x, lead.x); ellipse.y = Math.min(anchor.y, lead.y); ellipse.width = Math.max(0.1, Math.abs(lead.x - anchor.x)); ellipse.height = Math.max(0.1, Math.abs(lead.y - anchor.y)); invalidate(); } /** * Transforms the figure. * * @param tx the transformation. 
*/ public void transform(AffineTransform tx) { if (get(TRANSFORM) != null || (tx.getType() & (AffineTransform.TYPE_TRANSLATION)) != tx.getType()) { if (get(TRANSFORM) == null) { TRANSFORM.setClone(this, tx); } else { AffineTransform t = TRANSFORM.getClone(this); t.preConcatenate(tx); set(TRANSFORM, t); } } else { Point2D.Double anchor = getStartPoint(); Point2D.Double lead = getEndPoint(); setBounds( (Point2D.Double) tx.transform(anchor, anchor), (Point2D.Double) tx.transform(lead, lead)); if (get(FILL_GRADIENT) != null && !get(FILL_GRADIENT).isRelativeToFigureBounds()) { Gradient g = FILL_GRADIENT.getClone(this); g.transform(tx); set(FILL_GRADIENT, g); } if (get(STROKE_GRADIENT) != null && !get(STROKE_GRADIENT).isRelativeToFigureBounds()) { Gradient g = STROKE_GRADIENT.getClone(this); g.transform(tx); set(STROKE_GRADIENT, g); } } invalidate(); } public void restoreTransformTo(Object geometry) { Object[] restoreData = (Object[]) geometry; ellipse = (Ellipse2D.Double) ((Ellipse2D.Double) restoreData[0]).clone(); TRANSFORM.setClone(this, (AffineTransform) restoreData[1]); FILL_GRADIENT.setClone(this, (Gradient) restoreData[2]); STROKE_GRADIENT.setClone(this, (Gradient) restoreData[3]); invalidate(); } public Object getTransformRestoreData() { return new Object[]{ ellipse.clone(), TRANSFORM.getClone(this), FILL_GRADIENT.getClone(this), STROKE_GRADIENT.getClone(this),}; } // ATTRIBUTES // EDITING @Override public Collection<Handle> createHandles(int detailLevel) { LinkedList<Handle> handles = new LinkedList<Handle>(); switch (detailLevel % 2) { case -1: // Mouse hover handles handles.add(new BoundsOutlineHandle(this, false, true)); break; case 0: ResizeHandleKit.addResizeHandles(this, handles); handles.add(new LinkHandle(this)); break; case 1: TransformHandleKit.addTransformHandles(this, handles); break; default: break; } return handles; } // CONNECTING // COMPOSITE FIGURES // CLONING public SVGEllipseFigure clone() { SVGEllipseFigure that = (SVGEllipseFigure) 
super.clone(); that.ellipse = (Ellipse2D.Double) this.ellipse.clone(); that.cachedTransformedShape = null; return that; } // EVENT HANDLING public boolean isEmpty() { Rectangle2D.Double b = getBounds(); return b.width <= 0 || b.height <= 0; } @Override public void invalidate() { super.invalidate(); cachedTransformedShape = null; cachedHitShape = null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.axis2.clustering.context; import org.apache.axis2.clustering.ClusteringFault; import org.apache.axis2.clustering.context.commands.ContextClusteringCommandCollection; import org.apache.axis2.clustering.context.commands.DeleteServiceGroupContextCommand; import org.apache.axis2.clustering.context.commands.UpdateConfigurationContextCommand; import org.apache.axis2.clustering.context.commands.UpdateContextCommand; import org.apache.axis2.clustering.context.commands.UpdateServiceContextCommand; import org.apache.axis2.clustering.context.commands.UpdateServiceGroupContextCommand; import org.apache.axis2.context.AbstractContext; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.PropertyDifference; import org.apache.axis2.context.ServiceContext; import org.apache.axis2.context.ServiceGroupContext; import org.apache.axis2.deployment.DeploymentConstants; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.io.Serializable; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * */ public final class ContextClusteringCommandFactory { private static final Log 
log = LogFactory.getLog(ContextClusteringCommandFactory.class); public static ContextClusteringCommandCollection getCommandCollection(AbstractContext[] contexts, Map excludedReplicationPatterns) { ArrayList<ContextClusteringCommand> commands = new ArrayList<ContextClusteringCommand>(contexts.length); ContextClusteringCommandCollection collection = new ContextClusteringCommandCollection(commands); for (AbstractContext context : contexts) { ContextClusteringCommand cmd = getUpdateCommand(context, excludedReplicationPatterns, false); if (cmd != null) { commands.add(cmd); } } return collection; } /** * @param context The context * @param excludedPropertyPatterns The property patterns to be excluded * @param includeAllProperties True - Include all properties, * False - Include only property differences * @return ContextClusteringCommand */ public static ContextClusteringCommand getUpdateCommand(AbstractContext context, Map excludedPropertyPatterns, boolean includeAllProperties) { UpdateContextCommand cmd = toUpdateContextCommand(context); if (cmd != null) { fillProperties(cmd, context, excludedPropertyPatterns, includeAllProperties); if (cmd.isPropertiesEmpty()) { cmd = null; } } return cmd; } public static ContextClusteringCommand getUpdateCommand(AbstractContext context, String[] propertyNames) throws ClusteringFault { UpdateContextCommand cmd = toUpdateContextCommand(context); if (cmd != null) { fillProperties(cmd, context, propertyNames); if (cmd.isPropertiesEmpty()) { cmd = null; } } return cmd; } private static UpdateContextCommand toUpdateContextCommand(AbstractContext context) { UpdateContextCommand cmd = null; if (context instanceof ConfigurationContext) { cmd = new UpdateConfigurationContextCommand(); } else if (context instanceof ServiceGroupContext) { ServiceGroupContext sgCtx = (ServiceGroupContext) context; cmd = new UpdateServiceGroupContextCommand(); UpdateServiceGroupContextCommand updateSgCmd = (UpdateServiceGroupContextCommand) cmd; 
updateSgCmd.setServiceGroupName(sgCtx.getDescription().getServiceGroupName()); updateSgCmd.setServiceGroupContextId(sgCtx.getId()); } else if (context instanceof ServiceContext) { ServiceContext serviceCtx = (ServiceContext) context; cmd = new UpdateServiceContextCommand(); UpdateServiceContextCommand updateServiceCmd = (UpdateServiceContextCommand) cmd; String sgName = serviceCtx.getServiceGroupContext().getDescription().getServiceGroupName(); updateServiceCmd.setServiceGroupName(sgName); updateServiceCmd.setServiceGroupContextId(serviceCtx.getServiceGroupContext().getId()); updateServiceCmd.setServiceName(serviceCtx.getAxisService().getName()); } return cmd; } /** * @param updateCmd The command * @param context The context * @param excludedPropertyPatterns The property patterns to be excluded from replication * @param includeAllProperties True - Include all properties, * False - Include only property differences */ private static void fillProperties(UpdateContextCommand updateCmd, AbstractContext context, Map excludedPropertyPatterns, boolean includeAllProperties) { if (!includeAllProperties) { synchronized (context) { Map diffs = context.getPropertyDifferences(); for (Object o : diffs.keySet()) { String key = (String) o; PropertyDifference diff = (PropertyDifference) diffs.get(key); Object value = diff.getValue(); if (value instanceof Serializable) { // Next check whether it matches an excluded pattern if (!isExcluded(key, context.getClass().getName(), excludedPropertyPatterns)) { if (log.isDebugEnabled()) { log.debug("sending property =" + key + "-" + value); } updateCmd.addProperty(diff); } } } } } else { synchronized (context) { for (Iterator iter = context.getPropertyNames(); iter.hasNext();) { String key = (String) iter.next(); Object value = context.getPropertyNonReplicable(key); if (value instanceof Serializable) { // Next check whether it matches an excluded pattern if (!isExcluded(key, context.getClass().getName(), excludedPropertyPatterns)) { if 
(log.isDebugEnabled()) { log.debug("sending property =" + key + "-" + value); } PropertyDifference diff = new PropertyDifference(key, value, false); updateCmd.addProperty(diff); } } } } } } private static void fillProperties(UpdateContextCommand updateCmd, AbstractContext context, String[] propertyNames) throws ClusteringFault { Map diffs = context.getPropertyDifferences(); for (String key : propertyNames) { Object prop = context.getPropertyNonReplicable(key); // First check whether it is serializable if (prop instanceof Serializable) { if (log.isDebugEnabled()) { log.debug("sending property =" + key + "-" + prop); } PropertyDifference diff = (PropertyDifference) diffs.get(key); if (diff != null) { diff.setValue(prop); updateCmd.addProperty(diff); // Remove the diff? diffs.remove(key); } } else { String msg = "Trying to replicate non-serializable property " + key + " in context " + context; throw new ClusteringFault(msg); } } } private static boolean isExcluded(String propertyName, String ctxClassName, Map excludedPropertyPatterns) { // Check in the excludes list specific to the context List specificExcludes = (List) excludedPropertyPatterns.get(ctxClassName); boolean isExcluded = false; if (specificExcludes != null) { isExcluded = isExcluded(specificExcludes, propertyName); } if (!isExcluded) { // check in the default excludes List defaultExcludes = (List) excludedPropertyPatterns.get(DeploymentConstants.TAG_DEFAULTS); if (defaultExcludes != null) { isExcluded = isExcluded(defaultExcludes, propertyName); } } return isExcluded; } private static boolean isExcluded(List list, String propertyName) { for (Object aList : list) { String pattern = (String) aList; if (pattern.startsWith("*")) { pattern = pattern.replaceAll("\\*", ""); if (propertyName.endsWith(pattern)) { return true; } } else if (pattern.endsWith("*")) { pattern = pattern.replaceAll("\\*", ""); if (propertyName.startsWith(pattern)) { return true; } } else if (pattern.equals(propertyName)) { return true; } 
} return false; } public static ContextClusteringCommand getRemoveCommand(AbstractContext abstractContext) { if (abstractContext instanceof ServiceGroupContext) { ServiceGroupContext sgCtx = (ServiceGroupContext) abstractContext; DeleteServiceGroupContextCommand cmd = new DeleteServiceGroupContextCommand(); cmd.setServiceGroupContextId(sgCtx.getId()); return cmd; } return null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.jms; import org.apache.sling.amq.ActiveMQConnectionFactoryService; import org.apache.sling.amq.ActiveMQConnectionFactoryServiceTest; import org.apache.sling.mom.*; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jms.JMSException; import java.lang.reflect.Field; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import static org.junit.Assert.*; /** * Created by ieb on 31/03/2016. 
*/ public class JMSTopicManagerTest { private static final long MESSAGE_LATENCY = 1000; private static final Logger LOGGER = LoggerFactory.getLogger(JMSTopicManagerTest.class); private JMSTopicManager jsmTopicManager; private ActiveMQConnectionFactoryService amqConnectionFactoryService; private Map<String, Object> testMap; private boolean passed; private long lastSent; @Mock private ServiceReference<Subscriber> serviceReference; @Mock private Bundle bundle; @Mock private BundleContext bundleContext; private Map<String, Object> serviceProperties = new HashMap<String, Object>(); public JMSTopicManagerTest() { MockitoAnnotations.initMocks(this); } @Before public void before() throws NoSuchFieldException, IllegalAccessException, JMSException { Mockito.when(serviceReference.getBundle()).thenReturn(bundle); Mockito.when(bundle.getBundleContext()).thenReturn(bundleContext); Mockito.when(serviceReference.getPropertyKeys()).thenAnswer(new Answer<String[]>() { @Override public String[] answer(InvocationOnMock invocationOnMock) throws Throwable { return (String[]) serviceProperties.keySet().toArray(new String[serviceProperties.size()]); } }); Mockito.when(serviceReference.getProperty(Mockito.anyString())).thenAnswer(new Answer<Object>() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { return serviceProperties.get(invocationOnMock.getArguments()[0]); } }); amqConnectionFactoryService = ActiveMQConnectionFactoryServiceTest.activate(null); jsmTopicManager = JMSTopicManagerTest.activate(amqConnectionFactoryService); testMap = JsonTest.createTestMap(); passed = false; } public static JMSTopicManager activate(ActiveMQConnectionFactoryService amqConnectionFactoryService) throws NoSuchFieldException, IllegalAccessException, JMSException { JMSTopicManager jsmTopicManager = new JMSTopicManager(); setPrivate(jsmTopicManager, "connectionFactoryService", amqConnectionFactoryService); jsmTopicManager.activate(new HashMap<String, Object>()); return 
jsmTopicManager; } private static void setPrivate(Object object, String name, Object value) throws NoSuchFieldException, IllegalAccessException { Field field = object.getClass().getDeclaredField(name); if ( !field.isAccessible()) { field.setAccessible(true); } field.set(object, value); } @After public void after() throws JMSException { JMSTopicManagerTest.deactivate(jsmTopicManager); ActiveMQConnectionFactoryServiceTest.deactivate(amqConnectionFactoryService); } public static void deactivate(JMSTopicManager jsmTopicManager) throws JMSException { jsmTopicManager.deactivate(new HashMap<String, Object>()); } /** * Test a working publish operation, read the message and check all ok. Will try and read the message for 1s. Normally messages * arrive within 15ms. * @throws Exception */ @Test public void testPublish() throws Exception { // make the test map unique. testMap.put("testing", "testPublish" + System.currentTimeMillis()); addSubscriber(new String[]{"testtopic"}, true); jsmTopicManager.publish(Types.topicName("testtopic"), Types.commandName("testcommand"), testMap); lastSent = System.currentTimeMillis(); assertTrue(waitForPassed(MESSAGE_LATENCY)); removeSubscriber(); } private void addSubscriber(String[] topics, boolean match) { Subscriber subscriber = new TestingSubscriber(this, match, topics); serviceProperties.clear(); serviceProperties.put(Subscriber.TOPIC_NAMES_PROP, topics); Mockito.when(bundleContext.getService(Mockito.eq(serviceReference))).thenReturn(subscriber); jsmTopicManager.addSubscriber(serviceReference); } private void removeSubscriber() { jsmTopicManager.removeSubscriber(serviceReference); } /** * Test that a message sent with the wrong topic doesn't arrive, filtered by the topic inside the jmsTopicManager. * @throws Exception */ @Test public void testFilterdByTopic() throws Exception { // make the test map unique. 
testMap.put("testing", "testFilterdByTopic" + System.currentTimeMillis()); addSubscriber(new String[]{"testtopic"}, false); lastSent = System.currentTimeMillis(); assertFalse(waitForPassed(MESSAGE_LATENCY)); // not expecting a message at all removeSubscriber(); } /** * Check that a message sent to the correct topic is filtered by the MessageFilter. * The test waits 1s for the message to arrive. If testPublish does not fail, message * latency is < 1s. * @throws Exception */ @Test public void testFilterdByFilter() throws Exception { // make the test map unique. testMap.put("testing", "testFilterdByFilter" + System.currentTimeMillis()); addSubscriber(new String[]{"testtopic"}, false); jsmTopicManager.publish(Types.topicName("testtopic"), Types.commandName("testcommand"), testMap); lastSent = System.currentTimeMillis(); assertFalse(waitForPassed(MESSAGE_LATENCY)); // not expecting a message at all removeSubscriber(); } private boolean waitForPassed(long t) { long end = System.currentTimeMillis() + t; while(System.currentTimeMillis() < end) { if (passed) { return true; } else { Thread.yield(); } } LOGGER.info("Message not recieved after "+t+" ms"); return false; } private static class TestingSubscriber implements Subscriber, MessageFilter { private JMSTopicManagerTest test; private final boolean accept; private final Set<Types.Name> topicnames; public TestingSubscriber(JMSTopicManagerTest test, boolean accept, String[] topicname) { this.test = test; this.accept = accept; this.topicnames = new HashSet<Types.Name>(); for(String t : topicname) { topicnames.add(Types.topicName(t)); } } @Override public void onMessage(Types.TopicName topic, Map<String, Object> message) { LOGGER.info("Got message in "+(System.currentTimeMillis()-test.lastSent)+" ms"); JsonTest.checkEquals(test.testMap, message); test.passed = true; } @Override public boolean accept(Types.Name name, Map<String, Object> mapMessage) { return topicnames.contains(name) == accept; } } }
package main.java.wonderland.components.writer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import main.java.wonderland.general.core.Book;
import main.java.wonderland.general.core.BookGroup;
import main.java.wonderland.general.core.Grade;

/**
 * Serves as a control unit for two lists containing the suggested and currently
 * selected books.
 *
 * @author Lukas Kannenberg
 * @since 15-10-2015
 * @version 15-10-2015 22:50
 */
public class ListControl {

	/** Grade whose standard books and book groups drive the suggestions. */
	private Grade grade;
	/** Target number of selected books; the shortfall is topped up from statistics. */
	private int defaultSize;
	/** Books currently offered to the user. */
	private List<Book> suggestedBooks = new ArrayList<>();
	/** Books currently chosen by the user. */
	private List<Book> selectedBooks = new ArrayList<>();

	/**
	 * Constructs a new ListControl. The Grade is used to calculate suggested
	 * books. After construction the lists will be initialized based on the
	 * grade's standard.
	 *
	 * @param grade the grade
	 * @param defaultSize the default number of selected books
	 */
	public ListControl(Grade grade, int defaultSize) {
		this.grade = grade;
		this.defaultSize = defaultSize;
		initialiseLists();
	}

	/**
	 * Moves a book between the two lists: a suggested book becomes selected, a
	 * selected book becomes suggested. The suggestions are updated afterwards.
	 *
	 * @param book the book to move
	 */
	public void switchItem(Book book) {
		// List.remove(Object) reports whether the element was present, so the
		// separate contains() probe of the original version is unnecessary.
		if (suggestedBooks.remove(book)) {
			selectedBooks.add(book);
		} else if (selectedBooks.remove(book)) {
			suggestedBooks.add(book);
		} else {
			System.err.println("The BookItem to switch is not contained in either list.");
		}
		updateSuggestedBooks();
	}

	/**
	 * Initializes both lists from the grade's standard books and book groups.
	 */
	private void initialiseLists() {
		if (grade == null)
			return;
		if (grade.hasBooks()) {
			Book[] books = grade.getBooks().getBooks();
			selectedBooks.clear();
			selectedBooks.addAll(Arrays.asList(books));
		}
		if (grade.hasBookGroups()) {
			BookGroup[] bookGroups = grade.getBookGroups();
			suggestedBooks.clear();
			for (BookGroup bookGroup : bookGroups) {
				suggestedBooks.addAll(Arrays.asList(bookGroup.getBooks()));
			}
		}
		addFromStatistics(defaultSize - selectedBooks.size());
	}

	/**
	 * Updates the suggested books: re-suggests grade books that are in neither
	 * list, and enforces each book group's minimum-selected rule.
	 */
	private void updateSuggestedBooks() {
		if (grade == null)
			return;
		if (grade.hasBooks()) {
			Book[] books = grade.getBooks().getBooks();
			for (Book book : books) {
				if (!selectedBooks.contains(book) && !suggestedBooks.contains(book)) {
					suggestedBooks.add(book);
				}
			}
		}
		if (grade.hasBookGroups()) {
			BookGroup[] bookGroups = grade.getBookGroups();
			for (BookGroup bookGroup : bookGroups) {
				if (getBookAmountSelected(bookGroup) >= bookGroup.getMinimumSelected()) {
					// Quota reached: this group's books need not be suggested.
					removeInSuggested(bookGroup);
				} else {
					addRemainingInSuggested(bookGroup);
				}
			}
		}
		addFromStatistics(defaultSize - selectedBooks.size());
	}

	/**
	 * Accumulates, per book, how often it was used in the past 5, 10 and 30
	 * minutes (per StatisticalAnalyt). Intended to fill the suggestion list up
	 * to {@code amount} entries; the selection step is still a TODO.
	 *
	 * @param amount how many books should be added from statistics
	 */
	private void addFromStatistics(int amount) {
		// Bug fix: initialiseLists()/updateSuggestedBooks() call this even when
		// the grade has no standard books, in which case the sibling methods
		// show grade.getBooks() may not be dereferenced -> guard against NPE.
		if (grade == null || !grade.hasBooks()) {
			return;
		}
		// Time windows in milliseconds: 5, 10 and 30 minutes.
		int[] array = { 1000 * 60 * 5, 1000 * 60 * 10, 1000 * 60 * 30 };
		HashMap<Book, Integer> amounts = new HashMap<>();
		for (Book book : grade.getBooks().getBooks()) {
			amounts.put(book, 0);
			for (int i = 0; i < array.length; i++) {
				int result = StatisticalAnalyt.getAmountPastTime(grade, book, array[i]);
				amounts.put(book, amounts.get(book) + result);
			}
		}
		// TODO use 'amounts' to add the 'amount' most-used books to the suggestions.
	}

	/**
	 * Searches and returns the amount of BookItems from a specific BookGroup
	 * contained in the selectedBook list.
	 *
	 * @param group the book group
	 * @return the amount of BookItems from a specific BookGroup contained in
	 *         the selectedBook list
	 */
	private int getBookAmountSelected(BookGroup group) {
		int contained = 0;
		for (Book book : selectedBooks) {
			if (group.containsBook(book.getID())) {
				contained++;
			}
		}
		return contained;
	}

	/**
	 * Removes all books of a book group from the suggested book list.
	 *
	 * @param group the book group
	 */
	private void removeInSuggested(BookGroup group) {
		for (Book bookItem : group.getBooks()) {
			// remove() is a no-op when absent; one occurrence removed per book,
			// matching the original contains()-guarded behaviour.
			suggestedBooks.remove(bookItem);
		}
	}

	/**
	 * Adds each book of a book group to the suggested book list if it is not
	 * already contained in the suggested book list nor in the selected book
	 * list.
	 *
	 * @param group the book group
	 */
	private void addRemainingInSuggested(BookGroup group) {
		for (Book bookItem : group.getBooks()) {
			// Bug fix: also exclude books that are already selected. The javadoc
			// always promised this, and the hasBooks() branch of
			// updateSuggestedBooks() applies exactly this rule; without it a
			// selected book was duplicated back into the suggestions.
			if (!suggestedBooks.contains(bookItem) && !selectedBooks.contains(bookItem)) {
				suggestedBooks.add(bookItem);
			}
		}
	}

	/**
	 * @return the grade
	 */
	public Grade getGrade() {
		return grade;
	}

	/**
	 * Sets a new grade and re-initializes both lists from it.
	 *
	 * @param grade the grade
	 */
	public void setGrade(Grade grade) {
		this.grade = grade;
		initialiseLists();
	}

	/**
	 * @param book the book to add to the selected list
	 */
	public void addInSelected(Book book) {
		selectedBooks.add(book);
		updateSuggestedBooks();
	}

	/**
	 * @param book the book to remove from the selected list
	 */
	public void removeInSelected(Book book) {
		selectedBooks.remove(book);
		updateSuggestedBooks();
	}

	/**
	 * @return the suggested books
	 */
	public Book[] getSuggestedBooks() {
		return suggestedBooks.toArray(new Book[0]);
	}

	/**
	 * @return the selected books
	 */
	public Book[] getSelectedBooks() {
		return selectedBooks.toArray(new Book[0]);
	}
}
/* // Licensed to DynamoBI Corporation (DynamoBI) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. DynamoBI licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. */ package org.eigenbase.test; import java.nio.charset.*; import java.util.List; import java.util.regex.*; import junit.framework.*; import org.eigenbase.reltype.*; import org.eigenbase.sql.*; import org.eigenbase.sql.fun.*; import org.eigenbase.sql.parser.*; import org.eigenbase.sql.test.*; import org.eigenbase.sql.type.*; import org.eigenbase.sql.validate.*; import org.eigenbase.util.*; /** * An abstract base class for implementing tests against {@link SqlValidator}. * * <p>A derived class can refine this test in two ways. First, it can add <code> * testXxx()</code> methods, to test more functionality. * * <p>Second, it can override the {@link #getTester} method to return a * different implementation of the {@link Tester} object. 
This encapsulates the
 * differences between test environments, for example, which SQL parser or
 * validator to use.</p>
 *
 * @author Wael Chatila
 * @version $Id$
 * @since Jan 12, 2004
 */
public class SqlValidatorTestCase
    extends TestCase
{
    //~ Static fields/initializers ---------------------------------------------

    protected static final String NL = System.getProperty("line.separator");

    private static final Pattern lineColPattern =
        Pattern.compile("At line ([0-9]+), column ([0-9]+)");

    private static final Pattern lineColTwicePattern =
        Pattern.compile(
            "(?s)From line ([0-9]+), column ([0-9]+) to line ([0-9]+), column ([0-9]+): (.*)");

    //~ Instance fields --------------------------------------------------------

    protected final Tester tester;

    //~ Constructors -----------------------------------------------------------

    /**
     * Creates a testcase.
     *
     * <p>If name is of the form "compatible:testCase", invokes sets the
     * compatibility to "compatible".
     *
     * @param name Name; examples "Sql2003:testGroup", "testGroup"
     */
    public SqlValidatorTestCase(String name)
    {
        super(splitName(name));
        SqlConformance conformance = splitConformance(name);
        if (conformance == null) {
            conformance = SqlConformance.Default;
        }
        this.tester = getTester(conformance);
    }

    //~ Methods ----------------------------------------------------------------

    /**
     * Returns the test-case name with any "conformance:" prefix stripped; for
     * example "Sql2003:testGroup" yields "testGroup".
     */
    private static String splitName(String name)
    {
        int colon = name.indexOf(':');
        if (colon < 0) {
            return name;
        } else {
            // Bug fix: skip the ':' itself; substring(colon) kept the colon in
            // the test name, which breaks JUnit's lookup of testXxx methods.
            return name.substring(colon + 1);
        }
    }

    /**
     * Returns the conformance named by a "conformance:" prefix, or null if the
     * name has no prefix.
     */
    private static SqlConformance splitConformance(String name)
    {
        int colon = name.indexOf(':');
        if (colon < 0) {
            return null;
        } else {
            String conformanceName = name.substring(0, colon);
            return SqlConformance.valueOf(conformanceName);
        }
    }

    /**
     * Returns a tester. Derived classes should override this method to run the
     * same set of tests in a different testing environment.
     *
     * @param conformance Language version tests should check compatibility with
     */
    public Tester getTester(SqlConformance conformance)
    {
        return new TesterImpl(conformance);
    }

    /**
     * Checks that a SQL query is valid.
     */
    public void check(String sql)
    {
        tester.assertExceptionIsThrown(sql, null);
    }

    /**
     * Checks that a SQL expression is valid.
     */
    public void checkExp(String sql)
    {
        tester.assertExceptionIsThrown(
            TesterImpl.buildQuery(sql),
            null);
    }

    /**
     * Checks that a SQL query gives a particular error, or succeeds if {@code
     * expected} is null.
     */
    public final void checkFails(
        String sql,
        String expected)
    {
        tester.assertExceptionIsThrown(sql, expected);
    }

    /**
     * Checks that a SQL expression gives a particular error.
     */
    public final void checkExpFails(
        String sql,
        String expected)
    {
        tester.assertExceptionIsThrown(
            TesterImpl.buildQuery(sql),
            expected);
    }

    /**
     * Checks that a SQL expression gives a particular error, and that the
     * location of the error is the whole expression.
     */
    public final void checkWholeExpFails(
        String sql,
        String expected)
    {
        assert sql.indexOf('^') < 0;
        checkExpFails("^" + sql + "^", expected);
    }

    /**
     * Checks that a SQL expression has a particular type.
     */
    public void checkExpType(
        String sql,
        String expected)
    {
        checkColumnType(
            TesterImpl.buildQuery(sql),
            expected);
    }

    /**
     * Checks that a query returns a single column, and that the column has the
     * expected type. For example,
     *
     * <blockquote><code>checkColumnType("SELECT empno FROM Emp", "INTEGER NOT
     * NULL");</code></blockquote>
     *
     * @param sql Query
     * @param expected Expected type, including nullability
     */
    public void checkColumnType(
        String sql,
        String expected)
    {
        tester.checkColumnType(sql, expected);
    }

    /**
     * Checks that a query returns a row of the expected type. For example,
     *
     * <blockquote><code>checkResultType("select empno, name from emp","{EMPNO
     * INTEGER NOT NULL, NAME VARCHAR(10) NOT NULL}");</code></blockquote>
     *
     * @param sql Query
     * @param expected Expected row type
     */
    public void checkResultType(
        String sql,
        String expected)
    {
        tester.checkResultType(sql, expected);
    }

    /**
     * Checks that the interval literal in a SQL expression converts to the
     * expected number of milliseconds (or months, for year-month intervals).
     * For example,
     *
     * <blockquote><code>checkIntervalConv("INTERVAL '1' MINUTE",
     * "60000");</code></blockquote>
     *
     * <p>(Doc fix: the previous javadoc described a different method,
     * checkQueryType.)
     *
     * @param sql Expression containing an interval literal
     * @param expected Expected conversion result as a decimal string
     */
    public void checkIntervalConv(
        String sql,
        String expected)
    {
        tester.checkIntervalConv(
            TesterImpl.buildQuery(sql),
            expected);
    }

    protected final void assertExceptionIsThrown(
        String sql,
        String expectedMsgPattern)
    {
        tester.assertExceptionIsThrown(sql, expectedMsgPattern);
    }

    public void checkCharset(
        String sql,
        Charset expectedCharset)
    {
        tester.checkCharset(sql, expectedCharset);
    }

    public void checkCollation(
        String sql,
        String expectedCollationName,
        SqlCollation.Coercibility expectedCoercibility)
    {
        tester.checkCollation(sql, expectedCollationName, expectedCoercibility);
    }

    /**
     * Checks whether an exception matches the expected pattern. If <code>
     * sap</code> contains an error location, checks this too.
     *
     * @param ex Exception thrown
     * @param expectedMsgPattern Expected pattern
     * @param sap Query and (optional) position in query
     */
    public static void checkEx(
        Throwable ex,
        String expectedMsgPattern,
        SqlParserUtil.StringAndPos sap)
    {
        if (null == ex) {
            if (expectedMsgPattern == null) {
                // No error expected, and no error happened.
                return;
            } else {
                throw new AssertionFailedError(
                    "Expected query to throw exception, but it did not; "
                    + "query [" + sap.sql
                    + "]; expected [" + expectedMsgPattern + "]");
            }
        }
        Throwable actualException = ex;
        String actualMessage = actualException.getMessage();
        int actualLine = -1;
        int actualColumn = -1;
        int actualEndLine = 100;
        int actualEndColumn = 99;

        // Search for an EigenbaseContextException somewhere in the stack.
        EigenbaseContextException ece = null;
        for (Throwable x = ex; x != null; x = x.getCause()) {
            if (x instanceof EigenbaseContextException) {
                ece = (EigenbaseContextException) x;
                break;
            }
            if (x.getCause() == x) {
                // Self-referential cause; stop to avoid an infinite loop.
                break;
            }
        }

        // Search for a SqlParseException -- with its position set -- somewhere
        // in the stack.
        SqlParseException spe = null;
        for (Throwable x = ex; x != null; x = x.getCause()) {
            if ((x instanceof SqlParseException)
                && (((SqlParseException) x).getPos() != null))
            {
                spe = (SqlParseException) x;
                break;
            }
            if (x.getCause() == x) {
                break;
            }
        }

        if (ece != null) {
            actualLine = ece.getPosLine();
            actualColumn = ece.getPosColumn();
            actualEndLine = ece.getEndPosLine();
            actualEndColumn = ece.getEndPosColumn();
            if (ece.getCause() != null) {
                actualException = ece.getCause();
                actualMessage = actualException.getMessage();
            }
        } else if (spe != null) {
            actualLine = spe.getPos().getLineNum();
            actualColumn = spe.getPos().getColumnNum();
            actualEndLine = spe.getPos().getEndLineNum();
            actualEndColumn = spe.getPos().getEndColumnNum();
            if (spe.getCause() != null) {
                actualException = spe.getCause();
                actualMessage = actualException.getMessage();
            }
        } else {
            // Fall back to scraping the position out of the message text.
            final String message = ex.getMessage();
            if (message != null) {
                Matcher matcher = lineColTwicePattern.matcher(message);
                if (matcher.matches()) {
                    actualLine = Integer.parseInt(matcher.group(1));
                    actualColumn = Integer.parseInt(matcher.group(2));
                    actualEndLine = Integer.parseInt(matcher.group(3));
                    actualEndColumn = Integer.parseInt(matcher.group(4));
                    actualMessage = matcher.group(5);
                } else {
                    matcher = lineColPattern.matcher(message);
                    if (matcher.matches()) {
                        actualLine = Integer.parseInt(matcher.group(1));
                        actualColumn = Integer.parseInt(matcher.group(2));
                    }
                }
            }
        }

        if (null == expectedMsgPattern) {
            if (null != actualException) {
                actualException.printStackTrace();
                // Bug fix: the "thru" half of the range previously repeated
                // actualLine/actualColumn; report the end position, as the
                // other messages in this method do.
                fail(
                    "Validator threw unexpected exception"
                    + "; query [" + sap.sql
                    + "]; exception [" + actualMessage
                    + "]; pos [line " + actualLine
                    + " col " + actualColumn
                    + " thru line " + actualEndLine
                    + " col " + actualEndColumn + "]");
            }
        } else {
            // (Simplified from a redundant 'else if (null != expectedMsgPattern)'.)
            if (null == actualException) {
                fail(
                    "Expected validator to throw "
                    + "exception, but it did not; query [" + sap.sql
                    + "]; expected [" + expectedMsgPattern + "]");
            } else {
                String sqlWithCarets;
                if ((actualColumn <= 0)
                    || (actualLine <= 0)
                    || (actualEndColumn <= 0)
                    || (actualEndLine <= 0))
                {
                    if (sap.pos != null) {
                        throw new AssertionFailedError(
                            "Expected error to have position,"
                            + " but actual error did not: "
                            + " actual pos [line " + actualLine
                            + " col " + actualColumn
                            + " thru line " + actualEndLine
                            + " col " + actualEndColumn + "]");
                    }
                    sqlWithCarets = sap.sql;
                } else {
                    sqlWithCarets =
                        SqlParserUtil.addCarets(
                            sap.sql,
                            actualLine,
                            actualColumn,
                            actualEndLine,
                            actualEndColumn + 1);
                    if (sap.pos == null) {
                        throw new AssertionFailedError(
                            "Actual error had a position, but expected error"
                            + " did not. Add error position carets to sql:\n"
                            + sqlWithCarets);
                    }
                }

                if ((actualMessage == null)
                    || !actualMessage.matches(expectedMsgPattern))
                {
                    actualException.printStackTrace();
                    final String actualJavaRegexp =
                        (actualMessage == null) ? "null"
                        : TestUtil.quoteForJava(
                            TestUtil.quotePattern(actualMessage));
                    fail(
                        "Validator threw different "
                        + "exception than expected; query [" + sap.sql
                        + "];" + NL
                        + " expected pattern [" + expectedMsgPattern
                        + "];" + NL
                        + " actual [" + actualMessage
                        + "];" + NL
                        + " actual as java regexp [" + actualJavaRegexp
                        + "]; pos [" + actualLine
                        + " col " + actualColumn
                        + " thru line " + actualEndLine
                        + " col " + actualEndColumn
                        + "]; sql [" + sqlWithCarets + "]");
                } else if (
                    (sap.pos != null)
                    && ((actualLine != sap.pos.getLineNum())
                        || (actualColumn != sap.pos.getColumnNum())
                        || (actualEndLine != sap.pos.getEndLineNum())
                        || (actualEndColumn != sap.pos.getEndColumnNum())))
                {
                    fail(
                        "Validator threw expected "
                        + "exception [" + actualMessage
                        + "];\nbut at pos [line " + actualLine
                        + " col " + actualColumn
                        + " thru line " + actualEndLine
                        + " col " + actualEndColumn
                        + "];\nsql [" + sqlWithCarets + "]");
                }
            }
        }
    }

    //~ Inner Interfaces -------------------------------------------------------

    /**
     * Encapsulates differences between test environments, for example, which
     * SQL parser or validator to use.
     *
     * <p>It contains a mock schema with <code>EMP</code> and <code>DEPT</code>
     * tables, which can run without having to start up Farrago.
     */
    public interface Tester
    {
        SqlNode parseQuery(String sql)
            throws SqlParseException;

        SqlNode parseAndValidate(SqlValidator validator, String sql);

        SqlValidator getValidator();

        /**
         * Checks that a query is valid, or, if invalid, throws the right
         * message at the right location.
         *
         * <p>If <code>expectedMsgPattern</code> is null, the query must
         * succeed.
         *
         * <p>If <code>expectedMsgPattern</code> is not null, the query must
         * fail, and give an error location of (expectedLine, expectedColumn)
         * through (expectedEndLine, expectedEndColumn).
         *
         * @param sql SQL statement
         * @param expectedMsgPattern If this parameter is null the query must be
         * valid for the test to pass; If this parameter is not null the query
         * must be malformed and the message given must match the pattern
         */
        void assertExceptionIsThrown(
            String sql,
            String expectedMsgPattern);

        /**
         * Returns the data type of the sole column of a SQL query.
         *
         * <p>For example, <code>getResultType("VALUES (1")</code> returns
         * <code>INTEGER</code>.
         *
         * <p>Fails if query returns more than one column.
         *
         * @see #getResultType(String)
         */
        RelDataType getColumnType(String sql);

        /**
         * Returns the data type of the row returned by a SQL query.
         *
         * <p>For example, <code>getResultType("VALUES (1, 'foo')")</code>
         * returns <code>RecordType(INTEGER EXPR$0, CHAR(3) EXPR#1)</code>.
         */
        RelDataType getResultType(String sql);

        void checkCollation(
            String sql,
            String expectedCollationName,
            SqlCollation.Coercibility expectedCoercibility);

        void checkCharset(
            String sql,
            Charset expectedCharset);

        /**
         * Checks that a query returns one column of an expected type. For
         * example, <code>checkType("VALUES (1 + 2)", "INTEGER NOT
         * NULL")</code>.
         */
        void checkColumnType(
            String sql,
            String expected);

        /**
         * Given a SQL query, returns a list of the origins of each result
         * field.
         *
         * @param sql SQL query
         * @param fieldOriginList Field origin list, e.g.
         * "{(CATALOG.SALES.EMP.EMPNO, null)}"
         */
        void checkFieldOrigin(String sql, String fieldOriginList);

        /**
         * Checks that a query gets rewritten to an expected form.
         *
         * @param validator validator to use; null for default
         * @param query query to test
         * @param expectedRewrite expected SQL text after rewrite and unparse
         */
        void checkRewrite(
            SqlValidator validator,
            String query,
            String expectedRewrite);

        /**
         * Checks that a query returns one column of an expected type. For
         * example, <code>checkType("select empno, name from emp""{EMPNO INTEGER
         * NOT NULL, NAME VARCHAR(10) NOT NULL}")</code>.
         */
        void checkResultType(
            String sql,
            String expected);

        /**
         * Checks if the interval value conversion to milliseconds is valid. For
         * example, <code>checkIntervalConv(VALUES (INTERVAL '1' Minute),
         * "60000")</code>.
         */
        void checkIntervalConv(
            String sql,
            String expected);

        /**
         * Given a SQL query, returns the monotonicity of the first item in the
         * SELECT clause.
         *
         * @param sql SQL query
         *
         * @return Monotonicity
         */
        SqlMonotonicity getMonotonicity(String sql);

        SqlConformance getConformance();
    }

    //~ Inner Classes ----------------------------------------------------------

    /**
     * Implementation of {@link org.eigenbase.test.SqlValidatorTestCase.Tester}
     * which talks to a mock catalog.
     *
     * <p>It is also a pure-Java implementation of the {@link SqlTester} used by
     * {@link SqlOperatorTests}. It can parse and validate queries, but it does
     * not invoke Farrago, so it is very fast but cannot execute functions.
     */
    public static class TesterImpl
        implements Tester, SqlTester
    {
        protected final SqlOperatorTable opTab;
        protected final SqlConformance conformance;

        public TesterImpl(SqlConformance conformance)
        {
            assert conformance != null;
            this.conformance = conformance;
            this.opTab = createOperatorTable();
        }

        public SqlConformance getConformance()
        {
            return conformance;
        }

        protected SqlOperatorTable createOperatorTable()
        {
            MockSqlOperatorTable opTab =
                new MockSqlOperatorTable(SqlStdOperatorTable.instance());
            MockSqlOperatorTable.addRamp(opTab);
            return opTab;
        }

        public SqlValidator getValidator()
        {
            final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl();
            return SqlValidatorUtil.newValidator(
                opTab,
                new MockCatalogReader(typeFactory),
                typeFactory);
        }

        public void assertExceptionIsThrown(
            String sql,
            String expectedMsgPattern)
        {
            SqlValidator validator;
            SqlNode sqlNode;
            SqlParserUtil.StringAndPos sap = SqlParserUtil.findPos(sql);
            try {
                sqlNode = parseQuery(sap.sql);
                validator = getValidator();
            } catch (SqlParseException e) {
                String errMessage = e.getMessage();
                if (expectedMsgPattern == null) {
                    e.printStackTrace();
                    throw new AssertionFailedError(
                        "Error while parsing query [" + sap.sql + "]");
                } else if (
                    (null == errMessage)
                    || !errMessage.matches(expectedMsgPattern))
                {
                    e.printStackTrace();
                    throw new AssertionFailedError(
                        "Error did not match expected ["
                        + expectedMsgPattern + "] while parsing query ["
                        + sap.sql + "]");
                }
                // Parse error matched the expected pattern; test passes.
                return;
            } catch (Throwable e) {
                e.printStackTrace();
                throw new AssertionFailedError(
                    "Error while parsing query [" + sap.sql + "]");
            }

            Throwable thrown = null;
            try {
                validator.validate(sqlNode);
            } catch (Throwable ex) {
                thrown = ex;
            }
            checkEx(thrown, expectedMsgPattern, sap);
        }

        public RelDataType getColumnType(String sql)
        {
            RelDataType rowType = getResultType(sql);
            final RelDataTypeField [] fields = rowType.getFields();
            assertEquals("expected query to return 1 field", 1, fields.length);
            RelDataType actualType = fields[0].getType();
            return actualType;
        }

        public RelDataType getResultType(String sql)
        {
            SqlValidator validator = getValidator();
            SqlNode n = parseAndValidate(validator, sql);
            RelDataType rowType = validator.getValidatedNodeType(n);
            return rowType;
        }

        public SqlNode parseAndValidate(SqlValidator validator, String sql)
        {
            if (validator == null) {
                validator = getValidator();
            }
            SqlNode sqlNode;
            try {
                sqlNode = parseQuery(sql);
            } catch (SqlParseException e) {
                e.printStackTrace();
                throw new AssertionFailedError(
                    "Error while parsing query [" + sql + "]");
            } catch (Throwable e) {
                e.printStackTrace();
                throw new AssertionFailedError(
                    "Error while parsing query [" + sql + "]");
            }
            return validator.validate(sqlNode);
        }

        public SqlNode parseQuery(String sql)
            throws SqlParseException
        {
            SqlParser parser = new SqlParser(sql);
            return parser.parseQuery();
        }

        public void checkColumnType(String sql, String expected)
        {
            RelDataType actualType = getColumnType(sql);
            String actual = AbstractSqlTester.getTypeString(actualType);
            assertEquals(expected, actual);
        }

        public void checkFieldOrigin(String sql, String fieldOriginList)
        {
            SqlValidator validator = getValidator();
            SqlNode n = parseAndValidate(validator, sql);
            final List<List<String>> list = validator.getFieldOrigins(n);
            final StringBuilder buf = new StringBuilder("{");
            int i = 0;
            for (List<String> strings : list) {
                if (i++ > 0) {
                    buf.append(", ");
                }
                if (strings == null) {
                    buf.append("null");
                } else {
                    int j = 0;
                    for (String s : strings) {
                        if (j++ > 0) {
                            buf.append('.');
                        }
                        buf.append(s);
                    }
                }
            }
            buf.append("}");
            assertEquals(fieldOriginList, buf.toString());
        }

        public void checkResultType(String sql, String expected)
        {
            RelDataType actualType = getResultType(sql);
            String actual = AbstractSqlTester.getTypeString(actualType);
            assertEquals(expected, actual);
        }

        public void checkIntervalConv(String sql, String expected)
        {
            SqlValidator validator = getValidator();
            final SqlSelect n = (SqlSelect) parseAndValidate(validator, sql);

            // Dig the interval literal out of the first SqlCall operand,
            // unwrapping an AS alias if present.
            SqlNode node = null;
            for (int i = 0; i < n.getOperands().length; i++) {
                node = n.getOperands()[i];
                if (node instanceof SqlCall) {
                    if (node.getKind() == SqlKind.AS) {
                        node = ((SqlCall) node).operands[0];
                    }
                    node =
                        ((SqlCall) ((SqlCall) node).getOperands()[0])
                        .getOperands()[0];
                    break;
                }
            }

            SqlIntervalLiteral.IntervalValue interval =
                (SqlIntervalLiteral.IntervalValue) ((SqlIntervalLiteral) node)
                .getValue();
            long l =
                interval.getIntervalQualifier().isYearMonth()
                ? SqlParserUtil.intervalToMonths(interval)
                : SqlParserUtil.intervalToMillis(interval);
            String actual = l + "";
            assertEquals(expected, actual);
        }

        public void checkType(String expression, String type)
        {
            checkColumnType(
                buildQuery(expression),
                type);
        }

        public void checkCollation(
            String sql,
            String expectedCollationName,
            SqlCollation.Coercibility expectedCoercibility)
        {
            RelDataType actualType = getColumnType(buildQuery(sql));
            SqlCollation collation = actualType.getCollation();

            assertEquals(expectedCollationName, collation.getCollationName());
            assertEquals(expectedCoercibility, collation.getCoercibility());
        }

        public void checkCharset(
            String sql,
            Charset expectedCharset)
        {
            RelDataType actualType = getColumnType(buildQuery(sql));
            Charset actualCharset = actualType.getCharset();

            if (!expectedCharset.equals(actualCharset)) {
                fail(
                    NL + "Expected=" + expectedCharset.name() + NL
                    + " actual=" + actualCharset.name());
            }
        }

        // SqlTester methods

        public void setFor(
            SqlOperator operator,
            VmName ... unimplementedVmNames)
        {
            // do nothing
        }

        public void checkAgg(
            String expr,
            String [] inputValues,
            Object result,
            double delta)
        {
            String query =
                AbstractSqlTester.generateAggQuery(expr, inputValues);
            check(query, AbstractSqlTester.AnyTypeChecker, result, delta);
        }

        public void checkWinAgg(
            String expr,
            String [] inputValues,
            String windowSpec,
            String type,
            Object result,
            double delta)
        {
            String query =
                AbstractSqlTester.generateWinAggQuery(
                    expr, windowSpec, inputValues);
            check(query, AbstractSqlTester.AnyTypeChecker, result, delta);
        }

        public void checkScalar(
            String expression,
            Object result,
            String resultType)
        {
            checkType(expression, resultType);
            check(
                buildQuery(expression),
                AbstractSqlTester.AnyTypeChecker,
                result,
                0);
        }

        public void checkScalarExact(
            String expression,
            String result)
        {
            String sql = buildQuery(expression);
            check(sql, AbstractSqlTester.IntegerTypeChecker, result, 0);
        }

        public void checkScalarExact(
            String expression,
            String expectedType,
            String result)
        {
            String sql = buildQuery(expression);
            TypeChecker typeChecker =
                new AbstractSqlTester.StringTypeChecker(expectedType);
            check(sql, typeChecker, result, 0);
        }

        public void checkScalarApprox(
            String expression,
            String expectedType,
            double expectedResult,
            double delta)
        {
            String sql = buildQuery(expression);
            TypeChecker typeChecker =
                new AbstractSqlTester.StringTypeChecker(expectedType);
            check(
                sql,
                typeChecker,
                Double.valueOf(expectedResult),
                delta);
        }

        public void checkBoolean(
            String expression,
            Boolean result)
        {
            String sql = buildQuery(expression);
            if (null == result) {
                checkNull(expression);
            } else {
                check(
                    sql,
                    AbstractSqlTester.BooleanTypeChecker,
                    result.toString(),
                    0);
            }
        }

        public void checkString(
            String expression,
            String result,
            String expectedType)
        {
            String sql = buildQuery(expression);
            TypeChecker typeChecker =
                new AbstractSqlTester.StringTypeChecker(expectedType);
            check(sql, typeChecker, result, 0);
        }

        public void checkNull(String expression)
        {
            String sql = buildQuery(expression);
            check(sql, AbstractSqlTester.AnyTypeChecker, null, 0);
        }

        public final void check(
            String query,
            TypeChecker typeChecker,
            Object result,
            double delta)
        {
            check(
                query,
                typeChecker,
                AbstractSqlTester.createChecker(result, delta));
        }

        public void check(
            String query,
            TypeChecker typeChecker,
            ResultChecker resultChecker)
        {
            // This implementation does NOT check the result!
            // (It can't because we're pure Java.)
            // All it does is check the return type.

            // Parse and validate. There should be no errors.
            RelDataType actualType = getColumnType(query);

            // Check result type.
            typeChecker.checkType(actualType);
        }

        public void checkRewrite(
            SqlValidator validator,
            String query,
            String expectedRewrite)
        {
            SqlNode rewrittenNode = parseAndValidate(validator, query);
            String actualRewrite =
                rewrittenNode.toSqlString(SqlDialect.DUMMY, false).getSql();
            TestUtil.assertEqualsVerbose(expectedRewrite, actualRewrite);
        }

        public void checkFails(
            String expression,
            String expectedError,
            boolean runtime)
        {
            if (runtime) {
                // We need to test that the expression fails at runtime.
                // Ironically, that means that it must succeed at prepare time.
                SqlValidator validator = getValidator();
                final String sql = buildQuery(expression);
                SqlNode n = parseAndValidate(validator, sql);
                assertNotNull(n);
            } else {
                assertExceptionIsThrown(
                    buildQuery(expression),
                    expectedError);
            }
        }

        public SqlMonotonicity getMonotonicity(String sql)
        {
            final SqlValidator validator = getValidator();
            final SqlNode node = parseAndValidate(validator, sql);
            final SqlSelect select = (SqlSelect) node;
            final SqlNode selectItem0 = select.getSelectList().get(0);
            final SqlValidatorScope scope = validator.getSelectScope(select);
            return selectItem0.getMonotonicity(scope);
        }

        private static String buildQuery(String expression)
        {
            return "values (" + expression + ")";
        }

        public boolean isVm(VmName vmName)
        {
            return false;
        }
    }
}

// End SqlValidatorTestCase.java
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kra.coi.notesandattachments.attachments;

import org.apache.commons.lang3.StringUtils;
import org.apache.struts.upload.FormFile;
import org.kuali.coeus.common.framework.attachment.AttachmentFile;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import org.kuali.kra.SkipVersioning;
import org.kuali.kra.coi.CoiAttachmentType;
import org.kuali.kra.coi.CoiDisclProject;
import org.kuali.kra.coi.CoiDisclosure;
import org.kuali.kra.coi.CoiDisclosureAssociate;
import org.kuali.kra.coi.personfinancialentity.PersonFinIntDisclosure;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.util.ObjectUtils;

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Business object representing one attachment on a COI (Conflict of Interest)
 * disclosure. An attachment couples an uploaded {@link AttachmentFile} with
 * descriptive metadata (description, contact details, comments), and may be
 * associated with a disclosure project, an event type, and/or a person's
 * financial entity.
 * <p>
 * Fields annotated with {@code @SkipVersioning} are excluded when a disclosure
 * is versioned. Transient fields ({@code file}, {@code newFile},
 * {@code updateUserFullName}, {@code attachmentIdForPermission}) are not
 * serialized with the object.
 */
public class CoiDisclosureAttachment extends CoiDisclosureAssociate implements Comparable<CoiDisclosureAttachment>{

    private static final long serialVersionUID = 8722598360752485817L;

    // Primary identifiers: the attachment row, the stored file row, and the
    // per-disclosure document sequence number.
    private Long attachmentId;
    private Long fileId;
    private Integer documentId;

    // The persisted file contents (lazily loaded) and the incoming upload
    // from the Struts form; both are transient.
    private transient AttachmentFile file;
    private transient FormFile newFile;

    @SkipVersioning
    private transient String updateUserFullName;

    private Long coiDisclosureId;

    // User-entered metadata describing the attachment.
    private String description;
    private String contactName;
    private String contactEmailAddress;
    private String contactPhoneNumber;
    private String comments;
    private String documentStatusCode;
    private Timestamp createTimestamp;

    // Optional association with a specific disclosure project / event.
    private String projectId;
    private String eventTypeCode;

    // Link back to the disclosure version this attachment originated from.
    private Long originalCoiDisclosureId;
    @SkipVersioning
    private CoiDisclosure originalCoiDisclosure;

    // Optional association with a person's financial entity.
    private Long financialEntityId;

    private String typeCode;
    private CoiAttachmentType coiAttachmentType;

    // Configuration-property key for the "updated by" label used in
    // getShortDescription(). NOTE(review): instance field, though it is a
    // constant — could be static.
    private final String MESSAGE_UPDATED_BY = "message.updated.by";

    @SkipVersioning
    private PersonFinIntDisclosure financialEntity;

    // Lazily cached resolved value of MESSAGE_UPDATED_BY; shared across all
    // instances and assigned without synchronization (benign race).
    private static String updatedByString;

    // Stable id used for permission checks; survives even when attachmentId
    // is re-assigned (see setAttachmentId / getAttachmentIdForPermission).
    private transient Long attachmentIdForPermission;

    private String usageSectionId;

    /** Default constructor required by the persistence framework. */
    public CoiDisclosureAttachment() {
        super();
    }

    /** Creates an attachment pre-associated with the given disclosure. */
    public CoiDisclosureAttachment(CoiDisclosure coiDisclosure) {
        this.setCoiDisclosure(coiDisclosure);
    }

    public Long getFinancialEntityId() {
        return financialEntityId;
    }

    public void setFinancialEntityId(Long financialEntityId) {
        this.financialEntityId = financialEntityId;
    }

    public PersonFinIntDisclosure getFinancialEntity() {
        return financialEntity;
    }

    public void setFinancialEntity(PersonFinIntDisclosure financialEntity) {
        this.financialEntity = financialEntity;
    }

    /**
     * Returns the display name of the associated financial entity, or the
     * empty string when none is linked. Forces a refresh of the
     * {@code financialEntity} reference from the database first.
     */
    public String getFinancialEntityName() {
        refreshReferenceObject("financialEntity");
        return ObjectUtils.isNotNull(getFinancialEntity()) ? getFinancialEntity().getEntityName() : "";
    }

    public String getEventTypeCode() {
        return eventTypeCode;
    }

    public void setEventTypeCode(String eventTypeCode) {
        this.eventTypeCode = eventTypeCode;
    }

    public String getProjectId() {
        return projectId;
    }

    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    public Timestamp getCreateTimestamp() {
        return createTimestamp;
    }

    public void setCreateTimestamp(Timestamp createTimestamp) {
        this.createTimestamp = createTimestamp;
    }

    public String getContactEmailAddress() {
        return contactEmailAddress;
    }

    public void setContactEmailAddress(String contactEmailAddress) {
        this.contactEmailAddress = contactEmailAddress;
    }

    public String getContactPhoneNumber() {
        return contactPhoneNumber;
    }

    public void setContactPhoneNumber(String contactPhoneNumber) {
        this.contactPhoneNumber = contactPhoneNumber;
    }

    public String getContactName() {
        return contactName;
    }

    public void setContactName(String contactName) {
        this.contactName = contactName;
    }

    public String getComments() {
        return comments;
    }

    public void setComments(String comments) {
        this.comments = comments;
    }

    public Long getAttachmentId() {
        return attachmentId;
    }

    /**
     * Sets the attachment id, also capturing it as the id used for
     * permission checks whenever a non-null value is assigned.
     */
    public void setAttachmentId(Long attachmentId) {
        this.attachmentId = attachmentId;
        if (attachmentId != null) {
            this.attachmentIdForPermission = attachmentId;
        }
    }

    /**
     * Returns the id used for permission checks, lazily falling back to the
     * current attachment id when it has not been captured yet.
     */
    public Long getAttachmentIdForPermission() {
        if (attachmentIdForPermission == null) {
            attachmentIdForPermission = getAttachmentId();
        }
        return attachmentIdForPermission;
    }

    public void setAttachmentIdForPermission(Long attachmentId) {
        this.attachmentIdForPermission = attachmentId;
    }

    public Long getFileId() {
        return fileId;
    }

    public void setFileId(Long fileId) {
        this.fileId = fileId;
    }

    public Integer getDocumentId() {
        return documentId;
    }

    public void setDocumentId(Integer documentId) {
        this.documentId = documentId;
    }

    public AttachmentFile getFile() {
        return file;
    }

    public void setFile(AttachmentFile file) {
        this.file = file;
    }

    public FormFile getNewFile() {
        return newFile;
    }

    public void setNewFile(FormFile newFile) {
        this.newFile = newFile;
    }

    public String getUpdateUserFullName() {
        return updateUserFullName;
    }

    public void setUpdateUserFullName(String updateUserFullName) {
        this.updateUserFullName = updateUserFullName;
    }

    public Long getCoiDisclosureId() {
        return coiDisclosureId;
    }

    public void setCoiDisclosureId(Long coiDisclosureId) {
        this.coiDisclosureId = coiDisclosureId;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public void setDocumentStatusCode(String documentStatusCode) {
        this.documentStatusCode = documentStatusCode;
    }

    public String getDocumentStatusCode() {
        return documentStatusCode;
    }

    public CoiAttachmentType getCoiAttachmentType() {
        return coiAttachmentType;
    }

    public void setCoiAttachmentType(CoiAttachmentType coiAttachmentType) {
        this.coiAttachmentType = coiAttachmentType;
    }

    public String getTypeCode() {
        return typeCode;
    }

    public void setTypeCode(String typeCode) {
        this.typeCode = typeCode;
    }

    /**
     * Adds the given attachment to the given collection.
     *
     * @throws IllegalArgumentException if either argument is null
     */
    public static void addAttachmentToCollection(CoiDisclosureAttachment coiDisclosureAttachment, List<CoiDisclosureAttachment> coiDisclosureAttachments) {
        if (coiDisclosureAttachment == null) {
            throw new IllegalArgumentException("the attachment is null");
        }
        if (coiDisclosureAttachments == null) {
            throw new IllegalArgumentException("the toList is null");
        }
        coiDisclosureAttachments.add(coiDisclosureAttachment);
    }

    /**
     * Records the updating user only when none is set yet (or when clearing
     * with null) — an existing non-null update user is never overwritten by a
     * non-null value, so the first updater is preserved across re-saves.
     */
    public void setUpdateUser(String updateUser) {
        if (updateUser == null || getUpdateUser() == null ) {
            super.setUpdateUser(updateUser);
        }
    }

    /**
     * Returns the title of the disclosure project whose id matches this
     * attachment's project id (case-insensitive), or the empty string when no
     * project matches. Refreshes the disclosure's project list first.
     */
    public String getProjectName() {
        refreshReferenceObject("coiDisclProjects");
        for (CoiDisclProject project : getCoiDisclosure().getCoiDisclProjects()) {
            if (StringUtils.equalsIgnoreCase(project.getProjectId(), getProjectId())) {
                return project.getCoiProjectTitle();
            }
        }
        return "";
    }

    // NOTE(review): always returns 0, which makes the ordering inconsistent
    // with equals() below — confirm whether sorted collections ever hold
    // these objects before relying on this.
    @Override
    public int compareTo(CoiDisclosureAttachment arg0) {
        return 0;
    }

    /**
     * Equality is based on the superclass's fields plus description,
     * documentId, file, and fileId. NOTE(review): contact fields, comments,
     * and codes are deliberately(?) excluded — verify this matches the
     * intended identity of an attachment.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj)) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        CoiDisclosureAttachment other = (CoiDisclosureAttachment) obj;
        if (description == null) {
            if (other.description != null) {
                return false;
            }
        } else if (!description.equals(other.description)) {
            return false;
        }
        if (documentId == null) {
            if (other.documentId != null) {
                return false;
            }
        } else if (!documentId.equals(other.documentId)) {
            return false;
        }
        if (this.file == null) {
            if (other.file != null) {
                return false;
            }
        } else if (!this.file.equals(other.file)) {
            return false;
        }
        if (this.fileId == null) {
            if (other.fileId != null) {
                return false;
            }
        } else if (!this.fileId.equals(other.fileId)) {
            return false;
        }
        return true;
    }

    /** Hash code over the same fields as {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((description == null) ? 0 : description.hashCode());
        result = prime * result + ((documentId == null) ? 0 : documentId.hashCode());
        result = prime * result + ((this.file == null) ? 0 : this.file.hashCode());
        result = prime * result + ((this.fileId == null) ? 0 : this.fileId.hashCode());
        return result;
    }

    public Long getOriginalCoiDisclosureId() {
        return originalCoiDisclosureId;
    }

    public void setOriginalCoiDisclosureId(Long originalCoiDisclosureId) {
        this.originalCoiDisclosureId = originalCoiDisclosureId;
    }

    public CoiDisclosure getOriginalCoiDisclosure() {
        return originalCoiDisclosure;
    }

    public void setOriginalCoiDisclosure(CoiDisclosure originalCoiDisclosure) {
        this.originalCoiDisclosure = originalCoiDisclosure;
    }

    /**
     * Returns a short display string: abbreviated description and file name
     * (20 chars each), followed by the "updated by" label and user when an
     * update user is recorded.
     */
    public String getShortDescription() {
        String result = StringUtils.abbreviate(getDescription(), 20) + ": " + StringUtils.abbreviate(getFile().getName(), 20);
        if (!StringUtils.isEmpty(getUpdateUser())) {
            result += ": " + getUpdatedByString() + " " + getUpdateUser();
        }
        return result;
    }

    // Resolves and caches the localized "updated by" label.
    private String getUpdatedByString() {
        if (updatedByString == null) {
            updatedByString = CoreApiServiceLocator.getKualiConfigurationService().getPropertyValueAsString(MESSAGE_UPDATED_BY);
        }
        return updatedByString;
    }

    /**
     * Persistence hook invoked after this attachment row is removed: deletes
     * the underlying AttachmentFile only when this was the last attachment
     * referencing it (countMatching == 1, i.e. only this row pointed at it).
     */
    protected void postRemove() {
        //if there aren't another other attachments to the actual file, then delete.
        Map<String, Object> values = new HashMap<String, Object>();
        values.put("fileId", getFileId());
        BusinessObjectService boService = KcServiceLocator.getService(BusinessObjectService.class);
        if (boService.countMatching(CoiDisclosureAttachment.class, values) == 1) {
            boService.delete(getFile());
        }
    }

    public String getUsageSectionId() {
        return usageSectionId;
    }

    public void setUsageSectionId(String usageSectionId) {
        this.usageSectionId = usageSectionId;
    }
}
/**
 * Copyright 2005 Cordys R&D B.V.
 *
 * This file is part of the Cordys File Connector.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog;

import com.cordys.coe.ac.fileconnector.extensions.directorypoller.FileContext;
import com.cordys.coe.ac.fileconnector.extensions.directorypoller.LogMessages;
import com.cordys.coe.ac.fileconnector.extensions.directorypoller.Utils;
import com.cordys.coe.ac.fileconnector.extensions.directorypoller.states.EFileState;
import com.cordys.coe.ac.fileconnector.extensions.directorypoller.states.FileStateException;
import com.cordys.coe.ac.fileconnector.extensions.directorypoller.states.IFileState;

import com.eibus.util.logger.CordysLogger;
import com.eibus.util.logger.Severity;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;

import java.util.Date;

import javax.xml.stream.XMLStreamWriter;

/**
 * Implements a file state log file which is used to keep track of file's state in the processing
 * folder. This is needed to be able to check if a SOAP request has been sent for this file when
 * file processing is resumed after a restart (e.g. after a crash).
 *
 * <p>Binary entry layout (see {@link #startLogEntry}):
 * <pre>
 *   byte   ENTRY_START_MARKER
 *   short  entry length (bytes following this field)
 *   long   start timestamp (millis)
 *   long   finished timestamp (millis, -1 while unfinished)
 *   byte   state type ordinal
 *   ...    state-specific payload written by IFileState.writeToLog
 *   byte   ENTRY_END_MARKER
 * </pre>
 *
 * @author mpoyhone
 */
public class StateLog_ProcessingFolder
    implements IStateLogReader, IStateLogWriter
{
    /**
     * The log file name.
     */
    public static final String LOGFILE_NAME = "__FC_STATELOG.log";
    /**
     * Logger for log messages from this class.
     */
    private static final CordysLogger LOG = CordysLogger.getCordysLogger(StateLog_ProcessingFolder.class);
    /**
     * Marker value for the entry start.
     */
    private static final int ENTRY_START_MARKER = 0x57;
    /**
     * Marker value for the entry end.
     */
    private static final int ENTRY_END_MARKER = 0xED;
    /**
     * Byte offset of the finished timestamp within an entry: start marker (1)
     * + entry length short (2) + start timestamp long (8).
     */
    private static final long FINISHED_TIMESTAMP_OFFSET = 1 + 2 + 8;
    /**
     * Debug flag for disabling logging.
     */
    private static boolean enabled = true;
    /**
     * File object used for writing when the file is open.
     */
    private RandomAccessFile file = null;
    /**
     * File position last start entry. Used for writing the finished timestamp.
     */
    private long lastStartEntryPos = -1;
    /**
     * Contains the log file path.
     */
    private File logFile;

    /**
     * Factory method for creating a new instance.
     *
     * @param logFile Log file.
     *
     * @return New instance, or <code>null</code> if the log file does not exist.
     */
    public static StateLog_ProcessingFolder getInstance(File logFile)
    {
        if (!logFile.exists()) {
            return null;
        }

        StateLog_ProcessingFolder log = new StateLog_ProcessingFolder();

        log.logFile = logFile;

        return log;
    }

    /**
     * Factory method for creating a new instance.
     *
     * @param ctx File context.
     * @param create If <code>true</code> the log file can be created.
     *
     * @return New instance, or <code>null</code> if the processing folder is missing, or the
     *         log file does not exist and <code>create</code> is <code>false</code>.
     */
    public static StateLog_ProcessingFolder getInstance(FileContext ctx, boolean create)
    {
        File folder = ctx.getProcessingFolder();

        if ((folder == null) || !folder.exists()) {
            return null;
        }

        File logFile = new File(folder, LOGFILE_NAME);

        if (!logFile.exists() && !create) {
            return null;
        }

        StateLog_ProcessingFolder log = new StateLog_ProcessingFolder();

        log.logFile = logFile;

        return log;
    }

    /**
     * @see com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog.IStateLogWriter#close()
     */
    public void close()
    {
        if (file != null) {
            try {
                file.close();
            }
            catch (Exception e) {
                LOG.log(Severity.WARN, "Log file closing failed: " + logFile, e);
            }

            file = null;
        }
    }

    /**
     * Writes the finished timestamp into the most recently started entry.
     *
     * @see com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog.IStateLogWriter#finishLogEntry()
     */
    public void finishLogEntry()
                        throws FileStateException
    {
        if (lastStartEntryPos == -1) {
            throw new FileStateException(FileStateException.EType.INTERNAL,
                                         LogMessages.NO_START_ENTRY_WRITTEN);
        }

        try {
            long timeStamp = System.currentTimeMillis();
            long current;

            // Move the file pointer to the finished timestamp, write it, and
            // restore the pointer so appending continues where it left off.
            current = file.getFilePointer();
            file.seek(lastStartEntryPos + FINISHED_TIMESTAMP_OFFSET);
            file.writeLong(timeStamp);
            file.seek(current);
        }
        catch (Exception e) {
            throw new FileStateException(e, FileStateException.EType.ABORT,
                                         LogMessages.UNABLE_TO_WIRTE_STATE_TIMESTAMP, logFile);
        }

        lastStartEntryPos = -1;
    }

    /**
     * Opens the log file, positioning at the end when opened for writing (append).
     *
     * @see com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog.IStateLogWriter#open(boolean)
     */
    public void open(boolean forWriting)
              throws FileStateException
    {
        if (file != null) {
            // Already open.
            return;
        }

        long pos;

        try {
            // "rws" also flushes metadata on every write, so entries survive a crash.
            file = new RandomAccessFile(logFile, forWriting ? "rws" : "r");

            if (forWriting) {
                pos = file.length();

                if (pos > 0) {
                    file.seek(pos);
                }
            }
        }
        catch (FileNotFoundException e) {
            throw new FileStateException(e, FileStateException.EType.ABORT,
                                         LogMessages.UNABLE_TO_OPEN_STATE_LOG_FILE, logFile);
        }
        catch (IOException e) {
            throw new FileStateException(e, FileStateException.EType.ABORT,
                                         LogMessages.UNABLE_TO_READ_STATE_LOG_FILE, logFile);
        }
    }

    /**
     * @see com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog.IStateLogReader#readLog(com.cordys.coe.ac.fileconnector.extensions.directorypoller.FileContext,
     *      javax.xml.stream.XMLStreamWriter)
     */
    public IFileState readLog(FileContext fileContext, XMLStreamWriter xmlWriter)
                       throws FileStateException
    {
        if (logFile.exists()) {
            close();

            return parseLogFile(fileContext, xmlWriter);
        } else {
            throw new FileStateException(FileStateException.EType.ABORT,
                                         LogMessages.LOG_FILE_NOT_EXIST, logFile);
        }
    }

    /**
     * Appends a new entry for the given state. When <code>finished</code> is <code>false</code>
     * the finished timestamp is written as -1 and is filled in later by
     * {@link #finishLogEntry()}.
     *
     * @see com.cordys.coe.ac.fileconnector.extensions.directorypoller.statelog.IStateLogWriter#startLogEntry(com.cordys.coe.ac.fileconnector.extensions.directorypoller.states.IFileState,
     *      boolean)
     */
    public void startLogEntry(IFileState state, boolean finished)
                       throws FileStateException
    {
        if (!enabled) {
            return;
        }

        // Open just to be sure.
        open(true);

        try {
            // Build the complete entry in memory first, so the length field can be
            // patched in before anything touches the file.
            long timeStamp = System.currentTimeMillis();
            ByteArrayOutputStream out = new ByteArrayOutputStream(512);
            DataOutputStream dataOut = new DataOutputStream(out);
            byte[] entryData;
            int dataLength;

            dataOut.write(ENTRY_START_MARKER);
            dataOut.writeShort(0); // This will contain the entry length.
            dataOut.writeLong(timeStamp);
            dataOut.writeLong((!finished) ? -1 : timeStamp); // This is the finish timestamp.
            dataOut.writeByte(state.getStateType().ordinal());
            state.writeToLog(dataOut);
            dataOut.write(ENTRY_END_MARKER);
            dataOut.flush();

            entryData = out.toByteArray();
            dataLength = entryData.length - 3; // Entry length after entry length.
            entryData[1] = (byte) (0xFF & (dataLength >> 8));
            entryData[2] = (byte) (0xFF & dataLength);

            // Write the entry data to the file.
            lastStartEntryPos = file.getFilePointer();
            file.write(entryData);
        }
        catch (Exception e) {
            // Fixed: the original dropped 'e', losing the root cause; every other
            // catch in this class chains the exception.
            throw new FileStateException(e, FileStateException.EType.ABORT,
                                         LogMessages.UNABLE_TO_WRITE_STATE_TO_LOG_FILE,
                                         state.getStateType(), logFile);
        }
    }

    /**
     * Returns a state enumeration by the ID.
     *
     * @param id State ID
     *
     * @return Enumeration object or <code>null</code> if the ID was not valid.
     */
    private static EFileState getStateById(int id)
    {
        EFileState[] states = EFileState.values();

        if ((id < 0) || (id >= states.length)) {
            return null;
        }

        return states[id];
    }

    /**
     * Parses the log file entries in to this object.
     *
     * @param fileContext File context needed to create the states.
     * @param logWriter Optional XML writer which writes the entry into XML.
     *
     * @return Last state that was read successfully.
     *
     * @throws FileStateException Thrown if file was corrupt.
     */
    private IFileState parseLogFile(FileContext fileContext, XMLStreamWriter logWriter)
                             throws FileStateException
    {
        // Local reader; deliberately not the 'file' field (renamed from the
        // original, which shadowed the field).
        RandomAccessFile readFile = null;
        IFileState currentState = null;

        try {
            long length;

            // Open the file for reading.
            readFile = new RandomAccessFile(logFile, "r");
            length = readFile.length();

            // Read all entries.
            while (readFile.getFilePointer() < length) {
                int marker;

                // Start marker is read first. We must be able to read it properly,
                // or the log file is corrupt.
                marker = readFile.read();

                if (marker != ENTRY_START_MARKER) {
                    LOG.log(Severity.INFO,
                            "Invalid entry start marker " + marker + " in state log: " + logFile);
                    break;
                }

                // Read the entry length and the entry data into an array.
                // read(byte[]) may legally return fewer bytes than requested,
                // so loop until the buffer is full or EOF is hit.
                short entryLength = readFile.readShort();
                byte[] entryData = new byte[entryLength];
                int readCount = 0;

                while (readCount < entryLength) {
                    int count = readFile.read(entryData, readCount, entryLength - readCount);

                    if (count < 0) {
                        break;
                    }
                    readCount += count;
                }

                if (readCount < entryLength) {
                    LOG.log(Severity.INFO,
                            "End of file reached while reading state log file: " + logFile);
                    break;
                }

                // These read operations might fail, if the entry is not complete.
                try {
                    DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(entryData));
                    long startTimestamp;
                    long finishedTimestamp;
                    int stateId;
                    EFileState stateEnum;
                    IFileState state = null;

                    startTimestamp = dataIn.readLong();
                    finishedTimestamp = dataIn.readLong();
                    stateId = dataIn.readByte();
                    stateEnum = getStateById(stateId);

                    if (stateEnum == null) {
                        LOG.log(Severity.INFO,
                                "Invalid state ID " + stateId + " in state log file: " + logFile);
                        break;
                    }

                    // Reuse the current state object when the entry is for the same
                    // state type; otherwise create a successor state.
                    if ((currentState == null) || (currentState.getStateType() != stateEnum)) {
                        state = stateEnum.createState(currentState, fileContext);
                    } else {
                        state = currentState;
                    }

                    if (state == null) {
                        LOG.log(Severity.INFO,
                                "Current state not found from in state log: " + logFile);
                        break;
                    }

                    if (logWriter != null) {
                        logWriter.writeStartElement("state");
                        logWriter.writeAttribute("type", stateEnum.toString());
                        Utils.writeXmlAttribute(logWriter, "started",
                                                (startTimestamp != -1) ? new Date(startTimestamp)
                                                                       : null);
                        Utils.writeXmlAttribute(logWriter, "finished",
                                                (finishedTimestamp != -1)
                                                ? new Date(finishedTimestamp) : null);
                    }

                    state.readFromLog(dataIn, finishedTimestamp != -1, logWriter);

                    marker = dataIn.read();

                    if (marker != ENTRY_END_MARKER) {
                        LOG.log(Severity.ERROR,
                                "Invalid entry end marker " + marker + " in state log: " + logFile);
                        break;
                    }

                    if (logWriter != null) {
                        logWriter.writeEndElement();
                    }

                    currentState = state;
                }
                catch (Exception e) {
                    LOG.log(Severity.INFO,
                            "Error while reading entry from state log: " + logFile, e);
                    break;
                }
            }
        }
        catch (Exception e) {
            throw new FileStateException(e, FileStateException.EType.ABORT,
                                         LogMessages.UNABLE_TO_READ_STATE_LOG, logFile);
        }
        finally {
            if (readFile != null) {
                try {
                    readFile.close();
                }
                catch (Exception e) {
                    LOG.log(Severity.INFO, "Log file closing failed: " + logFile, e);
                }
            }
        }

        return currentState;
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.codeInsight.completion;

import com.intellij.JavaTestUtil;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.completion.LightCompletionTestCase;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupElementPresentation;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiMethod;
import com.intellij.testFramework.NeedsIndex;
import org.jetbrains.annotations.NotNull;

/**
 * Completion tests for Java keyword suggestions. Each test configures the
 * editor from a fixture file named after the test method (under
 * {@link #BASE_PATH}), invokes completion, and either asserts the expected
 * keyword lookup items (via {@code doTest(count, keywords...)}) or compares
 * the editor contents against a {@code <name>_after.java} file
 * (via {@code doTest()}).
 */
public class KeywordCompletionTest extends LightCompletionTestCase {
  // Root of the fixture files, relative to the Java test data path.
  private static final String BASE_PATH = "/codeInsight/completion/keywords/";

  // Shared expected-keyword sets for file/class scope tests.
  // NOTE(review): some arrays end with a null entry — presumably a sentinel
  // handled by testByCount; confirm against its implementation.
  private static final String[] CLASS_SCOPE_KEYWORDS = {
    "public", "private", "protected", "import", "final", "class", "interface", "abstract", "enum", "default", "record", null};
  private static final String[] CLASS_SCOPE_KEYWORDS_2 = {
    "package", "public", "private", "protected", "transient", "volatile", "static", "import", "final", "class", "interface", "abstract", "default", "record"};
  private static final String[] INTERFACE_SCOPE_KEYWORDS = {
    "package", "public", "private", "protected", "transient", "volatile", "static", "import", "final", "class", "interface", "abstract", "default"};

  @NotNull
  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath();
  }

  // --- file / class / interface scope ---

  public void testFileScope1() {
    doTest(8, "package", "public", "import", "final", "class", "interface", "abstract", "enum");
    assertNotContainItems("private", "default");
  }

  public void testFileScopeAfterComment() { doTest(4, "package", "class", "import", "public", "private"); }
  public void testFileScopeAfterJavaDoc() { doTest(4, "package", "class", "import", "public", "private"); }
  public void testFileScopeAfterJavaDocInsideModifierList() { doTest(2, "class", "public"); }
  public void testFileScope2() { doTest(8, CLASS_SCOPE_KEYWORDS); }
  public void testClassScope1() { doTest(6, CLASS_SCOPE_KEYWORDS); }
  public void testClassScope2() { doTest(5, CLASS_SCOPE_KEYWORDS); }
  public void testClassScope3() { doTest(0, CLASS_SCOPE_KEYWORDS); }
  public void testClassScope4() { doTest(11, CLASS_SCOPE_KEYWORDS_2); }
  public void testInnerClassScope() { doTest(11, CLASS_SCOPE_KEYWORDS_2); }

  public void testInterfaceScope() {
    setLanguageLevel(LanguageLevel.JDK_1_8);
    doTest(8, INTERFACE_SCOPE_KEYWORDS);
  }

  public void testAfterAnnotations() { doTest(7, "public", "final", "class", "interface", "abstract", "enum", "record", null); }
  public void testAfterAnnotationsWithParams() { doTest(7, "public", "final", "class", "interface", "abstract", "enum", "record", null); }
  public void testAfterAnnotationsWithParamsInClass() { doTest(7, "public", "private", "final", "class", "interface", "abstract", "enum"); }

  // --- extends / implements ---

  public void testExtends1() { doTest(2, "extends", "implements", null); }
  public void testExtends2() { doTest(1, "extends", "implements", "AAA", "BBB", "instanceof"); }
  public void testExtends3() { doTest(2, "extends", "implements", "AAA", "BBB", "CCC", "instanceof"); }
  public void testExtends4() { doTest(2, "extends", "implements", "AAA", "BBB", "CCC", "instanceof"); }
  public void testExtends5() { doTest(1, "extends", "implements", "AAA", "BBB", "CCC", "instanceof"); }
  public void testExtends6() { doTest(1, "extends", "implements", "AAA", "BBB", "CCC", "instanceof"); }
  public void testExtends7() { doTest(); }
  public void testExtends8() { doTest(); }
  public void testExtends9() { doTest(); }
  public void testExtends10() { doTest(); }
  public void testExtends11() { doTest(); }
  public void testExtends12() { doTest(); }
  public void testExtends13() { doTest(); }
  public void testExtendsAfterClassGenerics() { doTest(2, "extends", "implements"); }

  // --- statement / method body scope ---

  public void testSynchronized1() { doTest(); }

  public void testSynchronized2() {
    // Keyword insertion must respect the code-style setting for the space
    // before the synchronized parentheses.
    CodeStyle.getSettings(getProject()).getCommonSettings(JavaLanguage.INSTANCE).SPACE_BEFORE_SYNCHRONIZED_PARENTHESES = false;
    doTest();
  }

  public void testMethodScope1() { doTest(1, "throws"); }
  public void testMethodScope2() { doTest(1, "final", "public", "static", "volatile", "abstract"); }
  public void testMethodScope3() { doTest(1, "final", "public", "static", "volatile", "abstract", "throws", "instanceof"); }
  public void testMethodScope4() { doTest(6, "final", "try", "for", "while", "return", "throw"); }
  public void testMethodScope5() { doTest(); }
  public void testElseAfterSemicolon() { doTest(1, "else"); }
  public void testElseAfterRBrace() { doTest(); }
  public void testExtraBracketAfterFinally1() { doTest(); }
  public void testExtraBracketAfterFinally2() { doTest(); }
  public void testExtendsInCastTypeParameters() { doTest(); }
  public void testExtendsInCastTypeParameters2() { doTest(2, "extends", "super"); }
  public void testExtendsWithRightContextInClassTypeParameters() { doTest(); }
  public void testTrueInVariableDeclaration() { doTest(); }
  public void testNullInIf() { doTest(); }
  public void testNullInReturn() { doTest(); }
  public void testExtendsInMethodParameters() { doTest(); }
  public void testInstanceOf1() { doTest(); }
  public void testInstanceOf2() { doTest(); }
  public void testInstanceOf3() { doTest(); }
  public void testCatchFinally() { doTest(2, "catch", "finally"); }
  public void testSecondCatch() { doTest(2, "catch", "finally"); }
  public void testSuper1() { doTest(1, "super"); }
  public void testSuper2() { doTest(0, "super"); }
  public void testSuper3() { doTest(); }
  public void testSuper4() { doTest(0, "class"); }
  public void testContinue() { doTest(); }
  public void testThrowsOnSeparateLine() { doTest(); }
  public void testDefaultInAnno() { doTest(); }
  public void testNullInMethodCall() { doTest(); }
  public void testNullInMethodCall2() { doTest(); }

  // --- 'new' in various expression contexts ---

  @NeedsIndex.ForStandardLibrary
  public void testNewInMethodRefs() {
    doTest(1, "new", "null", "true", "false");
    LookupElementPresentation presentation = NormalCompletionTestCase.renderElement(myItems[0]);
    assertEquals("new", presentation.getItemText());
    assertEmpty(presentation.getTailText());
    selectItem(myItems[0]);
    checkResultByTestName();
  }

  @NeedsIndex.ForStandardLibrary
  public void testNewInMethodRefsArray() {
    doTest(1, "new", "null", "true", "false");
    assertEquals("Object", assertInstanceOf(myItems[0].getPsiElement(), PsiMethod.class).getName());
    selectItem(myItems[0]);
    checkResultByTestName();
  }

  public void testNewInCast() {
    doTest(2, "new", "null", "true", "false");
  }

  public void testNewInNegation() {
    if (getIndexingMode() == IndexingMode.DUMB_EMPTY_INDEX) {
      // Object's methods are not found in empty indices, so the only element is inserted
      doTest();
    }
    else {
      doTest(1, "new", "null", "true", "false");
    }
  }

  // --- instanceof ---

  public void testSpaceAfterInstanceof() { doTest(); }
  public void testInstanceofAfterUnresolved() { doTest(1, "instanceof"); }
  public void testInstanceofAfterStatementStart() { doTest(1, "instanceof"); }
  public void testNoInstanceofInAnnotation() { doTest(0, "instanceof"); }
  public void testInstanceofNegated() { doTest(); }

  public void testInstanceofNegation() {
    configureByTestName();
    // '!' selection char wraps the instanceof expression in a negation.
    selectItem(myItems[0], '!');
    checkResultByTestName();
  }

  // --- primitives & literals in annotation attributes and types ---

  public void testNoPrimitivesInBooleanAnnotationAttribute() { doTest(1, "true", "int", "boolean"); }
  public void testNoPrimitivesInIntAnnotationValueAttribute() { doTest(0, "true", "int", "boolean"); }
  public void testNoPrimitivesInEnumAnnotationAttribute() { doTest(0, "true", "int", "boolean"); }
  @NeedsIndex.ForStandardLibrary
  public void testPrimitivesInClassAnnotationValueAttribute() { doTest(2, "true", "int", "boolean"); }
  @NeedsIndex.ForStandardLibrary
  public void testPrimitivesInClassAnnotationAttribute() { doTest(3, "true", "int", "boolean"); }
  public void testPrimitivesInMethodReturningArray() { doTest(2, "true", "byte", "boolean"); }
  public void testPrimitivesInMethodReturningClass() { doTest(3, "byte", "boolean", "void"); }
  public void testPrimitivesInRecordHeader() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(2, "byte", "boolean"); }
  public void testNoClassKeywordsInLocalArrayInitializer() { doTest(0, "class", "interface", "enum"); }
  public void testNoClassKeywordsInFieldArrayInitializer() { doTest(0, "class", "interface", "enum"); }
  public void testImportStatic() { doTest(1, "static"); }
  public void testAbstractInInterface() { doTest(1, "abstract"); }
  public void testCharInAnnotatedParameter() { doTest(1, "char"); }
  public void testReturnInTernary() { doTest(1, "return"); }
  public void testReturnInRussian() { doTest(1, "return"); }
  public void testReturnWithTypo() { doTest(1, "return"); }

  // --- 'final' in various positions ---

  public void testFinalAfterParameterAnno() { doTest(2, "final", "float", "class"); }
  public void testFinalAfterParameterAnno2() { doTest(2, "final", "float", "class"); }
  public void testFinalAfterCase() { doTest(3, "final", "float", "class"); }
  public void testNoCaseInsideWhileInSwitch() { doTest(0, "case", "default"); }
  public void testIndentDefaultInSwitch() { doTest(); }
  public void testFinalInCatch() { doTest(1, "final"); }
  public void testFinalInIncompleteCatch() { doTest(1, "final"); }
  public void testFinalInCompleteCatch() { doTest(1, "final"); }
  public void testFinalInTryWithResources() { doTest(1, "final", "class"); }
  public void testFinalInCompleteTryWithResources() { doTest(1, "final", "float", "class"); }
  public void testFinalInLambda() { doTest(2, "final", "float"); }
  public void testNoFinalAfterTryBody() { doTest(1, "final", "finally"); }

  // --- types in local / generic positions ---

  public void testClassInMethod() { doTest(2, "class", "char"); }
  public void testClassInMethodOvertype() { doTest(2, "class", "char"); }
  public void testIntInClassArray() { doTest(2, "int", "char", "final"); }
  public void testIntInClassArray2() { doTest(2, "int", "char", "final"); }
  public void testIntInClassArray3() { doTest(2, "int", "char", "final"); }
  public void testArrayClass() { doTest(1, "class", "interface"); }
  public void testIntInGenerics() { doTest(2, "int", "char", "final"); }
  public void testIntInGenerics2() { doTest(2, "int", "char", "final"); }
  public void testBreakInLabeledBlock() { doTest(1, "break label", "continue"); }
  public void testPrimitiveInForLoop() { doTest(1, "int"); }
  public void testPrimitiveInEnumConstructorCast() { doTest(1, "int"); }
  public void testNoStatementInForLoopCondition() { doTest(0, "synchronized", "if"); }
  public void testNoStatementInForLoopUpdate() { doTest(0, "synchronized", "if"); }
  public void testSuggestModifiersAfterUnfinishedMethod() { doTest(1, "public"); }

  public void testPrivateInJava9Interface() {
    setLanguageLevel(LanguageLevel.JDK_1_9);
    doTest();
  }

  public void testQualifiedNew() { doTest(1, "new"); }

  // --- records / sealed classes (language-level dependent) ---

  public void testRecord() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(); }
  public void testRecordInFileScope() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(1, "record"); }
  public void testNoLocalInterfaceAt15() { setLanguageLevel(LanguageLevel.JDK_15); doTest(0); }
  public void testLocalInterface() { setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(); }
  public void testLocalEnum() { setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(); }
  public void testSealedModifier() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(1, "sealed"); }
  public void testNoSealedModifier() {setLanguageLevel(LanguageLevel.JDK_16); doTest(1, "final"); }
  public void testPermitsList() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(1, "permits"); }
  public void testEnumPermitsList() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(0, "permits"); }
  public void testInnerClassSealedModifier() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(1, "sealed");}
  public void testInterfaceInnerClassSealedModifier() {setLanguageLevel(LanguageLevel.JDK_15_PREVIEW); doTest(1, "sealed");}

  public void testOverwriteCatch() {
    configureByTestName();
    selectItem(myItems[0], Lookup.REPLACE_SELECT_CHAR);
    checkResultByTestName();
  }

  public void testFinalAfterAnnotationAttributes() { doTest(); }
  public void testAbstractLocalClass() { doTest(); }

  @NeedsIndex.ForStandardLibrary
  public void testTryInExpression() {
    configureByTestName();
    assertEquals("toString", myItems[0].getLookupString());
    assertEquals("this", myItems[1].getLookupString());
  }

  public void testAfterPackageAnnotation() {
    configureFromFileText("package-info.java", "@Anno <caret>");
    complete();
    testByCount(1, "package");
  }

  public void testAfterWildcard() {
    configureByTestName();
    assertStringItems("extends", "super");
  }

  /**
   * Configures the editor from {@code <testName>.java}, runs completion, and
   * compares the result with {@code <testName>_after.java}.
   */
  private void doTest() {
    configureByTestName();
    checkResultByTestName();
  }

  /** Loads the fixture file named after the current test and completes at the caret. */
  private void configureByTestName() {
    configureByFile(BASE_PATH + getTestName(true) + ".java");
  }

  /** Compares the editor contents with the {@code _after} fixture file. */
  private void checkResultByTestName() {
    checkResultByFile(BASE_PATH + getTestName(true) + "_after.java");
  }

  // todo: check included/excluded variants separately
  /**
   * Configures from the test-name fixture and asserts on the produced lookup
   * items via {@code testByCount(finalCount, values)}.
   */
  protected void doTest(int finalCount, String... values) {
    configureByTestName();
    testByCount(finalCount, values);
  }
}
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.net.wifi;

import android.net.NetworkInfo;
import android.net.wifi.p2p.WifiP2pConfig;
import android.net.wifi.p2p.WifiP2pDevice;
import android.net.wifi.p2p.WifiP2pGroup;
import android.net.wifi.p2p.WifiP2pService;
import android.net.wifi.p2p.WifiP2pService.P2pStatus;
import android.net.wifi.p2p.WifiP2pProvDiscEvent;
import android.net.wifi.p2p.nsd.WifiP2pServiceResponse;
import android.net.wifi.StateChangeResult;
import android.os.Message;
import android.util.Log;

import com.android.internal.util.Protocol;
import com.android.internal.util.StateMachine;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.regex.Matcher;

/**
 * Listens for events from the wpa_supplicant server, and passes them on
 * to the {@link StateMachine} for handling. Runs in its own thread.
 *
 * @hide
 */
public class WifiMonitor {

    private static final boolean DBG = false;
    private static final String TAG = "WifiMonitor";

    /** Events we receive from the supplicant daemon */
    private static final int CONNECTED = 1;
    private static final int DISCONNECTED = 2;
    private static final int STATE_CHANGE = 3;
    private static final int SCAN_RESULTS = 4;
    private static final int LINK_SPEED = 5;
    private static final int TERMINATING = 6;
    private static final int DRIVER_STATE = 7;
    private static final int EAP_FAILURE = 8;
    private static final int ASSOC_REJECT = 9;
    private static final int UNKNOWN = 10;

    /** All events coming from the supplicant start with this prefix */
    private static final String EVENT_PREFIX_STR = "CTRL-EVENT-";
    private static final int EVENT_PREFIX_LEN_STR = EVENT_PREFIX_STR.length();

    /** All WPA events coming from the supplicant start with this prefix */
    private static final String WPA_EVENT_PREFIX_STR = "WPA:";
    private static final String PASSWORD_MAY_BE_INCORRECT_STR =
            "pre-shared key may be incorrect";

    /* WPS events */
    private static final String WPS_SUCCESS_STR = "WPS-SUCCESS";

    /* Format: WPS-FAIL msg=%d [config_error=%d] [reason=%d (%s)] */
    private static final String WPS_FAIL_STR = "WPS-FAIL";
    private static final String WPS_FAIL_PATTERN =
            "WPS-FAIL msg=\\d+(?: config_error=(\\d+))?(?: reason=(\\d+))?";

    /* config error code values for config_error=%d */
    private static final int CONFIG_MULTIPLE_PBC_DETECTED = 12;
    private static final int CONFIG_AUTH_FAILURE = 18;

    /* reason code values for reason=%d */
    private static final int REASON_TKIP_ONLY_PROHIBITED = 1;
    private static final int REASON_WEP_PROHIBITED = 2;

    private static final String WPS_OVERLAP_STR = "WPS-OVERLAP-DETECTED";
    private static final String WPS_TIMEOUT_STR = "WPS-TIMEOUT";

    /**
     * Names of events from wpa_supplicant (minus the prefix). In the
     * format descriptions, &quot;<code>x</code>&quot; designates a dynamic
     * value that needs to be parsed out from the event string.
     */

    /**
     * <pre>
     * CTRL-EVENT-CONNECTED - Connection to xx:xx:xx:xx:xx:xx completed
     * </pre>
     * <code>xx:xx:xx:xx:xx:xx</code> is the BSSID of the associated access point
     */
    private static final String CONNECTED_STR = "CONNECTED";
    /**
     * <pre>
     * CTRL-EVENT-DISCONNECTED - Disconnect event - remove keys
     * </pre>
     */
    private static final String DISCONNECTED_STR = "DISCONNECTED";
    /**
     * <pre>
     * CTRL-EVENT-STATE-CHANGE x
     * </pre>
     * <code>x</code> is the numerical value of the new state.
     */
    private static final String STATE_CHANGE_STR = "STATE-CHANGE";
    /**
     * <pre>
     * CTRL-EVENT-SCAN-RESULTS ready
     * </pre>
     */
    private static final String SCAN_RESULTS_STR = "SCAN-RESULTS";
    /**
     * <pre>
     * CTRL-EVENT-LINK-SPEED x Mb/s
     * </pre>
     * {@code x} is the link speed in Mb/sec.
     */
    private static final String LINK_SPEED_STR = "LINK-SPEED";
    /**
     * <pre>
     * CTRL-EVENT-TERMINATING - signal x
     * </pre>
     * <code>x</code> is the signal that caused termination.
     */
    private static final String TERMINATING_STR = "TERMINATING";
    /**
     * <pre>
     * CTRL-EVENT-DRIVER-STATE state
     * </pre>
     * <code>state</code> can be HANGED
     */
    private static final String DRIVER_STATE_STR = "DRIVER-STATE";
    /**
     * <pre>
     * CTRL-EVENT-EAP-FAILURE EAP authentication failed
     * </pre>
     */
    private static final String EAP_FAILURE_STR = "EAP-FAILURE";
    /**
     * This indicates an authentication failure on EAP FAILURE event
     */
    private static final String EAP_AUTH_FAILURE_STR = "EAP authentication failed";
    /**
     * This indicates an assoc reject event
     */
    private static final String ASSOC_REJECT_STR = "ASSOC-REJECT";

    /**
     * Regex pattern for extracting an Ethernet-style MAC address from a string.
     * Matches strings like the following:<pre>
     * CTRL-EVENT-CONNECTED - Connection to 00:1e:58:ec:d5:6d completed (reauth) [id=1 id_str=]</pre>
     * Group 1 is the BSSID, group 2 is the network id.
     */
    private static Pattern mConnectedEventPattern =
            Pattern.compile("((?:[0-9a-f]{2}:){5}[0-9a-f]{2}) .* \\[id=([0-9]+) ");

    /** P2P events */
    private static final String P2P_EVENT_PREFIX_STR = "P2P";

    /* P2P-DEVICE-FOUND fa:7b:7a:42:02:13 p2p_dev_addr=fa:7b:7a:42:02:13
       pri_dev_type=1-0050F204-1 name='p2p-TEST1' config_methods=0x188
       dev_capab=0x27 group_capab=0x0 */
    private static final String P2P_DEVICE_FOUND_STR = "P2P-DEVICE-FOUND";

    /* P2P-DEVICE-LOST p2p_dev_addr=42:fc:89:e1:e2:27 */
    private static final String P2P_DEVICE_LOST_STR = "P2P-DEVICE-LOST";

    /* P2P-FIND-STOPPED */
    private static final String P2P_FIND_STOPPED_STR = "P2P-FIND-STOPPED";

    /* P2P-GO-NEG-REQUEST 42:fc:89:a8:96:09 dev_passwd_id=4 */
    private static final String P2P_GO_NEG_REQUEST_STR = "P2P-GO-NEG-REQUEST";

    private static final String P2P_GO_NEG_SUCCESS_STR = "P2P-GO-NEG-SUCCESS";

    /* P2P-GO-NEG-FAILURE status=x */
    private static final String P2P_GO_NEG_FAILURE_STR = "P2P-GO-NEG-FAILURE";

    private static final String P2P_GROUP_FORMATION_SUCCESS_STR =
            "P2P-GROUP-FORMATION-SUCCESS";

    private static final String P2P_GROUP_FORMATION_FAILURE_STR =
            "P2P-GROUP-FORMATION-FAILURE";

    /* P2P-GROUP-STARTED p2p-wlan0-0 [client|GO] ssid="DIRECT-W8" freq=2437
       [psk=2182b2e50e53f260d04f3c7b25ef33c965a3291b9b36b455a82d77fd82ca15bc|
       passphrase="fKG4jMe3"] go_dev_addr=fa:7b:7a:42:02:13 [PERSISTENT] */
    private static final String P2P_GROUP_STARTED_STR = "P2P-GROUP-STARTED";

    /* P2P-GROUP-REMOVED p2p-wlan0-0 [client|GO] reason=REQUESTED */
    private static final String P2P_GROUP_REMOVED_STR = "P2P-GROUP-REMOVED";

    /* P2P-INVITATION-RECEIVED sa=fa:7b:7a:42:02:13 go_dev_addr=f8:7b:7a:42:02:13
       bssid=fa:7b:7a:42:82:13 unknown-network */
    private static final String P2P_INVITATION_RECEIVED_STR = "P2P-INVITATION-RECEIVED";

    /* P2P-INVITATION-RESULT status=1 */
    private static final String P2P_INVITATION_RESULT_STR = "P2P-INVITATION-RESULT";

    /* P2P-PROV-DISC-PBC-REQ 42:fc:89:e1:e2:27 p2p_dev_addr=42:fc:89:e1:e2:27
       pri_dev_type=1-0050F204-1 name='p2p-TEST2' config_methods=0x188
       dev_capab=0x27 group_capab=0x0 */
    private static final String P2P_PROV_DISC_PBC_REQ_STR = "P2P-PROV-DISC-PBC-REQ";

    /* P2P-PROV-DISC-PBC-RESP 02:12:47:f2:5a:36 */
    private static final String P2P_PROV_DISC_PBC_RSP_STR = "P2P-PROV-DISC-PBC-RESP";

    /* P2P-PROV-DISC-ENTER-PIN 42:fc:89:e1:e2:27 p2p_dev_addr=42:fc:89:e1:e2:27
       pri_dev_type=1-0050F204-1 name='p2p-TEST2' config_methods=0x188
       dev_capab=0x27 group_capab=0x0 */
    private static final String P2P_PROV_DISC_ENTER_PIN_STR = "P2P-PROV-DISC-ENTER-PIN";

    /* P2P-PROV-DISC-SHOW-PIN 42:fc:89:e1:e2:27 44490607
       p2p_dev_addr=42:fc:89:e1:e2:27 pri_dev_type=1-0050F204-1 name='p2p-TEST2'
       config_methods=0x188 dev_capab=0x27 group_capab=0x0 */
    private static final String P2P_PROV_DISC_SHOW_PIN_STR = "P2P-PROV-DISC-SHOW-PIN";

    /* P2P-PROV-DISC-FAILURE p2p_dev_addr=42:fc:89:e1:e2:27 */
    private static final String P2P_PROV_DISC_FAILURE_STR = "P2P-PROV-DISC-FAILURE";

    /*
     * Protocol format is as follows.<br>
     * See the Table.62 in the WiFi Direct specification for the detail.
     * ______________________________________________________________
     * |           Length(2byte)     | Type(1byte) | TransId(1byte)} |
     * ______________________________________________________________
     * |           status(1byte)     |       vendor specific(variable) |
     *
     * P2P-SERV-DISC-RESP 42:fc:89:e1:e2:27 1 0300000101
     * length=3, service type=0(ALL Service), transaction id=1,
     * status=1(service protocol type not available)<br>
     *
     * P2P-SERV-DISC-RESP 42:fc:89:e1:e2:27 1 0300020201
     * length=3, service type=2(UPnP), transaction id=2,
     * status=1(service protocol type not available)
     *
     * P2P-SERV-DISC-RESP 42:fc:89:e1:e2:27 1 990002030010757569643a3131323
     * 2646534652d383537342d353961622d393332322d3333333435363738393034343a3
     * a75726e3a736368656d61732d75706e702d6f72673a736572766963653a436f6e746
     * 56e744469726563746f72793a322c757569643a36383539646564652d383537342d3
     * 53961622d393333322d3132333435363738393031323a3a75706e703a726f6f74646
     * 576696365
     * length=153,type=2(UPnP),transaction id=3,status=0
     *
     * UPnP Protocol format is as follows.
     * ______________________________________________________
     * |  Version (1)  |          USN (Variable)            |
     *
     * version=0x10(UPnP1.0) data=usn:uuid:1122de4e-8574-59ab-9322-33345678
     * 9044::urn:schemas-upnp-org:service:ContentDirectory:2,usn:uuid:6859d
     * ede-8574-59ab-9332-123456789012::upnp:rootdevice
     *
     * P2P-SERV-DISC-RESP 58:17:0c:bc:dd:ca 21 1900010200045f6970
     * 70c00c000c01094d795072696e746572c027
     * length=25, type=1(Bonjour),transaction id=2,status=0
     *
     * Bonjour Protocol format is as follows.
     * __________________________________________________________
     * |DNS Name(Variable)|DNS Type(1)|Version(1)|RDATA(Variable)|
     *
     * DNS Name=_ipp._tcp.local.,DNS type=12(PTR), Version=1,
     * RDATA=MyPrinter._ipp._tcp.local.
     */
    private static final String P2P_SERV_DISC_RESP_STR = "P2P-SERV-DISC-RESP";

    private static final String HOST_AP_EVENT_PREFIX_STR = "AP";
    /* AP-STA-CONNECTED 42:fc:89:a8:96:09 dev_addr=02:90:4c:a0:92:54 */
    private static final String AP_STA_CONNECTED_STR = "AP-STA-CONNECTED";
    /* AP-STA-DISCONNECTED 42:fc:89:a8:96:09 */
    private static final String AP_STA_DISCONNECTED_STR = "AP-STA-DISCONNECTED";

    /* Supplicant events reported to a state machine */
    private static final int BASE = Protocol.BASE_WIFI_MONITOR;

    /* Connection to supplicant established */
    public static final int SUP_CONNECTION_EVENT                 = BASE + 1;
    /* Connection to supplicant lost */
    public static final int SUP_DISCONNECTION_EVENT              = BASE + 2;
    /* Network connection completed */
    public static final int NETWORK_CONNECTION_EVENT             = BASE + 3;
    /* Network disconnection completed */
    public static final int NETWORK_DISCONNECTION_EVENT          = BASE + 4;
    /* Scan results are available */
    public static final int SCAN_RESULTS_EVENT                   = BASE + 5;
    /* Supplicant state changed */
    public static final int SUPPLICANT_STATE_CHANGE_EVENT        = BASE + 6;
    /* Password failure and EAP authentication failure */
    public static final int AUTHENTICATION_FAILURE_EVENT         = BASE + 7;
    /* WPS success detected */
    public static final int WPS_SUCCESS_EVENT                    = BASE + 8;
    /* WPS failure detected */
    public static final int WPS_FAIL_EVENT                       = BASE + 9;
    /* WPS overlap detected */
    public static final int WPS_OVERLAP_EVENT                    = BASE + 10;
    /* WPS timeout detected */
    public static final int WPS_TIMEOUT_EVENT                    = BASE + 11;
    /* Driver was hung */
    public static final int DRIVER_HUNG_EVENT                    = BASE + 12;

    /* P2P events */
    public static final int P2P_DEVICE_FOUND_EVENT               = BASE + 21;
    public static final int P2P_DEVICE_LOST_EVENT                = BASE + 22;
    public static final int P2P_GO_NEGOTIATION_REQUEST_EVENT     = BASE + 23;
    public static final int P2P_GO_NEGOTIATION_SUCCESS_EVENT     = BASE + 25;
    public static final int P2P_GO_NEGOTIATION_FAILURE_EVENT     = BASE + 26;
    public static final int P2P_GROUP_FORMATION_SUCCESS_EVENT    = BASE + 27;
    public static final int P2P_GROUP_FORMATION_FAILURE_EVENT    = BASE + 28;
    public static final int P2P_GROUP_STARTED_EVENT              = BASE + 29;
    public static final int P2P_GROUP_REMOVED_EVENT              = BASE + 30;
    public static final int P2P_INVITATION_RECEIVED_EVENT        = BASE + 31;
    public static final int P2P_INVITATION_RESULT_EVENT          = BASE + 32;
    public static final int P2P_PROV_DISC_PBC_REQ_EVENT          = BASE + 33;
    public static final int P2P_PROV_DISC_PBC_RSP_EVENT          = BASE + 34;
    public static final int P2P_PROV_DISC_ENTER_PIN_EVENT        = BASE + 35;
    public static final int P2P_PROV_DISC_SHOW_PIN_EVENT         = BASE + 36;
    public static final int P2P_FIND_STOPPED_EVENT               = BASE + 37;
    public static final int P2P_SERV_DISC_RESP_EVENT             = BASE + 38;
    public static final int P2P_PROV_DISC_FAILURE_EVENT          = BASE + 39;

    /* hostap events */
    public static final int AP_STA_DISCONNECTED_EVENT            = BASE + 41;
    public static final int AP_STA_CONNECTED_EVENT               = BASE + 42;

    /* Indicates assoc reject event */
    public static final int ASSOCIATION_REJECTION_EVENT          = BASE + 43;

    /**
     * This indicates the supplicant connection for the monitor is closed
     */
    private static final String MONITOR_SOCKET_CLOSED_STR = "connection closed";

    /**
     * This indicates a read error on the monitor socket connection
     */
    private static final String WPA_RECV_ERROR_STR = "recv error";

    /**
     * Max errors before we close supplicant connection
     */
    private static final int MAX_RECV_ERRORS = 10;

    // Interface this monitor instance is bound to; key into the singleton's iface map.
    private final String mInterfaceName;
    private final WifiNative mWifiNative;
    // State machine that receives all events parsed for this interface.
    private final StateMachine mWifiStateMachine;
    // True while events for this interface should be delivered (set by the singleton).
    private boolean mMonitoring;

    /**
     * Creates a monitor bound to the given native interface and registers it
     * with the process-wide {@link WifiMonitorSingleton}.
     */
    public WifiMonitor(StateMachine wifiStateMachine, WifiNative wifiNative) {
        if (DBG) Log.d(TAG, "Creating WifiMonitor");
        mWifiNative = wifiNative;
        mInterfaceName = wifiNative.mInterfaceName;
        mWifiStateMachine = wifiStateMachine;
        mMonitoring = false;

        WifiMonitorSingleton.getMonitor().registerInterfaceMonitor(mInterfaceName, this);
    }

    /** Starts event delivery for this monitor's interface (see singleton). */
    public void startMonitoring() {
        WifiMonitorSingleton.getMonitor().startMonitoring(mInterfaceName);
    }

    /** Stops event delivery for this monitor's interface (see singleton). */
    public void stopMonitoring() {
        WifiMonitorSingleton.getMonitor().stopMonitoring(mInterfaceName);
    }

    public void stopSupplicant() {
        WifiMonitorSingleton.getMonitor().stopSupplicant();
    }

    public void killSupplicant(boolean p2pSupported) {
        WifiMonitorSingleton.getMonitor().killSupplicant(p2pSupported);
    }

    public void closeSupplicantConnection() {
        WifiMonitorSingleton.getMonitor().closeSupplicantConnection();
    }

    /**
     * Process-wide registry mapping interface name -> WifiMonitor, owning the
     * single supplicant connection and the single {@link MonitorThread}.
     */
    private static class WifiMonitorSingleton {
        private static Object sSingletonLock = new Object();
        private static WifiMonitorSingleton sWifiMonitorSingleton = null;
        private HashMap<String, WifiMonitor> mIfaceMap = new HashMap<String, WifiMonitor>();
        // True once connectToSupplicant() has succeeded and MonitorThread is running.
        private boolean mConnected = false;
        // First registered monitor's native handle; shared for all control calls.
        private WifiNative mWifiNative;

        private WifiMonitorSingleton() {
        }

        /** Lazily creates and returns the singleton (double lookup is guarded by sSingletonLock). */
        static WifiMonitorSingleton getMonitor() {
            if (DBG) Log.d(TAG, "WifiMonitorSingleton gotten");
            synchronized (sSingletonLock) {
                if (sWifiMonitorSingleton == null) {
                    if (DBG) Log.d(TAG, "WifiMonitorSingleton created");
                    sWifiMonitorSingleton = new WifiMonitorSingleton();
                }
            }
            return sWifiMonitorSingleton;
        }

        /**
         * Enables event delivery for {@code iface}; on first use, connects to the
         * supplicant (retrying up to 5 times, 1s apart) and spawns the MonitorThread.
         * Sends SUP_CONNECTION_EVENT on success, SUP_DISCONNECTION_EVENT on failure.
         */
        public synchronized void startMonitoring(String iface) {
            WifiMonitor m = mIfaceMap.get(iface);
            if (m == null) {
                Log.e(TAG, "startMonitor called with unknown iface=" + iface);
                return;
            }

            Log.d(TAG, "startMonitoring(" + iface + ") with mConnected = " + mConnected);

            if (mConnected) {
                m.mMonitoring = true;
                m.mWifiStateMachine.sendMessage(SUP_CONNECTION_EVENT);
            } else {
                if (DBG) Log.d(TAG, "connecting to supplicant");
                int connectTries = 0;
                while (true) {
                    if (mWifiNative.connectToSupplicant()) {
                        m.mMonitoring = true;
                        m.mWifiStateMachine.sendMessage(SUP_CONNECTION_EVENT);
                        new MonitorThread(mWifiNative, this).start();
                        mConnected = true;
                        break;
                    }
                    if (connectTries++ < 5) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException ignore) {
                        }
                    } else {
                        mIfaceMap.remove(iface);
                        m.mWifiStateMachine.sendMessage(SUP_DISCONNECTION_EVENT);
                        Log.e(TAG, "startMonitoring(" + iface + ") failed!");
                        break;
                    }
                }
            }
        }

        // NOTE(review): unlike startMonitoring, no null check on m here — an unknown
        // iface would NPE on m.mWifiStateMachine. Confirm callers guarantee registration.
        public synchronized void stopMonitoring(String iface) {
            WifiMonitor m = mIfaceMap.get(iface);
            if (DBG) Log.d(TAG, "stopMonitoring(" + iface + ") = " + m.mWifiStateMachine);
            m.mMonitoring = false;
            m.mWifiStateMachine.sendMessage(SUP_DISCONNECTION_EVENT);
        }

        /** Registers a monitor for its interface and captures the first WifiNative seen. */
        public synchronized void registerInterfaceMonitor(String iface, WifiMonitor m) {
            if (DBG) Log.d(TAG, "registerInterface(" + iface + "+" + m.mWifiStateMachine + ")");
            mIfaceMap.put(iface, m);
            if (mWifiNative == null) {
                mWifiNative = m.mWifiNative;
            }
        }

        public synchronized void unregisterInterfaceMonitor(String iface) {
            // REVIEW: When should we call this? If this isn't called, then WifiMonitor
            // objects will remain in the mIfaceMap; and won't ever get deleted
            WifiMonitor m = mIfaceMap.remove(iface);
            if (DBG) Log.d(TAG, "unregisterInterface(" + iface + "+" + m.mWifiStateMachine + ")");
        }

        public synchronized void stopSupplicant() {
            mWifiNative.stopSupplicant();
        }

        /** Kills the supplicant and marks every registered monitor as stopped. */
        public synchronized void killSupplicant(boolean p2pSupported) {
            mWifiNative.killSupplicant(p2pSupported);
            mConnected = false;
            Iterator<Map.Entry<String, WifiMonitor>> it = mIfaceMap.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, WifiMonitor> e = it.next();
                WifiMonitor m = e.getValue();
                m.mMonitoring = false;
            }
        }

        public synchronized void closeSupplicantConnection() {
            mWifiNative.closeSupplicantConnection();
        }

        private synchronized WifiMonitor getMonitor(String iface) {
            return mIfaceMap.get(iface);
        }
    }

    /**
     * Thread that blocks on {@link WifiNative#waitForEvent()}, routes each raw
     * event string to the monitor for its interface (or broadcasts when no
     * interface id is present), and parses it into state-machine messages.
     */
    private static class MonitorThread extends Thread {
        private final WifiNative mWifiNative;
        private final WifiMonitorSingleton mWifiMonitorSingleton;
        // Consecutive "recv error" count; connection is dropped past MAX_RECV_ERRORS.
        private int mRecvErrors = 0;
        // Target of the event currently being dispatched; set per-event in run().
        private StateMachine mStateMachine = null;

        public MonitorThread(WifiNative wifiNative, WifiMonitorSingleton wifiMonitorSingleton) {
            super("WifiMonitor");
            mWifiNative = wifiNative;
            mWifiMonitorSingleton = wifiMonitorSingleton;
        }

        public void run() {
            //noinspection InfiniteLoopStatement
            for (;;) {
                String eventStr = mWifiNative.waitForEvent();

                // Skip logging the common but mostly uninteresting scan-results event
                if (DBG && eventStr.indexOf(SCAN_RESULTS_STR) == -1) {
                    Log.d(TAG, "Event [" + eventStr + "]");
                }

                String iface = "p2p0";
                WifiMonitor m = null;
                mStateMachine = null;

                if (eventStr.startsWith("IFNAME=")) {
                    // "IFNAME=<iface> <event>" — strip the prefix and route by iface.
                    int space = eventStr.indexOf(' ');
                    if (space != -1) {
                        iface = eventStr.substring(7, space);
                        m = mWifiMonitorSingleton.getMonitor(iface);
                        if (m == null && iface.startsWith("p2p-")) {
                            // p2p interfaces are created dynamically, but we have
                            // only one P2p state machine monitoring all of them; look
                            // for it explicitly, and send messages there ..
                            m = mWifiMonitorSingleton.getMonitor("p2p0");
                        }
                        eventStr = eventStr.substring(space + 1);
                    }
                } else {
                    // events without prefix belong to p2p0 monitor
                    m = mWifiMonitorSingleton.getMonitor("p2p0");
                }

                if (m != null) {
                    if (m.mMonitoring) {
                        mStateMachine = m.mWifiStateMachine;
                    } else {
                        if (DBG) Log.d(TAG, "Dropping event because monitor (" + iface +
                                ") is stopped");
                        continue;
                    }
                }

                if (mStateMachine != null) {
                    if (dispatchEvent(eventStr)) {
                        break;
                    }
                } else {
                    if (DBG) Log.d(TAG, "Sending to all monitors because there's no interface id");
                    boolean done = false;
                    Iterator<Map.Entry<String, WifiMonitor>> it =
                            mWifiMonitorSingleton.mIfaceMap.entrySet().iterator();
                    while (it.hasNext()) {
                        Map.Entry<String, WifiMonitor> e = it.next();
                        m = e.getValue();
                        mStateMachine = m.mWifiStateMachine;
                        if (dispatchEvent(eventStr)) {
                            done = true;
                        }
                    }
                    if (done) {
                        // After this thread terminates, we'll no longer
                        // be connected to the supplicant
                        if (DBG) Log.d(TAG, "Disconnecting from the supplicant, no more events");
                        mWifiMonitorSingleton.mConnected = false;
                        break;
                    }
                }
            }
        }

        /* @return true if the event was supplicant disconnection */
        private boolean dispatchEvent(String eventStr) {
            // Events without the CTRL-EVENT- prefix (WPA/WPS/P2P/AP) are handled here
            // and never signal disconnection.
            if (!eventStr.startsWith(EVENT_PREFIX_STR)) {
                if (eventStr.startsWith(WPA_EVENT_PREFIX_STR) &&
                        0 < eventStr.indexOf(PASSWORD_MAY_BE_INCORRECT_STR)) {
                    mStateMachine.sendMessage(AUTHENTICATION_FAILURE_EVENT);
                } else if (eventStr.startsWith(WPS_SUCCESS_STR)) {
                    mStateMachine.sendMessage(WPS_SUCCESS_EVENT);
                } else if (eventStr.startsWith(WPS_FAIL_STR)) {
                    handleWpsFailEvent(eventStr);
                } else if (eventStr.startsWith(WPS_OVERLAP_STR)) {
                    mStateMachine.sendMessage(WPS_OVERLAP_EVENT);
                } else if (eventStr.startsWith(WPS_TIMEOUT_STR)) {
                    mStateMachine.sendMessage(WPS_TIMEOUT_EVENT);
                } else if (eventStr.startsWith(P2P_EVENT_PREFIX_STR)) {
                    handleP2pEvents(eventStr);
                } else if (eventStr.startsWith(HOST_AP_EVENT_PREFIX_STR)) {
                    handleHostApEvents(eventStr);
                } else {
                    if (DBG) Log.w(TAG, "couldn't identify event type - " + eventStr);
                }
                return false;
            }

            String eventName = eventStr.substring(EVENT_PREFIX_LEN_STR);
            int nameEnd = eventName.indexOf(' ');
            if (nameEnd != -1)
                eventName = eventName.substring(0, nameEnd);
            if (eventName.length() == 0) {
                if (DBG) Log.i(TAG, "Received wpa_supplicant event with empty event name");
                return false;
            }
            /*
             * Map event name into event enum
             */
            int event;
            if (eventName.equals(CONNECTED_STR))
                event = CONNECTED;
            else if (eventName.equals(DISCONNECTED_STR))
                event = DISCONNECTED;
            else if (eventName.equals(STATE_CHANGE_STR))
                event = STATE_CHANGE;
            else if (eventName.equals(SCAN_RESULTS_STR))
                event = SCAN_RESULTS;
            else if (eventName.equals(LINK_SPEED_STR))
                event = LINK_SPEED;
            else if (eventName.equals(TERMINATING_STR))
                event = TERMINATING;
            else if (eventName.equals(DRIVER_STATE_STR))
                event = DRIVER_STATE;
            else if (eventName.equals(EAP_FAILURE_STR))
                event = EAP_FAILURE;
            else if (eventName.equals(ASSOC_REJECT_STR))
                event = ASSOC_REJECT;
            else
                event = UNKNOWN;

            // Extract the event's payload; the separator depends on the event type.
            String eventData = eventStr;
            if (event == DRIVER_STATE || event == LINK_SPEED)
                eventData = eventData.split(" ")[1];
            else if (event == STATE_CHANGE || event == EAP_FAILURE) {
                int ind = eventStr.indexOf(" ");
                if (ind != -1) {
                    eventData = eventStr.substring(ind + 1);
                }
            } else {
                int ind = eventStr.indexOf(" - ");
                if (ind != -1) {
                    eventData = eventStr.substring(ind + 3);
                }
            }

            if (event == STATE_CHANGE) {
                handleSupplicantStateChange(eventData);
            } else if (event == DRIVER_STATE) {
                handleDriverEvent(eventData);
            } else if (event == TERMINATING) {
                /**
                 * Close the supplicant connection if we see
                 * too many recv errors
                 */
                if (eventData.startsWith(WPA_RECV_ERROR_STR)) {
                    if (++mRecvErrors > MAX_RECV_ERRORS) {
                        if (DBG) {
                            Log.d(TAG, "too many recv errors, closing connection");
                        }
                    } else {
                        return false;
                    }
                }

                // notify and exit
                mStateMachine.sendMessage(SUP_DISCONNECTION_EVENT);
                return true;
            } else if (event == EAP_FAILURE) {
                if (eventData.startsWith(EAP_AUTH_FAILURE_STR)) {
                    mStateMachine.sendMessage(AUTHENTICATION_FAILURE_EVENT);
                }
            } else if (event == ASSOC_REJECT) {
                mStateMachine.sendMessage(ASSOCIATION_REJECTION_EVENT);
            } else {
                handleEvent(event, eventData);
            }
            // Any successfully-handled event resets the consecutive error counter.
            mRecvErrors = 0;
            return false;
        }

        /** Handles CTRL-EVENT-DRIVER-STATE payloads; only "HANGED" is acted on. */
        private void handleDriverEvent(String state) {
            if (state == null) {
                return;
            }
            if (state.equals("HANGED")) {
                mStateMachine.sendMessage(DRIVER_HUNG_EVENT);
            }
        }

        /**
         * Handle all supplicant events except STATE-CHANGE
         * @param event the event type
         * @param remainder the rest of the string following the
         * event name and &quot;&#8195;&#8212;&#8195;&quot;
         */
        void handleEvent(int event, String remainder) {
            switch (event) {
                case DISCONNECTED:
                    handleNetworkStateChange(NetworkInfo.DetailedState.DISCONNECTED, remainder);
                    break;

                case CONNECTED:
                    handleNetworkStateChange(NetworkInfo.DetailedState.CONNECTED, remainder);
                    break;

                case SCAN_RESULTS:
                    mStateMachine.sendMessage(SCAN_RESULTS_EVENT);
                    break;

                case UNKNOWN:
                    break;
            }
        }

        /**
         * Parses a WPS-FAIL event and maps its reason/config_error codes to the
         * corresponding WifiManager error code; anything unrecognized becomes
         * a generic WifiManager.ERROR.
         */
        private void handleWpsFailEvent(String dataString) {
            final Pattern p = Pattern.compile(WPS_FAIL_PATTERN);
            Matcher match = p.matcher(dataString);
            if (match.find()) {
                String cfgErr = match.group(1);
                String reason = match.group(2);

                // Reason codes take precedence over config errors.
                if (reason != null) {
                    switch(Integer.parseInt(reason)) {
                        case REASON_TKIP_ONLY_PROHIBITED:
                            mStateMachine.sendMessage(mStateMachine.obtainMessage(WPS_FAIL_EVENT,
                                    WifiManager.WPS_TKIP_ONLY_PROHIBITED, 0));
                            return;
                        case REASON_WEP_PROHIBITED:
                            mStateMachine.sendMessage(mStateMachine.obtainMessage(WPS_FAIL_EVENT,
                                    WifiManager.WPS_WEP_PROHIBITED, 0));
                            return;
                    }
                }
                if (cfgErr != null) {
                    switch(Integer.parseInt(cfgErr)) {
                        case CONFIG_AUTH_FAILURE:
                            mStateMachine.sendMessage(mStateMachine.obtainMessage(WPS_FAIL_EVENT,
                                    WifiManager.WPS_AUTH_FAILURE, 0));
                            return;
                        case CONFIG_MULTIPLE_PBC_DETECTED:
                            mStateMachine.sendMessage(mStateMachine.obtainMessage(WPS_FAIL_EVENT,
                                    WifiManager.WPS_OVERLAP_ERROR, 0));
                            return;
                    }
                }
            }
            // For all other errors, return a generic internal error
            mStateMachine.sendMessage(mStateMachine.obtainMessage(WPS_FAIL_EVENT,
                    WifiManager.ERROR, 0));
        }

        /* <event> status=<err> and the special case of <event> reason=FREQ_CONFLICT */
        private P2pStatus p2pError(String dataString) {
            P2pStatus err = P2pStatus.UNKNOWN;
            String[] tokens = dataString.split(" ");
            if (tokens.length < 2) return err;
            String[] nameValue = tokens[1].split("=");
            if (nameValue.length != 2) return err;

            /* Handle the special case of reason=FREQ+CONFLICT */
            if (nameValue[1].equals("FREQ_CONFLICT")) {
                return P2pStatus.NO_COMMON_CHANNEL;
            }
            try {
                err = P2pStatus.valueOf(Integer.parseInt(nameValue[1]));
            } catch (NumberFormatException e) {
                e.printStackTrace();
            }
            return err;
        }

        /**
         * Handle p2p events
         */
        private void handleP2pEvents(String dataString) {
            if (dataString.startsWith(P2P_DEVICE_FOUND_STR)) {
                mStateMachine.sendMessage(P2P_DEVICE_FOUND_EVENT, new WifiP2pDevice(dataString));
            } else if (dataString.startsWith(P2P_DEVICE_LOST_STR)) {
                mStateMachine.sendMessage(P2P_DEVICE_LOST_EVENT, new WifiP2pDevice(dataString));
            } else if (dataString.startsWith(P2P_FIND_STOPPED_STR)) {
                mStateMachine.sendMessage(P2P_FIND_STOPPED_EVENT);
            } else if (dataString.startsWith(P2P_GO_NEG_REQUEST_STR)) {
                mStateMachine.sendMessage(P2P_GO_NEGOTIATION_REQUEST_EVENT,
                        new WifiP2pConfig(dataString));
            } else if (dataString.startsWith(P2P_GO_NEG_SUCCESS_STR)) {
                mStateMachine.sendMessage(P2P_GO_NEGOTIATION_SUCCESS_EVENT);
            } else if (dataString.startsWith(P2P_GO_NEG_FAILURE_STR)) {
                mStateMachine.sendMessage(P2P_GO_NEGOTIATION_FAILURE_EVENT, p2pError(dataString));
            } else if (dataString.startsWith(P2P_GROUP_FORMATION_SUCCESS_STR)) {
                mStateMachine.sendMessage(P2P_GROUP_FORMATION_SUCCESS_EVENT);
            } else if (dataString.startsWith(P2P_GROUP_FORMATION_FAILURE_STR)) {
                mStateMachine.sendMessage(P2P_GROUP_FORMATION_FAILURE_EVENT, p2pError(dataString));
            } else if (dataString.startsWith(P2P_GROUP_STARTED_STR)) {
                mStateMachine.sendMessage(P2P_GROUP_STARTED_EVENT, new WifiP2pGroup(dataString));
            } else if (dataString.startsWith(P2P_GROUP_REMOVED_STR)) {
                mStateMachine.sendMessage(P2P_GROUP_REMOVED_EVENT, new WifiP2pGroup(dataString));
            } else if (dataString.startsWith(P2P_INVITATION_RECEIVED_STR)) {
                mStateMachine.sendMessage(P2P_INVITATION_RECEIVED_EVENT,
                        new WifiP2pGroup(dataString));
            } else if (dataString.startsWith(P2P_INVITATION_RESULT_STR)) {
                mStateMachine.sendMessage(P2P_INVITATION_RESULT_EVENT, p2pError(dataString));
            } else if (dataString.startsWith(P2P_PROV_DISC_PBC_REQ_STR)) {
                mStateMachine.sendMessage(P2P_PROV_DISC_PBC_REQ_EVENT,
                        new WifiP2pProvDiscEvent(dataString));
            } else if (dataString.startsWith(P2P_PROV_DISC_PBC_RSP_STR)) {
                mStateMachine.sendMessage(P2P_PROV_DISC_PBC_RSP_EVENT,
                        new WifiP2pProvDiscEvent(dataString));
            } else if (dataString.startsWith(P2P_PROV_DISC_ENTER_PIN_STR)) {
                mStateMachine.sendMessage(P2P_PROV_DISC_ENTER_PIN_EVENT,
                        new WifiP2pProvDiscEvent(dataString));
            } else if (dataString.startsWith(P2P_PROV_DISC_SHOW_PIN_STR)) {
                mStateMachine.sendMessage(P2P_PROV_DISC_SHOW_PIN_EVENT,
                        new WifiP2pProvDiscEvent(dataString));
            } else if (dataString.startsWith(P2P_PROV_DISC_FAILURE_STR)) {
                mStateMachine.sendMessage(P2P_PROV_DISC_FAILURE_EVENT);
            } else if (dataString.startsWith(P2P_SERV_DISC_RESP_STR)) {
                List<WifiP2pServiceResponse> list = WifiP2pServiceResponse.newInstance(dataString);
                if (list != null) {
                    mStateMachine.sendMessage(P2P_SERV_DISC_RESP_EVENT, list);
                } else {
                    Log.e(TAG, "Null service resp " + dataString);
                }
            }
        }

        /**
         * Handle hostap events
         */
        private void handleHostApEvents(String dataString) {
            String[] tokens = dataString.split(" ");
            /* AP-STA-CONNECTED 42:fc:89:a8:96:09 p2p_dev_addr=02:90:4c:a0:92:54 */
            if (tokens[0].equals(AP_STA_CONNECTED_STR)) {
                mStateMachine.sendMessage(AP_STA_CONNECTED_EVENT, new WifiP2pDevice(dataString));
            /* AP-STA-DISCONNECTED 42:fc:89:a8:96:09 p2p_dev_addr=02:90:4c:a0:92:54 */
            } else if (tokens[0].equals(AP_STA_DISCONNECTED_STR)) {
                mStateMachine.sendMessage(AP_STA_DISCONNECTED_EVENT, new WifiP2pDevice(dataString));
            }
        }

        /**
         * Handle the supplicant STATE-CHANGE event
         * @param dataString New supplicant state string in the format:
         * id=network-id state=new-state
         */
        private void handleSupplicantStateChange(String dataString) {
            WifiSsid wifiSsid = null;
            // SSID is extracted separately since it may contain spaces.
            int index = dataString.lastIndexOf("SSID=");
            if (index != -1) {
                wifiSsid = WifiSsid.createFromAsciiEncoded(
                        dataString.substring(index + 5));
            }
            String[] dataTokens = dataString.split(" ");

            String BSSID = null;
            int networkId = -1;
            int newState = -1;
            for (String token : dataTokens) {
                String[] nameValue = token.split("=");
                if (nameValue.length != 2) {
                    continue;
                }

                if (nameValue[0].equals("BSSID")) {
                    BSSID = nameValue[1];
                    continue;
                }

                int value;
                try {
                    value = Integer.parseInt(nameValue[1]);
                } catch (NumberFormatException e) {
                    continue;
                }

                if (nameValue[0].equals("id")) {
                    networkId = value;
                } else if (nameValue[0].equals("state")) {
                    newState = value;
                }
            }

            if (newState == -1) return;

            // The numeric state maps onto SupplicantState by ordinal.
            SupplicantState newSupplicantState = SupplicantState.INVALID;
            for (SupplicantState state : SupplicantState.values()) {
                if (state.ordinal() == newState) {
                    newSupplicantState = state;
                    break;
                }
            }
            if (newSupplicantState == SupplicantState.INVALID) {
                Log.w(TAG, "Invalid supplicant state: " + newState);
            }
            notifySupplicantStateChange(networkId, wifiSsid, BSSID, newSupplicantState);
        }

        // Extracts BSSID and network id from CONNECTED events via mConnectedEventPattern.
        // NOTE(review): only the CONNECTED branch notifies; DISCONNECTED payloads are
        // dropped here — confirm that is intentional.
        private void handleNetworkStateChange(NetworkInfo.DetailedState newState, String data) {
            String BSSID = null;
            int networkId = -1;
            if (newState == NetworkInfo.DetailedState.CONNECTED) {
                Matcher match = mConnectedEventPattern.matcher(data);
                if (!match.find()) {
                    if (DBG) Log.d(TAG, "Could not find BSSID in CONNECTED event string");
                } else {
                    BSSID = match.group(1);
                    try {
                        networkId = Integer.parseInt(match.group(2));
                    } catch (NumberFormatException e) {
                        networkId = -1;
                    }
                }
                notifyNetworkStateChange(newState, BSSID, networkId);
            }
        }

        /**
         * Send the state machine a notification that the state of Wifi connectivity
         * has changed.
         * @param netId the configured network on which the state change occurred
         * @param newState the new network state
         * @param BSSID when the new state is {@link DetailedState#CONNECTED
         * NetworkInfo.DetailedState.CONNECTED},
         * this is the MAC address of the access point. Otherwise, it
         * is {@code null}.
         */
        void notifyNetworkStateChange(NetworkInfo.DetailedState newState, String BSSID, int netId) {
            if (newState == NetworkInfo.DetailedState.CONNECTED) {
                Message m = mStateMachine.obtainMessage(NETWORK_CONNECTION_EVENT,
                        netId, 0, BSSID);
                mStateMachine.sendMessage(m);
            } else {
                Message m = mStateMachine.obtainMessage(NETWORK_DISCONNECTION_EVENT,
                        netId, 0, BSSID);
                mStateMachine.sendMessage(m);
            }
        }

        /**
         * Send the state machine a notification that the state of the supplicant
         * has changed.
         * @param networkId the configured network on which the state change occurred
         * @param wifiSsid network name
         * @param BSSID network address
         * @param newState the new {@code SupplicantState}
         */
        void notifySupplicantStateChange(int networkId, WifiSsid wifiSsid, String BSSID,
                SupplicantState newState) {
            mStateMachine.sendMessage(mStateMachine.obtainMessage(SUPPLICANT_STATE_CHANGE_EVENT,
                    new StateChangeResult(networkId, wifiSsid, BSSID, newState)));
        }
    }
}
package com.timehop.stickyheadersrecyclerview; import android.graphics.Rect; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.View; import com.timehop.stickyheadersrecyclerview.caching.HeaderProvider; import com.timehop.stickyheadersrecyclerview.calculation.DimensionCalculator; import com.timehop.stickyheadersrecyclerview.util.OrientationProvider; /** * Calculates the position and location of header views */ public class HeaderPositionCalculator { private final StickyRecyclerHeadersAdapter mAdapter; private final OrientationProvider mOrientationProvider; private final HeaderProvider mHeaderProvider; private final DimensionCalculator mDimensionCalculator; public HeaderPositionCalculator(StickyRecyclerHeadersAdapter adapter, HeaderProvider headerProvider, OrientationProvider orientationProvider, DimensionCalculator dimensionCalculator) { mAdapter = adapter; mHeaderProvider = headerProvider; mOrientationProvider = orientationProvider; mDimensionCalculator = dimensionCalculator; } /** * Determines if an item in the list should have a header that is different than the item in the * list that immediately precedes it. Items with no headers will always return false. 
* * @param position of the list item in questions * @return true if this item has a different header than the previous item in the list * @see {@link StickyRecyclerHeadersAdapter#getHeaderId(int)} */ public boolean hasNewHeader(int position) { if (indexOutOfBounds(position)) { return false; } long headerId = mAdapter.getHeaderId(position); if (headerId < 0) { return false; } return position == 0 || headerId != mAdapter.getHeaderId(position - 1); } private boolean indexOutOfBounds(int position) { return position < 0 || position >= mAdapter.getItemCount(); } public Rect getHeaderBounds(RecyclerView recyclerView, View header, View firstView, boolean firstHeader) { int orientation = mOrientationProvider.getOrientation(recyclerView); Rect bounds = getDefaultHeaderOffset(recyclerView, header, firstView, orientation); if (firstHeader && isStickyHeaderBeingPushedOffscreen(recyclerView, header)) { View viewAfterNextHeader = getFirstViewUnobscuredByHeader(recyclerView, header); int firstViewUnderHeaderPosition = recyclerView.getChildAdapterPosition(viewAfterNextHeader); View secondHeader = mHeaderProvider.getHeader(recyclerView, firstViewUnderHeaderPosition); translateHeaderWithNextHeader(recyclerView, mOrientationProvider.getOrientation(recyclerView), bounds, header, viewAfterNextHeader, secondHeader); } return bounds; } private Rect getDefaultHeaderOffset(RecyclerView recyclerView, View header, View firstView, int orientation) { int translationX, translationY; Rect headerMargins = mDimensionCalculator.getMargins(header); if (orientation == LinearLayoutManager.VERTICAL) { translationX = firstView.getLeft() + headerMargins.left; translationY = Math.max( firstView.getTop() - header.getHeight() - headerMargins.bottom, getListTop(recyclerView) + headerMargins.top); } else { translationY = firstView.getTop() + headerMargins.top; translationX = Math.max( firstView.getLeft() - header.getWidth() - headerMargins.right, getListLeft(recyclerView) + headerMargins.left); } return new 
Rect(translationX, translationY, translationX + header.getWidth(), translationY + header.getHeight()); } private boolean isStickyHeaderBeingPushedOffscreen(RecyclerView recyclerView, View stickyHeader) { View viewAfterHeader = getFirstViewUnobscuredByHeader(recyclerView, stickyHeader); int firstViewUnderHeaderPosition = recyclerView.getChildAdapterPosition(viewAfterHeader); if (firstViewUnderHeaderPosition == RecyclerView.NO_POSITION) { return false; } if (firstViewUnderHeaderPosition > 0 && hasNewHeader(firstViewUnderHeaderPosition)) { View nextHeader = mHeaderProvider.getHeader(recyclerView, firstViewUnderHeaderPosition); Rect nextHeaderMargins = mDimensionCalculator.getMargins(nextHeader); Rect headerMargins = mDimensionCalculator.getMargins(stickyHeader); if (mOrientationProvider.getOrientation(recyclerView) == LinearLayoutManager.VERTICAL) { int topOfNextHeader = viewAfterHeader.getTop() - nextHeaderMargins.bottom - nextHeader.getHeight() - nextHeaderMargins.top; int bottomOfThisHeader = recyclerView.getPaddingTop() + stickyHeader.getBottom() + headerMargins.top + headerMargins.bottom; if (topOfNextHeader < bottomOfThisHeader) { return true; } } else { int leftOfNextHeader = viewAfterHeader.getLeft() - nextHeaderMargins.right - nextHeader.getWidth() - nextHeaderMargins.left; int rightOfThisHeader = recyclerView.getPaddingLeft() + stickyHeader.getRight() + headerMargins.left + headerMargins.right; if (leftOfNextHeader < rightOfThisHeader) { return true; } } } return false; } private void translateHeaderWithNextHeader(RecyclerView recyclerView, int orientation, Rect translation, View currentHeader, View viewAfterNextHeader, View nextHeader) { Rect nextHeaderMargins = mDimensionCalculator.getMargins(nextHeader); Rect stickyHeaderMargins = mDimensionCalculator.getMargins(currentHeader); if (orientation == LinearLayoutManager.VERTICAL) { int topOfStickyHeader = getListTop(recyclerView) + stickyHeaderMargins.top + stickyHeaderMargins.bottom; int shiftFromNextHeader 
= viewAfterNextHeader.getTop() - nextHeader.getHeight() - nextHeaderMargins.bottom - nextHeaderMargins.top - currentHeader.getHeight() - topOfStickyHeader; if (shiftFromNextHeader < topOfStickyHeader) { translation.top += shiftFromNextHeader; } } else { int leftOfStickyHeader = getListLeft(recyclerView) + stickyHeaderMargins.left + stickyHeaderMargins.right; int shiftFromNextHeader = viewAfterNextHeader.getLeft() - nextHeader.getWidth() - nextHeaderMargins.right - nextHeaderMargins.left - currentHeader.getWidth() - leftOfStickyHeader; if (shiftFromNextHeader < leftOfStickyHeader) { translation.left += shiftFromNextHeader; } } } /** * Returns the first item currently in the RecyclerView that is not obscured by a header. * * @param parent Recyclerview containing all the list items * @return first item that is fully beneath a header */ private View getFirstViewUnobscuredByHeader(RecyclerView parent, View firstHeader) { for (int i = 0; i < parent.getChildCount(); i++) { View child = parent.getChildAt(i); if (!itemIsObscuredByHeader(parent, child, firstHeader, mOrientationProvider.getOrientation(parent))) { return child; } } return null; } /** * Determines if an item is obscured by a header * * * @param parent * @param item to determine if obscured by header * @param header that might be obscuring the item * @param orientation of the {@link RecyclerView} * @return true if the item view is obscured by the header view */ private boolean itemIsObscuredByHeader(RecyclerView parent, View item, View header, int orientation) { RecyclerView.LayoutParams layoutParams = (RecyclerView.LayoutParams) item.getLayoutParams(); Rect headerMargins = mDimensionCalculator.getMargins(header); int adapterPosition = parent.getChildAdapterPosition(item); if (adapterPosition == RecyclerView.NO_POSITION || mHeaderProvider.getHeader(parent, adapterPosition) != header) { // Resolves https://github.com/timehop/sticky-headers-recyclerview/issues/36 // Handles an edge case where a trailing header is 
smaller than the current sticky header. return false; } if (orientation == LinearLayoutManager.VERTICAL) { int itemTop = item.getTop() - layoutParams.topMargin; int headerBottom = header.getBottom() + headerMargins.bottom + headerMargins.top; if (itemTop > headerBottom) { return false; } } else { int itemLeft = item.getLeft() - layoutParams.leftMargin; int headerRight = header.getRight() + headerMargins.right + headerMargins.left; if (itemLeft > headerRight) { return false; } } return true; } private int getListTop(RecyclerView view) { if (view.getLayoutManager().getClipToPadding()) { return view.getPaddingTop(); } else { return 0; } } private int getListLeft(RecyclerView view) { if (view.getLayoutManager().getClipToPadding()) { return view.getPaddingLeft(); } else { return 0; } } }
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jayway.restassured.itest.java;

import com.jayway.restassured.RestAssured;
import com.jayway.restassured.builder.RequestSpecBuilder;
import com.jayway.restassured.filter.log.RequestLoggingFilter;
import com.jayway.restassured.itest.java.support.WithJetty;
import com.jayway.restassured.specification.RequestSpecification;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.WriterOutputStream;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.littleshoot.proxy.HttpProxyServer;
import org.littleshoot.proxy.impl.DefaultHttpProxyServer;

import java.io.File;
import java.io.PrintStream;
import java.io.StringWriter;
import java.net.ConnectException;
import java.net.URI;
import java.net.URISyntaxException;

import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.specification.ProxySpecification.host;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;

/**
 * Integration tests for REST Assured's proxy support. Each test routes a
 * request through a local LittleProxy instance on port 8888 and asserts that
 * the proxy added a "Via" header to the response.
 */
public class ProxyITest extends WithJetty {

    // Shared across all tests; started/stopped once per class.
    static HttpProxyServer proxyServer;

    /** Starts a local-only proxy on port 8888 before any test runs. */
    @BeforeClass
    public static void create_proxy_server() {
        proxyServer = DefaultHttpProxyServer.bootstrap().withPort(8888).withAllowLocalOnly(true).start();
    }

    /** Stops the proxy and removes the certificate/keystore files LittleProxy leaves behind. */
    @AfterClass
    public static void stop_proxy_server() {
        proxyServer.stop();
        proxyServer = null;
        FileUtils.deleteQuietly(new File("littleproxy_cert"));
        FileUtils.deleteQuietly(new File("littleproxy_keystore.jks"));
    }

    @Test
    public void using_proxy_with_hostname_and_port() {
        given().
                proxy("127.0.0.1", 8888).
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    // No port given: relies on the default proxy port (8888).
    @Test
    public void using_proxy_with_hostname() {
        given().
                proxy("127.0.0.1").
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    @Test
    public void using_proxy_with_hostname_as_a_uri() {
        given().
                proxy("http://127.0.0.1:8888").
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    @Ignore("Doesnt work with Proxy?")
    @Test
    public void using_proxy_with_https_scheme() {
        given().
                proxy("https://127.0.0.1:8888").
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    @Test
    public void using_proxy_with_uri() throws URISyntaxException {
        given().
                proxy(new URI("http://127.0.0.1:8888")).
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    @Test
    public void using_proxy_with_proxy_specification() {
        given().
                proxy(host("localhost").and().withPort(8888).and().withScheme("http")).
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString())).
                body("greeting.firstName", equalTo("John")).
                body("greeting.lastName", equalTo("Doe"));
    }

    @Test
    public void using_proxy_with_specification() {
        RequestSpecification specification = new RequestSpecBuilder().setProxy("localhost").build();

        given().
                specification(specification).
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));
    }

    @Test
    public void using_statically_configured_proxy_defined_using_method() {
        RestAssured.proxy("http://127.0.0.1:8888");
        try {
            given().
                    param("firstName", "John").
                    param("lastName", "Doe").
            when().
                    get("/greetJSON").
            then().
                    header("Via", not(isEmptyOrNullString()));
        } finally {
            // Static config leaks into other tests unless reset.
            RestAssured.reset();
        }
    }

    @Test
    public void using_statically_configured_proxy_defined_using_field() {
        RestAssured.proxy = host("127.0.0.1").withPort(8888);
        try {
            given().
                    param("firstName", "John").
                    param("lastName", "Doe").
            when().
                    get("/greetJSON").
            then().
                    header("Via", not(isEmptyOrNullString()));
        } finally {
            RestAssured.reset();
        }
    }

    @Test
    public void using_statically_configured_proxy_defined_using_string_uri_without_port() {
        // NOTE(review): 'exception' (ExpectedException rule) is not declared in
        // this class — presumably inherited from WithJetty; confirm.
        exception.expect(ConnectException.class); // Because it will try to connect to port 80
        RestAssured.proxy("http://127.0.0.1");
        try {
            given().
                    param("firstName", "John").
                    param("lastName", "Doe").
            when().
                    get("/greetJSON").
            then().
                    header("Via", not(isEmptyOrNullString()));
        } finally {
            RestAssured.reset();
        }
    }

    @Test
    public void proxy_details_are_shown_in_the_request_log() {
        // Capture the request log into a StringWriter so the exact output can
        // be asserted below.
        final StringWriter writer = new StringWriter();
        final PrintStream captor = new PrintStream(new WriterOutputStream(writer), true);

        given().
                filter(new RequestLoggingFilter(captor)).
                proxy("127.0.0.1").
                param("firstName", "John").
                param("lastName", "Doe").
        when().
                get("/greetJSON").
        then().
                header("Via", not(isEmptyOrNullString()));

        // Byte-exact assertion on the logged request, including the proxy line.
        assertThat(writer.toString(), equalTo("Request method:\tGET\nRequest path:\thttp://localhost:8080/greetJSON?firstName=John&lastName=Doe\nProxy:\t\t\thttp://127.0.0.1:8888\nRequest params:\tfirstName=John\n\t\t\t\tlastName=Doe\nQuery params:\t<none>\nForm params:\t<none>\nPath params:\t<none>\nMultiparts:\t\t<none>\nHeaders:\t\tAccept=*/*\nCookies:\t\t<none>\nBody:\t\t\t<none>\n"));
    }
}
package com.project.quiz.fragments; import android.app.Activity; import android.net.Uri; import android.os.Bundle; import android.app.Fragment; import android.support.design.widget.TextInputLayout; import android.text.TextUtils; import android.transition.Fade; import android.transition.Slide; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Toast; import com.parse.FindCallback; import com.parse.ParseACL; import com.parse.ParseException; import com.parse.ParseQuery; import com.parse.ParseRole; import com.parse.ParseUser; import com.parse.SaveCallback; import com.parse.SignUpCallback; import com.project.quiz.R; import com.project.quiz.customviews.EditTextRegularFont; import com.project.quiz.customviews.TextViewRegularFont; import com.project.quiz.utils.CommonLibs; import java.util.List; import butterknife.Bind; import butterknife.ButterKnife; import butterknife.OnClick; /** * A simple {@link Fragment} subclass. * Activities that contain this fragment must implement the * {@link FragmentSignUp.OnFragmentInteractionListener} interface * to handle interaction events. * Use the {@link FragmentSignUp#newInstance} factory method to * create an instance of this fragment. */ public class FragmentSignUp extends Fragment { // TODO: Rename parameter arguments, choose names that match // the fragment initialization parameters, e.g. 
ARG_ITEM_NUMBER private static final String ARG_PARAM1 = "param1"; private static final String ARG_PARAM2 = "param2"; // TODO: Rename and change types of parameters private String mParam1; private String mParam2; private OnFragmentInteractionListener mListener; @Bind(R.id.username_field) EditTextRegularFont username; @Bind(R.id.phone_field) EditTextRegularFont phone; @Bind(R.id.year_field) EditTextRegularFont year; @Bind(R.id.password_field) EditTextRegularFont password; @Bind(R.id.password_verify_field) EditTextRegularFont verifyPassword; @Bind(R.id.name_field) EditTextRegularFont name; @Bind(R.id.name_layout) TextInputLayout nameLayout; @Bind(R.id.username_layout) TextInputLayout usernameLayout; @Bind(R.id.phone_layout) TextInputLayout phoneLayout; @Bind(R.id.year_layout) TextInputLayout yearLayout; @Bind(R.id.password_layout) TextInputLayout passwordLayout; @Bind(R.id.password_verify_layout) TextInputLayout verifyPasswordLayout; @OnClick(R.id.button_signup) public void onClick() { boolean result = validateData(); if (result) { final ParseUser user = new ParseUser(); user.setUsername(username.getText().toString().trim()); user.setPassword(password.getText().toString().trim()); user.put("phone", phone.getText().toString().trim()); user.put("year", year.getText().toString().trim()); user.put("name", name.getText().toString().trim()); /** * Sign up User */ user.signUpInBackground(new SignUpCallback() { @Override public void done(ParseException e) { if (e == null) { Toast.makeText(getActivity(), "Sign Up successful", Toast.LENGTH_LONG).show(); ParseQuery<ParseRole> parseRole = ParseRole.getQuery(); parseRole.findInBackground(new FindCallback<ParseRole>() { @Override public void done(List<ParseRole> objects, ParseException e) { if (e == null) { for (ParseRole role : objects) { if (role.getName().equalsIgnoreCase(CommonLibs.Roles.ROLE_ADMINISTRATOR)) { role.getUsers().add(user); role.saveInBackground(new SaveCallback() { @Override public void done(ParseException e) { 
if (e == null) { if (mListener != null && !getActivity().isDestroyed()) mListener.onFragmentInteraction(null); Log.e("Success", "Role success"); } else { Log.e("Fail", e.getMessage()); } } }); } } } } }); } else { Log.e("Sign Up Error", e.toString()); } } }); } } public final static boolean isValidEmail(CharSequence target) { if (TextUtils.isEmpty(target)) { return false; } else { return android.util.Patterns.EMAIL_ADDRESS.matcher(target).matches(); } } private void setAnimations() { setAllowEnterTransitionOverlap(true); setEnterTransition(new Slide()); setExitTransition(new Fade()); } private boolean validateData() { boolean result = true; if (username.getText().toString().trim().length() <= 0 || !(isValidEmail(username.getText().toString().trim()))) { usernameLayout.setError("Enter valid email"); result = false; } if (name.getText().toString().trim().length() <= 0) { nameLayout.setError("Enter your name"); result = false; } if (phone.getText().toString().trim().length() <= 0) { phone.setError("Enter Phone Number"); result = false; } if (year.getText().toString().trim().length() <= 0) { year.setError("Enter Year"); result = false; } if (password.getText().toString().trim().length() <= 8) { password.setError("Enter password of atleast 8 characters"); result = false; } if (!verifyPassword.getText().toString().trim().equalsIgnoreCase(password.getText().toString().trim())) { verifyPassword.setError("The two passwords do not match!"); result = false; } return result; } /** * Use this factory method to create a new instance of * this fragment using the provided parameters. * * @param param1 Parameter 1. * @param param2 Parameter 2. * @return A new instance of fragment FragmentSignUp. 
*/ // TODO: Rename and change types and number of parameters public static FragmentSignUp newInstance(String param1, String param2) { FragmentSignUp fragment = new FragmentSignUp(); Bundle args = new Bundle(); args.putString(ARG_PARAM1, param1); args.putString(ARG_PARAM2, param2); fragment.setArguments(args); return fragment; } public FragmentSignUp() { // Required empty public constructor } @Override public void onCreate(Bundle savedInstanceState) { setAnimations(); super.onCreate(savedInstanceState); if (getArguments() != null) { mParam1 = getArguments().getString(ARG_PARAM1); mParam2 = getArguments().getString(ARG_PARAM2); } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View view = inflater.inflate(R.layout.fragment_sign_up, container, false); ButterKnife.bind(this, view); return view; } // TODO: Rename method, update argument and hook method into UI event public void onButtonPressed(Uri uri) { if (mListener != null) { mListener.onFragmentInteraction(uri); } } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnFragmentInteractionListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnFragmentInteractionListener"); } } @Override public void onDetach() { super.onDetach(); mListener = null; } @Override public void onDestroyView() { ButterKnife.unbind(this); super.onDestroyView(); } /** * This interface must be implemented by activities that contain this * fragment to allow an interaction in this fragment to be communicated * to the activity and potentially other fragments contained in that * activity. * <p/> * See the Android Training lesson <a href= * "http://developer.android.com/training/basics/fragments/communicating.html" * >Communicating with Other Fragments</a> for more information. 
*/ public interface OnFragmentInteractionListener { // TODO: Update argument type and name public void onFragmentInteraction(Uri uri); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sentry.binding.hive; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; import java.io.Serializable; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.security.CodeSource; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.Arrays; import com.google.common.base.Preconditions; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.exec.DDLTask; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask; import org.apache.hadoop.hive.ql.exec.SentryGrantRevokeTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.hooks.Entity; import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.Hook; import 
org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.metadata.AuthorizationException; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges; import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope; import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType; import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap; import org.apache.sentry.binding.hive.conf.HiveAuthzConf; import org.apache.sentry.core.common.Subject; import org.apache.sentry.core.common.utils.PathUtils; import org.apache.sentry.core.model.db.AccessURI; import org.apache.sentry.core.model.db.Column; import org.apache.sentry.core.model.db.DBModelAction; import org.apache.sentry.core.model.db.DBModelAuthorizable; import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType; import org.apache.sentry.core.model.db.Database; import org.apache.sentry.core.model.db.Table; import org.apache.sentry.provider.cache.PrivilegeCache; import org.apache.sentry.provider.cache.SimplePrivilegeCache; import org.apache.sentry.provider.common.AuthorizationProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Splitter; import 
com.google.common.collect.ImmutableList; public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook { private static final Logger LOG = LoggerFactory .getLogger(HiveAuthzBindingHook.class); private final HiveAuthzBinding hiveAuthzBinding; private final HiveAuthzConf authzConf; private Database currDB = Database.ALL; private Table currTab; private AccessURI udfURI; private AccessURI serdeURI; private AccessURI partitionURI; private Table currOutTab = null; private Database currOutDB = null; private final List<String> serdeWhiteList; private boolean serdeURIPrivilegesEnabled; // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same // HiveOperationType, but we want to enforces different privileges on each statement. // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants // require table-level privileges. public boolean isDescTableBasic = false; public HiveAuthzBindingHook() throws Exception { SessionState session = SessionState.get(); if(session == null) { throw new IllegalStateException("Session has not been started"); } // HACK: set a random classname to force the Auth V2 in Hive SessionState.get().setAuthorizer(null); HiveConf hiveConf = session.getConf(); if(hiveConf == null) { throw new IllegalStateException("Session HiveConf is null"); } authzConf = loadAuthzConf(hiveConf); hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf); String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST, HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT); serdeWhiteList = Arrays.asList(serdeWhiteLists.split(",")); serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED, HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT); FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST); } public static 
HiveAuthzConf loadAuthzConf(HiveConf hiveConf) { boolean depreicatedConfigFile = false; HiveAuthzConf newAuthzConf = null; String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL); if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) { hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL); depreicatedConfigFile = true; } if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) { throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf + "' is invalid."); } try { newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf)); } catch (MalformedURLException e) { if (depreicatedConfigFile) { throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL + " specifies a malformed URL '" + hiveAuthzConf + "'", e); } else { throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " specifies a malformed URL '" + hiveAuthzConf + "'", e); } } return newAuthzConf; } /** * Pre-analyze hook called after compilation and before semantic analysis We * extract things for to Database and metadata level operations which are not * capture in the input/output entities during semantic analysis. Ideally it * should be handled in Hive. We need to move most of these into hive semantic * analyzer and then remove it from the access hook. 
*/ @Override public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException { switch (ast.getToken().getType()) { // Hive parser doesn't capture the database name in output entity, so we store it here for now case HiveParser.TOK_CREATEDATABASE: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: case HiveParser.TOK_DROPDATABASE: case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DESCDATABASE: currDB = new Database(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText())); break; case HiveParser.TOK_CREATETABLE: for (Node childNode : ast.getChildren()) { ASTNode childASTNode = (ASTNode) childNode; if ("TOK_TABLESERIALIZER".equals(childASTNode.getText())) { ASTNode serdeNode = (ASTNode)childASTNode.getChild(0); String serdeClassName = BaseSemanticAnalyzer.unescapeSQLString(serdeNode.getChild(0).getText()); setSerdeURI(serdeClassName); } } case HiveParser.TOK_CREATEVIEW: /* * Compiler doesn't create read/write entities for create table. * Hence we need extract dbname from db.tab format, if applicable */ currDB = extractDatabase((ASTNode)ast.getChild(0)); break; case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DROPVIEW: case HiveParser.TOK_SHOW_CREATETABLE: case HiveParser.TOK_ALTERTABLE_SERIALIZER: case HiveParser.TOK_ALTERVIEW_ADDPARTS: case HiveParser.TOK_ALTERVIEW_DROPPARTS: case HiveParser.TOK_ALTERVIEW_PROPERTIES: case HiveParser.TOK_ALTERVIEW_RENAME: case HiveParser.TOK_CREATEINDEX: case HiveParser.TOK_DROPINDEX: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: currTab = extractTable((ASTNode)ast.getFirstChildWithType(HiveParser.TOK_TABNAME)); currDB = extractDatabase((ASTNode) ast.getChild(0)); break; case HiveParser.TOK_ALTERINDEX_REBUILD: currTab = extractTable((ASTNode)ast.getChild(0)); //type is not TOK_TABNAME currDB = extractDatabase((ASTNode) ast.getChild(0)); break; case HiveParser.TOK_SHOW_TABLESTATUS: currDB = extractDatabase((ASTNode)ast.getChild(0)); int children = 
ast.getChildCount(); for (int i = 1; i < children; i++) { ASTNode child = (ASTNode) ast.getChild(i); if (child.getToken().getType() == HiveParser.Identifier) { currDB = new Database(child.getText()); break; } } //loosing the requested privileges for possible wildcard tables, since //further authorization will be done at the filter step and those unwanted will //eventually be filtered out from the output currTab = Table.ALL; break; case HiveParser.TOK_ALTERTABLE_RENAME: case HiveParser.TOK_ALTERTABLE_PROPERTIES: case HiveParser.TOK_ALTERTABLE_DROPPARTS: case HiveParser.TOK_ALTERTABLE_RENAMECOL: case HiveParser.TOK_ALTERTABLE_ADDCOLS: case HiveParser.TOK_ALTERTABLE_REPLACECOLS: case HiveParser.TOK_SHOW_TBLPROPERTIES: case HiveParser.TOK_SHOWINDEXES: case HiveParser.TOK_SHOWPARTITIONS: //token name TOK_TABNAME is not properly set in this case currTab = extractTable((ASTNode)ast.getChild(0)); currDB = extractDatabase((ASTNode)ast.getChild(0)); break; case HiveParser.TOK_MSCK: // token name TOK_TABNAME is not properly set in this case and child(0) does // not contain the table name. // TODO: Fix Hive to capture the table and DB name currOutTab = extractTable((ASTNode)ast.getChild(1)); currOutDB = extractDatabase((ASTNode)ast.getChild(0)); break; case HiveParser.TOK_ALTERTABLE_ADDPARTS: /* * Compiler doesn't create read/write entities for create table. 
* Hence we need extract dbname from db.tab format, if applicable */ currTab = extractTable((ASTNode)ast.getChild(0)); currDB = extractDatabase((ASTNode)ast.getChild(0)); partitionURI = extractPartition(ast); break; case HiveParser.TOK_CREATEFUNCTION: String udfClassName = BaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText()); try { CodeSource udfSrc = Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()) .getProtectionDomain().getCodeSource(); if (udfSrc == null) { throw new SemanticException("Could not resolve the jar for UDF class " + udfClassName); } String udfJar = udfSrc.getLocation().getPath(); if (udfJar == null || udfJar.isEmpty()) { throw new SemanticException("Could not find the jar for UDF class " + udfClassName + "to validate privileges"); } udfURI = parseURI(udfSrc.getLocation().toString(), true); } catch (ClassNotFoundException e) { throw new SemanticException("Error retrieving udf class:" + e.getMessage(), e); } // create/drop function is allowed with any database currDB = Database.ALL; break; case HiveParser.TOK_DROPFUNCTION: // create/drop function is allowed with any database currDB = Database.ALL; break; case HiveParser.TOK_LOAD: String dbName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getChild(0).getChild(0).getText()); currDB = new Database(dbName); break; case HiveParser.TOK_DESCTABLE: currDB = getCanonicalDb(); // For DESCRIBE FORMATTED/EXTENDED ast will have an additional child node with value // "FORMATTED/EXTENDED". isDescTableBasic = (ast.getChildCount() == 1); break; case HiveParser.TOK_TRUNCATETABLE: // SENTRY-826: // Truncate empty partitioned table should throw SemanticException only if the // user does not have permission. // In postAnalyze, currOutDB and currOutTbl will be added into outputHierarchy // which will be validated in the hiveAuthzBinding.authorize method. 
Preconditions.checkArgument(ast.getChildCount() == 1); // childcount is 1 for table without partition, 2 for table with partitions Preconditions.checkArgument(ast.getChild(0).getChildCount() >= 1); Preconditions.checkArgument(ast.getChild(0).getChild(0).getChildCount() == 1); currOutDB = extractDatabase((ASTNode) ast.getChild(0)); currOutTab = extractTable((ASTNode) ast.getChild(0).getChild(0).getChild(0)); break; case HiveParser.TOK_ALTERTABLE: for (Node childNode : ast.getChildren()) { ASTNode childASTNode = (ASTNode) childNode; if ("TOK_ALTERTABLE_SERIALIZER".equals(childASTNode.getText())) { ASTNode serdeNode = (ASTNode)childASTNode.getChild(0); String serdeClassName = BaseSemanticAnalyzer.unescapeSQLString(serdeNode.getText()); setSerdeURI(serdeClassName); } } default: currDB = getCanonicalDb(); break; } return ast; } // Find the current database for session private Database getCanonicalDb() { return new Database(SessionState.get().getCurrentDatabase()); } private Database extractDatabase(ASTNode ast) throws SemanticException { String tableName = BaseSemanticAnalyzer.getUnescapedName(ast); if (tableName.contains(".")) { return new Database(tableName.split("\\.")[0]); } else { return getCanonicalDb(); } } private Table extractTable(ASTNode ast) throws SemanticException { String tableName = BaseSemanticAnalyzer.getUnescapedName(ast); if (tableName.contains(".")) { return new Table(tableName.split("\\.")[1]); } else { return new Table(tableName); } } @VisibleForTesting protected static AccessURI extractPartition(ASTNode ast) throws SemanticException { for (int i = 0; i < ast.getChildCount(); i++) { ASTNode child = (ASTNode)ast.getChild(i); if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION && child.getChildCount() == 1) { return parseURI(BaseSemanticAnalyzer. 
unescapeSQLString(child.getChild(0).getText()));
      }
    }
    return null;
  }

  // Parses a non-local URI; delegates to the two-arg overload with isLocal = false.
  @VisibleForTesting
  protected static AccessURI parseURI(String uri) throws SemanticException {
    return parseURI(uri, false);
  }

  // Parses the given string into an AccessURI, qualifying it against the warehouse
  // directory. The warehouse path is made fully qualified first when it has no
  // scheme/authority, so relative URIs resolve consistently.
  @VisibleForTesting
  protected static AccessURI parseURI(String uri, boolean isLocal) throws SemanticException {
    try {
      HiveConf conf = SessionState.get().getConf();
      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
      Path warehousePath = new Path(warehouseDir);
      // If warehousePath is an absolute path with no scheme/authority, qualify it
      // through the default FileSystem before using it as the resolution base.
      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
        FileSystem fs = FileSystem.get(conf);
        warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
      }
      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
    } catch (Exception e) {
      throw new SemanticException("Error parsing URI " + uri + ": " + e.getMessage(), e);
    }
  }

  /**
   * Post analyze hook that invokes hive auth bindings
   */
  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    HiveOperation stmtOperation = getCurrentHiveStmtOp();
    HiveAuthzPrivileges stmtAuthObject;
    stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(stmtOperation);
    // must occur above the null check on stmtAuthObject
    // since GRANT/REVOKE/etc are not authorized by binding layer at present
    Subject subject = getCurrentSubject(context);
    Set<String> subjectGroups = hiveAuthzBinding.getGroups(subject);
    // Wire the binding/conf/subject into any Sentry grant/revoke tasks the compiler produced,
    // even when this statement itself is not authorized below.
    for (Task<? extends Serializable> task : rootTasks) {
      if (task instanceof SentryGrantRevokeTask) {
        SentryGrantRevokeTask sentryTask = (SentryGrantRevokeTask)task;
        sentryTask.setHiveAuthzBinding(hiveAuthzBinding);
        sentryTask.setAuthzConf(authzConf);
        sentryTask.setSubject(subject);
        sentryTask.setSubjectGroups(subjectGroups);
        sentryTask.setIpAddress(context.getIpAddress());
        sentryTask.setOperation(stmtOperation);
      }
    }
    try {
      if (stmtAuthObject == null) {
        // We don't handle authorizing this statement
        return;
      }
      /**
       * Replace DDLTask using the SentryFilterDDLTask for protection,
       * such as "show column" only allow show some column that user can access to.
       * SENTRY-847
       */
      for (int i = 0; i < rootTasks.size(); i++) {
        Task<? extends Serializable> task = rootTasks.get(i);
        if (task instanceof DDLTask) {
          SentryFilterDDLTask filterTask =
              new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
          filterTask.setWork((DDLWork)task.getWork());
          rootTasks.set(i, filterTask);
        }
      }
      authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation);
    } catch (AuthorizationException e) {
      // Run configured on-failure hooks, record the missing privileges in the session
      // conf, and surface the failure as a SemanticException.
      executeOnFailureHooks(context, stmtOperation, e);
      StringBuilder permsBuilder = new StringBuilder();
      for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
        permsBuilder.append(perm);
        permsBuilder.append(";");
      }
      String permsRequired = permsBuilder.toString();
      SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
      String msgForLog = HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE +
          "\n Required privileges for this query: " + permsRequired;
      String msgForConsole = HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE + "\n " +
          e.getMessage()+ "\n The required privileges: " + permsRequired;
      // AuthorizationException is not a real exception, use the info level to record this.
      LOG.info(msgForLog);
      throw new SemanticException(msgForConsole, e);
    } finally {
      // Always close the binding once the statement has been (or failed to be) authorized.
      hiveAuthzBinding.close();
    }
    if ("true".equalsIgnoreCase(context.getConf().
        get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
      throw new SemanticException(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR +
          " Mock query compilation aborted. Set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION +
          " to 'false' for normal query processing");
    }
  }

  // Builds an on-failure hook context from the current statement state and runs every
  // hook listed in AUTHZ_ONFAILURE_HOOKS. Hook errors are logged, never propagated.
  private void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
      HiveOperation hiveOp, AuthorizationException e) {
    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
        context.getCommand(), context.getInputs(), context.getOutputs(), hiveOp, currDB,
        currTab, udfURI, null, context.getUserName(), context.getIpAddress(), e,
        context.getConf());
    String csHooks = authzConf.get(
        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
    try {
      for (Hook aofh : getHooks(csHooks)) {
        ((SentryOnFailureHook)aofh).run(hookCtx);
      }
    } catch (Exception ex) {
      LOG.error("Error executing hook:", ex);
    }
  }

  // Static variant: runs the given comma-separated hook classes against a prebuilt
  // context. Hook errors are logged, never propagated.
  public static void runFailureHook(SentryOnFailureHookContext hookContext, String csHooks) {
    try {
      for (Hook aofh : getHooks(csHooks)) {
        ((SentryOnFailureHook) aofh).run(hookContext);
      }
    } catch (Exception ex) {
      LOG.error("Error executing hook:", ex);
    }
  }

  /**
   * Convert the input/output entities into authorizables. generate
   * authorizables for cases like Database and metadata operations where the
   * compiler doesn't capture entities.
invoke the hive binding to validate * permissions * * @param context * @param stmtAuthObject * @param stmtOperation * @throws AuthorizationException */ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context, HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws AuthorizationException { Set<ReadEntity> inputs = context.getInputs(); Set<WriteEntity> outputs = context.getOutputs(); List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); if(LOG.isDebugEnabled()) { LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope()); LOG.debug("context.getInputs() = " + context.getInputs()); LOG.debug("context.getOutputs() = " + context.getOutputs()); } // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level. // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column // level privs. if (isDescTableBasic) { stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS); } switch (stmtAuthObject.getOperationScope()) { case SERVER : // validate server level privileges if applicable. Eg create UDF,register jar etc .. 
List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>(); serverHierarchy.add(hiveAuthzBinding.getAuthServer()); inputHierarchy.add(serverHierarchy); break; case DATABASE: // workaround for database scope statements (create/alter/drop db) List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>(); dbHierarchy.add(hiveAuthzBinding.getAuthServer()); dbHierarchy.add(currDB); inputHierarchy.add(dbHierarchy); outputHierarchy.add(dbHierarchy); getInputHierarchyFromInputs(inputHierarchy, inputs); if (serdeURI != null) { List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>(); serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer()); serdeUriHierarchy.add(serdeURI); outputHierarchy.add(serdeUriHierarchy); } break; case TABLE: // workaround for add partitions if(partitionURI != null) { inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI)); } getInputHierarchyFromInputs(inputHierarchy, inputs); for (WriteEntity writeEntity: outputs) { if (filterWriteEntity(writeEntity)) { continue; } List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); entityHierarchy.add(hiveAuthzBinding.getAuthServer()); entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity)); outputHierarchy.add(entityHierarchy); } // workaround for metadata queries. 
// Capture the table name in pre-analyze and include that in the input entity list if (currTab != null) { List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); externalAuthorizableHierarchy.add(currDB); externalAuthorizableHierarchy.add(currTab); inputHierarchy.add(externalAuthorizableHierarchy); } // workaround for DDL statements // Capture the table name in pre-analyze and include that in the output entity list if (currOutTab != null) { List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); externalAuthorizableHierarchy.add(currOutDB); externalAuthorizableHierarchy.add(currOutTab); outputHierarchy.add(externalAuthorizableHierarchy); } if (serdeURI != null) { List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>(); serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer()); serdeUriHierarchy.add(serdeURI); outputHierarchy.add(serdeUriHierarchy); } break; case FUNCTION: /* The 'FUNCTION' privilege scope currently used for * - CREATE TEMP FUNCTION * - DROP TEMP FUNCTION. */ if (udfURI != null) { List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>(); udfUriHierarchy.add(hiveAuthzBinding.getAuthServer()); udfUriHierarchy.add(udfURI); inputHierarchy.add(udfUriHierarchy); for (WriteEntity writeEntity : outputs) { List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); entityHierarchy.add(hiveAuthzBinding.getAuthServer()); entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity)); outputHierarchy.add(entityHierarchy); } } break; case CONNECT: /* The 'CONNECT' is an implicit privilege scope currently used for * - USE <db> * It's allowed when the user has any privilege on the current database. 
For application * backward compatibility, we allow (optional) implicit connect permission on 'default' db. */ List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>(); connectHierarchy.add(hiveAuthzBinding.getAuthServer()); // by default allow connect access to default db Table currTbl = Table.ALL; Column currCol = Column.ALL; if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) && "false".equalsIgnoreCase(authzConf. get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) { currDB = Database.ALL; currTbl = Table.SOME; } connectHierarchy.add(currDB); connectHierarchy.add(currTbl); connectHierarchy.add(currCol); inputHierarchy.add(connectHierarchy); outputHierarchy.add(connectHierarchy); break; case COLUMN: for (ReadEntity readEntity: inputs) { if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) { addColumnHierarchy(inputHierarchy, readEntity); } else { List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); entityHierarchy.add(hiveAuthzBinding.getAuthServer()); entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity)); entityHierarchy.add(Column.ALL); inputHierarchy.add(entityHierarchy); } } break; default: throw new AuthorizationException("Unknown operation scope type " + stmtAuthObject.getOperationScope().toString()); } HiveAuthzBinding binding = null; try { binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName()); } catch (SemanticException e) { // Will use the original hiveAuthzBinding binding = hiveAuthzBinding; } // validate permission binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy, outputHierarchy); } private HiveOperation getCurrentHiveStmtOp() { SessionState sessState = SessionState.get(); if (sessState == null) { // TODO: Warn return null; } return sessState.getHiveOperation(); } private Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) { // 
// Extract the username from the hook context
    return new Subject(context.getUserName());
  }

  // Build the hierarchy of authorizable object for the given entity type.
  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
    switch (entity.getType()) {
    case TABLE:
      objectHierarchy.add(new Database(entity.getTable().getDbName()));
      objectHierarchy.add(new Table(entity.getTable().getTableName()));
      break;
    case PARTITION:
    case DUMMYPARTITION:
      // Partitions are authorized at their owning table's level.
      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
      break;
    case DFS_DIR:
    case LOCAL_DIR:
      try {
        objectHierarchy.add(parseURI(entity.toString(),
            entity.getType().equals(Entity.Type.LOCAL_DIR)));
      } catch (Exception e) {
        throw new AuthorizationException("Failed to get File URI", e);
      }
      break;
    case DATABASE:
    case FUNCTION:
      // TODO use database entities from compiler instead of capturing from AST
      break;
    default:
      throw new UnsupportedOperationException("Unsupported entity type " +
          entity.getType().name());
    }
    return objectHierarchy;
  }

  /**
   * Add column level hierarchy to inputHierarchy
   *
   * @param inputHierarchy
   * @param entity
   * @param sentryContext
   */
  private void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
      ReadEntity entity) {
    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
    switch (entity.getType()) {
    case TABLE:
    case PARTITION:
      // One hierarchy per accessed column, each a copy of the entity hierarchy plus the column.
      List<String> cols = entity.getAccessedColumns();
      for (String col : cols) {
        List<DBModelAuthorizable> colHierarchy =
            new ArrayList<DBModelAuthorizable>(entityHierarchy);
        colHierarchy.add(new Column(col));
        inputHierarchy.add(colHierarchy);
      }
      break;
    default:
      inputHierarchy.add(entityHierarchy);
    }
  }

  /**
   * Get Authorizable from inputs and put into inputHierarchy
   *
   * @param inputHierarchy
   * @param entity
   * @param sentryContext
   */
  private void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
      Set<ReadEntity> inputs) {
    for (ReadEntity readEntity: inputs) {
      // skip the tables/view that are part of expanded view definition
      // skip the Hive generated dummy entities created for queries like 'select <expr>'
      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
        continue;
      }
      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
        addColumnHierarchy(inputHierarchy, readEntity);
      } else {
        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
        inputHierarchy.add(entityHierarchy);
      }
    }
  }

  // Check if this write entity needs to skipped
  private boolean filterWriteEntity(WriteEntity writeEntity)
      throws AuthorizationException {
    // skip URI validation for session scratch file URIs
    if (writeEntity.isTempURI()) {
      return true;
    }
    try {
      // Directory outputs that fall under the DFS or local scratch dirs are session
      // temporaries and are excluded from authorization.
      if (writeEntity.getTyp().equals(Type.DFS_DIR)
          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
        HiveConf conf = SessionState.get().getConf();
        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
            conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
            writeEntity.getLocation().getPath()));
        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
        if (PathUtils.impliesURI(scratchURI, requestURI)) {
          return true;
        }
        URI localScratchURI = new URI(PathUtils.parseLocalURI(
            conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
        URI localRequestURI = new URI(PathUtils.parseLocalURI(
            writeEntity.getLocation().getPath()));
        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " +
            localRequestURI);
        if (PathUtils.impliesURI(localScratchURI,
localRequestURI)) {
          return true;
        }
      }
    } catch (Exception e) {
      throw new AuthorizationException("Failed to extract uri details", e);
    }
    return false;
  }

  // Filters a SHOW TABLES result down to the tables the user holds SELECT or INSERT
  // (at column scope) on, using a privilege-cache-backed binding for the checks.
  public static List<String> filterShowTables(
      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
      HiveOperation operation, String userName, String dbName)
      throws SemanticException {
    List<String> filteredResult = new ArrayList<String>();
    Subject subject = new Subject(userName);
    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
        addInputObjectPriviledge(AuthorizableType.Column,
            EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
        setOperationScope(HiveOperationScope.TABLE).
        setOperationType(HiveOperationType.INFO).
        build();

    HiveAuthzBinding hiveBindingWithPrivilegeCache =
        getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);

    for (String tableName : queryResult) {
      // if user has privileges on table, add to filtered list, else discard
      Table table = new Table(tableName);
      Database database;
      database = new Database(dbName);

      List<List<DBModelAuthorizable>> inputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<List<DBModelAuthorizable>> outputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<DBModelAuthorizable> externalAuthorizableHierarchy =
          new ArrayList<DBModelAuthorizable>();
      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
      externalAuthorizableHierarchy.add(database);
      externalAuthorizableHierarchy.add(table);
      externalAuthorizableHierarchy.add(Column.ALL);
      inputHierarchy.add(externalAuthorizableHierarchy);

      try {
        // do the authorization by new HiveAuthzBinding with PrivilegeCache
        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
            inputHierarchy, outputHierarchy);
        filteredResult.add(table.getName());
      } catch (AuthorizationException e) {
        // squash the exception, user doesn't have privileges, so the table is
        // not added to
        // filtered list.
      }
    }
    return filteredResult;
  }

  // Filters a SHOW COLUMNS result down to the columns the user is authorized to see,
  // per the SHOWCOLUMNS privilege definition.
  public static List<FieldSchema> filterShowColumns(
      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
      HiveOperation operation, String userName, String tableName, String dbName)
      throws SemanticException {
    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
    Subject subject = new Subject(userName);
    HiveAuthzPrivileges columnMetaDataPrivilege =
        HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    HiveAuthzBinding hiveBindingWithPrivilegeCache =
        getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);

    Database database = new Database(dbName);
    Table table = new Table(tableName);
    for (FieldSchema col : cols) {
      // if user has privileges on column, add to filtered list, else discard
      List<List<DBModelAuthorizable>> inputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<List<DBModelAuthorizable>> outputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<DBModelAuthorizable> externalAuthorizableHierarchy =
          new ArrayList<DBModelAuthorizable>();
      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
      externalAuthorizableHierarchy.add(database);
      externalAuthorizableHierarchy.add(table);
      externalAuthorizableHierarchy.add(new Column(col.getName()));
      inputHierarchy.add(externalAuthorizableHierarchy);

      try {
        // do the authorization by new HiveAuthzBinding with PrivilegeCache
        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
            inputHierarchy, outputHierarchy);
        filteredResult.add(col);
      } catch (AuthorizationException e) {
        // squash the exception, user doesn't have privileges, so the column is
        // not added to
        // filtered list.
      }
    }
    return filteredResult;
  }

  // Filters a SHOW DATABASES result down to the databases the user can connect to.
  // The 'default' db is passed through unconditionally unless AUTHZ_RESTRICT_DEFAULT_DB is set.
  public static List<String> filterShowDatabases(
      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
      HiveOperation operation, String userName) throws SemanticException {
    List<String> filteredResult = new ArrayList<String>();
    Subject subject = new Subject(userName);
    HiveAuthzBinding hiveBindingWithPrivilegeCache =
        getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);

    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
        addInputObjectPriviledge(AuthorizableType.Column,
            EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
        setOperationScope(HiveOperationScope.CONNECT).
        setOperationType(HiveOperationType.QUERY).
        build();

    for (String dbName:queryResult) {
      // if user has privileges on database, add to filtered list, else discard
      Database database = null;

      // if default is not restricted, continue
      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
          hiveAuthzBinding.getAuthzConf().get(
              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
        filteredResult.add(DEFAULT_DATABASE_NAME);
        continue;
      }

      database = new Database(dbName);

      List<List<DBModelAuthorizable>> inputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<List<DBModelAuthorizable>> outputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      List<DBModelAuthorizable> externalAuthorizableHierarchy =
          new ArrayList<DBModelAuthorizable>();
      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
      externalAuthorizableHierarchy.add(database);
      externalAuthorizableHierarchy.add(Table.ALL);
      externalAuthorizableHierarchy.add(Column.ALL);
      inputHierarchy.add(externalAuthorizableHierarchy);

      try {
        // do the authorization by new HiveAuthzBinding with PrivilegeCache
        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
            inputHierarchy, outputHierarchy);
filteredResult.add(database.getName()); } catch (AuthorizationException e) { // squash the exception, user doesn't have privileges, so the table is // not added to // filtered list. } } return filteredResult; } /** * Check if the given read entity is a table that has parents of type Table * Hive compiler performs a query rewrite by replacing view with its definition. In the process, tt captures both * the original view and the tables/view that it selects from . * The access authorization is only interested in the top level views and not the underlying tables. * @param readEntity * @return */ private boolean isChildTabForView(ReadEntity readEntity) { // If this is a table added for view, then we need to skip that if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) { return false; } if (readEntity.getParents() != null && readEntity.getParents().size() > 0) { for (ReadEntity parentEntity : readEntity.getParents()) { if (!parentEntity.getType().equals(Type.TABLE)) { return false; } } return true; } else { return false; } } /** * Returns the hooks specified in a configuration variable. The hooks are returned in a list in * the order they were specified in the configuration variable. * * @param hookConfVar The configuration variable specifying a comma separated list of the hook * class names. 
* @return A list of the hooks, in the order they are listed in the value of hookConfVar * @throws Exception */ private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception { List<T> hooks = new ArrayList<T>(); if (csHooks.isEmpty()) { return hooks; } for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) { try { @SuppressWarnings("unchecked") T hook = (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance(); hooks.add(hook); } catch (ClassNotFoundException e) { LOG.error(hookClass + " Class not found:" + e.getMessage()); throw e; } } return hooks; } // Check if the given entity is identified as dummy by Hive compilers. private boolean isDummyEntity(Entity entity) { return entity.isDummy(); } // create hiveBinding with PrivilegeCache private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding, String userName) throws SemanticException { // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider(); Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges( authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(), hiveAuthzBinding.getAuthServer()); // create PrivilegeCache using user's privileges PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges); try { // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(), hiveAuthzBinding.getAuthzConf(), privilegeCache); } catch (Exception e) { LOG.error("Can not create HiveAuthzBinding with privilege cache."); throw new SemanticException(e); } } private static boolean hasPrefixMatch(List<String> prefixList, final String str) { for (String prefix : prefixList) { if (str.startsWith(prefix)) { return 
true; } } return false; } /** * Set the Serde URI privileges. If the URI privileges are not set, which serdeURI will be null, * the URI authorization checks will be skipped. */ private void setSerdeURI(String serdeClassName) throws SemanticException { if (!serdeURIPrivilegesEnabled) { return; } // WhiteList Serde Jar can be used by any users. WhiteList checking is // done by comparing the Java package name. The assumption is cluster // admin will ensure there is no Java namespace collision. // e.g org.apache.hadoop.hive.serde2 is used by hive and cluster admin should // ensure no custom Serde class is introduced under the same namespace. if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) { try { CodeSource serdeSrc = Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader()).getProtectionDomain().getCodeSource(); if (serdeSrc == null) { throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName); } String serdeJar = serdeSrc.getLocation().getPath(); if (serdeJar == null || serdeJar.isEmpty()) { throw new SemanticException("Could not find the jar for Serde class " + serdeClassName + "to validate privileges"); } serdeURI = parseURI(serdeSrc.getLocation().toString(), true); } catch (ClassNotFoundException e) { throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e); } } } }
package com.github.kongchen.swagger.docgen.reader;

import com.github.kongchen.swagger.docgen.GenerateException;
import com.github.kongchen.swagger.docgen.spring.SpringResource;
import com.github.kongchen.swagger.docgen.util.SpringUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;
import io.swagger.annotations.AuthorizationScope;
import io.swagger.converter.ModelConverters;
import io.swagger.models.Model;
import io.swagger.models.Operation;
import io.swagger.models.Response;
import io.swagger.models.SecurityRequirement;
import io.swagger.models.Swagger;
import io.swagger.models.Tag;
import io.swagger.models.parameters.Parameter;
import io.swagger.models.properties.Property;
import io.swagger.models.properties.RefProperty;
import org.apache.maven.plugin.logging.Log;
import org.codehaus.plexus.util.StringUtils;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Reads Spring MVC controller classes annotated with {@code @RequestMapping} /
 * {@code @Api} and builds a {@link Swagger} model from them.
 *
 * <p>Controllers are first grouped into {@link SpringResource}s (one per
 * controller + mapping + HTTP verb combination, see {@link #generateResourceMap}),
 * and each resource is then turned into Swagger paths and operations.
 */
public class SpringMvcApiReader extends AbstractReader implements ClassSwaggerReader {
    private static final ResponseContainerConverter RESPONSE_CONTAINER_CONVERTER = new ResponseContainerConverter();

    // Base mapping of the controller currently being read; used to build full operation paths.
    private String resourcePath;

    public SpringMvcApiReader(Swagger swagger, Log log) {
        super(swagger, log);
    }

    /**
     * Reads all given controller classes into the shared {@link Swagger} model.
     *
     * @param classes controller classes discovered by the plugin
     * @return the populated Swagger model
     * @throws GenerateException declared for the {@link ClassSwaggerReader} contract
     */
    @Override
    public Swagger read(Set<Class<?>> classes) throws GenerateException {
        //relate all methods to one base request mapping if multiple controllers exist for that mapping
        //get all methods from each controller & find their request mapping
        //create map - resource string (after first slash) as key, new SpringResource as value
        Map<String, SpringResource> resourceMap = generateResourceMap(classes);
        for (SpringResource resource : resourceMap.values()) {
            read(resource);
        }
        return swagger;
    }

    /**
     * Reads a single {@link SpringResource} (one controller mapping) into the
     * Swagger model: collects controller-level produces/consumes, tags and
     * security requirements, then emits one operation per handler method and
     * HTTP verb.
     *
     * @param resource the controller/mapping pair to read
     * @return the (possibly newly created) Swagger model
     */
    public Swagger read(SpringResource resource) {
        if (swagger == null) {
            swagger = new Swagger();
        }
        List<Method> methods = resource.getMethods();
        Map<String, Tag> tags = new HashMap<String, Tag>();

        List<SecurityRequirement> resourceSecurities = new ArrayList<SecurityRequirement>();

        // Add the description from the controller api
        Class<?> controller = resource.getControllerClass();
        RequestMapping controllerRM = AnnotationUtils.findAnnotation(controller, RequestMapping.class);

        // Controller-level produces/consumes act as defaults for its methods.
        String[] controllerProduces = new String[0];
        String[] controllerConsumes = new String[0];
        if (controllerRM != null) {
            controllerConsumes = controllerRM.consumes();
            controllerProduces = controllerRM.produces();
        }

        if (controller.isAnnotationPresent(Api.class)) {
            Api api = AnnotationUtils.findAnnotation(controller, Api.class);
            if (!canReadApi(false, api)) {
                return swagger;
            }
            tags = updateTagsForApi(null, api);
            resourceSecurities = getSecurityRequirements(api);
        }

        resourcePath = resource.getControllerMapping();

        //collect api from method with @RequestMapping
        Map<String, List<Method>> apiMethodMap = collectApisByRequestMapping(methods);

        for (String path : apiMethodMap.keySet()) {
            for (Method method : apiMethodMap.get(path)) {
                RequestMapping requestMapping = AnnotationUtils.findAnnotation(method, RequestMapping.class);
                if (requestMapping == null) {
                    continue;
                }
                ApiOperation apiOperation = AnnotationUtils.findAnnotation(method, ApiOperation.class);
                if (apiOperation == null || apiOperation.hidden()) {
                    continue;
                }

                Map<String, String> regexMap = new HashMap<String, String>();
                String operationPath = parseOperationPath(path, regexMap);

                // One Swagger operation per declared HTTP method.
                for (RequestMethod requestMethod : requestMapping.method()) {
                    String httpMethod = requestMethod.toString().toLowerCase();
                    Operation operation = parseMethod(method);

                    updateOperationParameters(new ArrayList<Parameter>(), regexMap, operation);
                    updateOperationProtocols(apiOperation, operation);

                    String[] apiProduces = requestMapping.produces();
                    String[] apiConsumes = requestMapping.consumes();

                    // Fall back to the controller-level values when the method declares none.
                    apiProduces = (apiProduces.length == 0) ? controllerProduces : apiProduces;
                    apiConsumes = (apiConsumes.length == 0) ? controllerConsumes : apiConsumes;

                    apiConsumes = updateOperationConsumes(new String[0], apiConsumes, operation);
                    apiProduces = updateOperationProduces(new String[0], apiProduces, operation);

                    updateTagsForOperation(operation, apiOperation);
                    updateOperation(apiConsumes, apiProduces, tags, resourceSecurities, operation);
                    updatePath(operationPath, httpMethod, operation);
                }
            }
        }
        return swagger;
    }

    /**
     * Builds a Swagger {@link Operation} from a single handler method: summary,
     * extensions, security, response schema/models, declared responses,
     * deprecation flag and parameters.
     *
     * @param method the handler method, expected to carry {@code @ApiOperation}
     * @return the operation, or {@code null} when the method has no visible
     *         {@code @ApiOperation} (missing or {@code hidden=true})
     */
    private Operation parseMethod(Method method) {
        Operation operation = new Operation();

        Type responseClass = null;
        String responseContainer = null;

        String operationId = method.getName();

        ApiOperation apiOperation = AnnotationUtils.findAnnotation(method, ApiOperation.class);

        // Guard against a missing annotation: findAnnotation returns null when it
        // is absent, and the previous code dereferenced it unconditionally (NPE).
        if (apiOperation == null || apiOperation.hidden()) {
            return null;
        }
        if (!apiOperation.nickname().isEmpty()) {
            operationId = apiOperation.nickname();
        }

        Map<String, Property> defaultResponseHeaders = parseResponseHeaders(apiOperation.responseHeaders());

        operation.summary(apiOperation.value()).description(apiOperation.notes());

        Set<Map<String, Object>> customExtensions = parseCustomExtensions(apiOperation.extensions());

        for (Map<String, Object> extension : customExtensions) {
            if (extension == null) {
                continue;
            }
            for (Map.Entry<String, Object> map : extension.entrySet()) {
                // Vendor extension keys must carry the "x-" prefix.
                operation.setVendorExtension(
                        map.getKey().startsWith("x-") ? map.getKey() : "x-" + map.getKey(),
                        map.getValue()
                );
            }
        }

        if (!apiOperation.response().equals(Void.class)) {
            responseClass = apiOperation.response();
        }
        if (!apiOperation.responseContainer().isEmpty()) {
            responseContainer = apiOperation.responseContainer();
        }

        // Security requirements declared via @ApiOperation#authorizations.
        List<SecurityRequirement> securities = new ArrayList<SecurityRequirement>();
        for (Authorization auth : apiOperation.authorizations()) {
            if (!auth.value().isEmpty()) {
                SecurityRequirement security = new SecurityRequirement();
                security.setName(auth.value());
                for (AuthorizationScope scope : auth.scopes()) {
                    if (!scope.scope().isEmpty()) {
                        security.addScope(scope.scope());
                    }
                }
                securities.add(security);
            }
        }

        for (SecurityRequirement sec : securities) {
            operation.security(sec);
        }

        if (responseClass == null) {
            // pick out response from method declaration
            LOG.info("picking up response class from method " + method);
            responseClass = method.getGenericReturnType();
        }
        // Unwrap ResponseEntity<T> to its type argument T.
        if (responseClass instanceof ParameterizedType && ResponseEntity.class.equals(((ParameterizedType) responseClass).getRawType())) {
            responseClass = ((ParameterizedType) responseClass).getActualTypeArguments()[0];
        }
        boolean hasApiAnnotation = false;
        if (responseClass instanceof Class) {
            hasApiAnnotation = AnnotationUtils.findAnnotation((Class) responseClass, Api.class) != null;
        }
        if (responseClass != null
                && !responseClass.equals(Void.class)
                && !responseClass.equals(ResponseEntity.class)
                && !hasApiAnnotation) {
            if (isPrimitive(responseClass)) {
                Property property = ModelConverters.getInstance().readAsProperty(responseClass);
                if (property != null) {
                    Property responseProperty = RESPONSE_CONTAINER_CONVERTER.withResponseContainer(responseContainer, property);

                    operation.response(apiOperation.code(), new Response()
                            .description("successful operation")
                            .schema(responseProperty)
                            .headers(defaultResponseHeaders));
                }
            } else if (!responseClass.equals(Void.class) && !responseClass.equals(void.class)) {
                Map<String, Model> models = ModelConverters.getInstance().read(responseClass);
                if (models.isEmpty()) {
                    // Not a model (e.g. a simple type): document it as a direct property.
                    Property pp = ModelConverters.getInstance().readAsProperty(responseClass);
                    operation.response(apiOperation.code(), new Response()
                            .description("successful operation")
                            .schema(pp)
                            .headers(defaultResponseHeaders));
                }
                for (String key : models.keySet()) {
                    Property responseProperty = RESPONSE_CONTAINER_CONVERTER.withResponseContainer(responseContainer, new RefProperty().asDefault(key));

                    operation.response(apiOperation.code(), new Response()
                            .description("successful operation")
                            .schema(responseProperty)
                            .headers(defaultResponseHeaders));
                    swagger.model(key, models.get(key));
                }
                // Register all transitively referenced models too.
                models = ModelConverters.getInstance().readAll(responseClass);
                for (Map.Entry<String, Model> entry : models.entrySet()) {
                    swagger.model(entry.getKey(), entry.getValue());
                }
            }
        }

        operation.operationId(operationId);

        // NOTE: the previous implementation also accumulated requestMapping.produces()
        // and .consumes() into local lists that were never read; that dead code
        // (and the unused `hidden` flag flagged by a FIXME) has been removed.

        ApiResponses responseAnnotation = AnnotationUtils.findAnnotation(method, ApiResponses.class);
        if (responseAnnotation != null) {
            updateApiResponse(operation, responseAnnotation);
        } else {
            ResponseStatus responseStatus = AnnotationUtils.findAnnotation(method, ResponseStatus.class);
            if (responseStatus != null) {
                operation.response(responseStatus.value().value(), new Response().description(responseStatus.reason()));
            }
        }

        Deprecated annotation = AnnotationUtils.findAnnotation(method, Deprecated.class);
        if (annotation != null) {
            operation.deprecated(true);
        }

        // process parameters
        Class[] parameterTypes = method.getParameterTypes();
        Type[] genericParameterTypes = method.getGenericParameterTypes();
        Annotation[][] paramAnnotations = method.getParameterAnnotations();
        for (int i = 0; i < parameterTypes.length; i++) {
            Type type = genericParameterTypes[i];
            List<Annotation> annotations = Arrays.asList(paramAnnotations[i]);
            List<Parameter> parameters = getParameters(type, annotations);
            for (Parameter parameter : parameters) {
                operation.parameter(parameter);
            }
        }

        if (operation.getResponses() == null) {
            operation.defaultResponse(new Response().description("successful operation"));
        }

        // Process @ApiImplicitParams
        this.readImplicitParameters(method, operation);

        processOperationDecorator(operation, method);

        return operation;
    }

    /**
     * Groups handler methods by their full request path. Methods without a
     * method-level {@code @RequestMapping#value} fall back to the controller's
     * {@link #resourcePath}.
     */
    private Map<String, List<Method>> collectApisByRequestMapping(List<Method> methods) {
        Map<String, List<Method>> apiMethodMap = new HashMap<String, List<Method>>();
        for (Method method : methods) {
            if (method.isAnnotationPresent(RequestMapping.class)) {
                RequestMapping requestMapping = AnnotationUtils.findAnnotation(method, RequestMapping.class);
                String path;
                if (requestMapping.value().length != 0) {
                    path = generateFullPath(requestMapping.value()[0]);
                } else {
                    path = resourcePath;
                }
                if (apiMethodMap.containsKey(path)) {
                    apiMethodMap.get(path).add(method);
                } else {
                    List<Method> ms = new ArrayList<Method>();
                    ms.add(method);
                    apiMethodMap.put(path, ms);
                }
            }
        }

        return apiMethodMap;
    }

    /**
     * Joins {@link #resourcePath} and a method-level path, inserting a slash
     * only when the method path does not already start with one.
     */
    private String generateFullPath(String path) {
        if (StringUtils.isNotEmpty(path)) {
            return this.resourcePath + (path.startsWith("/") ? path : '/' + path);
        } else {
            return this.resourcePath;
        }
    }

    @Deprecated // TODO: Delete method never used
    private Class<?> getGenericSubtype(Class<?> clazz, Type type) {
        if (!(clazz.getName().equals("void") || type.toString().equals("void"))) {
            try {
                ParameterizedType paramType = (ParameterizedType) type;
                Type[] argTypes = paramType.getActualTypeArguments();
                if (argTypes.length > 0) {
                    return (Class<?>) argTypes[0];
                }
            } catch (ClassCastException e) {
                //FIXME: find out why this happens to only certain types
            }
        }
        return clazz;
    }

    /**
     * Helper method for loadDocuments(): registers every (controller mapping,
     * method mapping, HTTP verb) combination of one controller into the
     * resource map.
     *
     * @param controllerClazz the controller class being analyzed
     * @param resourceMap     map being populated; also returned for chaining
     * @param description     description passed through to each {@link SpringResource}
     */
    private Map<String, SpringResource> analyzeController(Class<?> controllerClazz, Map<String, SpringResource> resourceMap, String description) {
        String[] controllerRequestMappingValues = SpringUtils.getControllerResquestMapping(controllerClazz);

        // Iterate over all value attributes of the class-level RequestMapping annotation
        for (String controllerRequestMappingValue : controllerRequestMappingValues) {
            for (Method method : controllerClazz.getMethods()) {
                RequestMapping methodRequestMapping = AnnotationUtils.findAnnotation(method, RequestMapping.class);

                // Look for method-level @RequestMapping annotation
                if (methodRequestMapping != null) {
                    RequestMethod[] requestMappingRequestMethods = methodRequestMapping.method();

                    // For each method-level @RequestMapping annotation, iterate over HTTP Verb
                    for (RequestMethod requestMappingRequestMethod : requestMappingRequestMethods) {
                        String[] methodRequestMappingValues = methodRequestMapping.value();

                        // Check for cases where method-level @RequestMapping#value is not set, and use the controllers @RequestMapping
                        if (methodRequestMappingValues.length == 0) {
                            // The map key is a concat of the following:
                            //   1. The controller package
                            //   2. The controller class name
                            //   3. The controller-level @RequestMapping#value
                            String resourceKey = controllerClazz.getCanonicalName() + controllerRequestMappingValue
                                    + requestMappingRequestMethod;
                            if (!resourceMap.containsKey(resourceKey)) {
                                resourceMap.put(
                                        resourceKey,
                                        new SpringResource(controllerClazz, controllerRequestMappingValue, resourceKey, description));
                            }
                            resourceMap.get(resourceKey).addMethod(method);
                        } else {
                            // Here we know that method-level @RequestMapping#value is populated, so
                            // iterate over all the @RequestMapping#value attributes, and add them to the resource map.
                            for (String methodRequestMappingValue : methodRequestMappingValues) {
                                String resourceKey = controllerClazz.getCanonicalName() + controllerRequestMappingValue
                                        + methodRequestMappingValue + requestMappingRequestMethod;
                                if (!methodRequestMappingValue.isEmpty()) {
                                    if (!resourceMap.containsKey(resourceKey)) {
                                        resourceMap.put(resourceKey, new SpringResource(controllerClazz, methodRequestMappingValue, resourceKey, description));
                                    }
                                    resourceMap.get(resourceKey).addMethod(method);
                                }
                            }
                        }
                    }
                }
            }
        }
        controllerClazz.getFields();
        controllerClazz.getDeclaredFields(); //<--In case developer declares a field without an associated getter/setter.
        //this will allow NoClassDefFoundError to be caught before it triggers bamboo failure.

        return resourceMap;
    }

    /**
     * Builds the resource map for all valid controller classes. Reflection
     * failures caused by missing dependencies are logged and skipped so that a
     * single broken class does not abort the whole documentation run.
     */
    protected Map<String, SpringResource> generateResourceMap(Set<Class<?>> validClasses) throws GenerateException {
        Map<String, SpringResource> resourceMap = new HashMap<String, SpringResource>();
        for (Class<?> aClass : validClasses) {
            //This try/catch block is to stop a bamboo build from failing due to NoClassDefFoundError
            //This occurs when a class or method loaded by reflections contains a type that has no dependency
            try {
                resourceMap = analyzeController(aClass, resourceMap, "");
                // Touch all methods (including inherited ones) so that any
                // NoClassDefFoundError surfaces here and is handled below.
                List<Method> mList = new ArrayList<Method>(Arrays.asList(aClass.getMethods()));
                if (aClass.getSuperclass() != null) {
                    mList.addAll(Arrays.asList(aClass.getSuperclass().getMethods()));
                }
            } catch (NoClassDefFoundError e) {
                LOG.error(e.getMessage());
                LOG.info(aClass.getName());
                //exception occurs when a method type or annotation is not recognized by the plugin
            }
        }

        return resourceMap;
    }
}
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.messaging.simp.stomp; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.Principal; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.springframework.lang.Nullable; import org.springframework.messaging.Message; import org.springframework.messaging.simp.SimpMessageHeaderAccessor; import org.springframework.messaging.simp.SimpMessageType; import org.springframework.messaging.support.MessageHeaderAccessor; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.MimeType; import org.springframework.util.MimeTypeUtils; import org.springframework.util.StringUtils; /** * A {@code MessageHeaderAccessor} to use when creating a {@code Message} from * a decoded STOMP frame, or when encoding a {@code Message} to a STOMP frame. * * <p>When created from STOMP frame content, the actual STOMP headers are * stored in the native header sub-map managed by the parent class * {@link org.springframework.messaging.support.NativeMessageHeaderAccessor} * while the parent class {@link SimpMessageHeaderAccessor} manages common * processing headers some of which are based on STOMP headers * (e.g. destination, content-type, etc). 
 *
 * <p>An instance of this class can also be created by wrapping an existing
 * {@code Message}. That message may have been created with the more generic
 * {@link org.springframework.messaging.simp.SimpMessageHeaderAccessor} in
 * which case STOMP headers are created from common processing headers.
 * In this case it is also necessary to invoke either
 * {@link #updateStompCommandAsClientMessage()} or
 * {@link #updateStompCommandAsServerMessage()} if sending a message and
 * depending on whether a message is sent to a client or the message broker.
 *
 * @author Rossen Stoyanchev
 * @since 4.0
 */
public class StompHeaderAccessor extends SimpMessageHeaderAccessor {

    // Process-wide counter used to generate unique "message-id" values for
    // server MESSAGE frames (see updateStompCommandAsServerMessage()).
    private static final AtomicLong messageIdCounter = new AtomicLong();

    // Value returned by getHeartbeat() when no "heart-beat" header is present:
    // {0, 0} = no heartbeats in either direction.
    private static final long[] DEFAULT_HEARTBEAT = new long[] {0, 0};


    // STOMP header names

    public static final String STOMP_ID_HEADER = "id";

    public static final String STOMP_HOST_HEADER = "host";

    public static final String STOMP_ACCEPT_VERSION_HEADER = "accept-version";

    public static final String STOMP_MESSAGE_ID_HEADER = "message-id";

    public static final String STOMP_RECEIPT_HEADER = "receipt"; // any client frame except CONNECT

    public static final String STOMP_RECEIPT_ID_HEADER = "receipt-id"; // RECEIPT frame

    public static final String STOMP_SUBSCRIPTION_HEADER = "subscription";

    public static final String STOMP_VERSION_HEADER = "version";

    public static final String STOMP_MESSAGE_HEADER = "message";

    public static final String STOMP_ACK_HEADER = "ack";

    public static final String STOMP_NACK_HEADER = "nack";

    public static final String STOMP_LOGIN_HEADER = "login";

    public static final String STOMP_PASSCODE_HEADER = "passcode";

    public static final String STOMP_DESTINATION_HEADER = "destination";

    public static final String STOMP_CONTENT_TYPE_HEADER = "content-type";

    public static final String STOMP_CONTENT_LENGTH_HEADER = "content-length";

    public static final String STOMP_HEARTBEAT_HEADER = "heart-beat";


    // Other header names

    // Internal (non-native) header holding the StompCommand for this frame.
    private static final String COMMAND_HEADER = "stompCommand";

    // Internal (non-native) header holding the real passcode; the native
    // "passcode" header is replaced by the literal "PROTECTED" (see protectPasscode()).
    private static final String CREDENTIALS_HEADER = "stompCredentials";


    /**
     * A constructor for creating message headers from a parsed STOMP frame.
     */
    StompHeaderAccessor(StompCommand command, @Nullable Map<String, List<String>> externalSourceHeaders) {
        super(command.getMessageType(), externalSourceHeaders);
        setHeader(COMMAND_HEADER, command);
        updateSimpMessageHeadersFromStompHeaders();
    }

    /**
     * A constructor for accessing and modifying existing message headers.
     * Note that the message headers may not have been created from a STOMP frame
     * but may have rather originated from using the more generic
     * {@link org.springframework.messaging.simp.SimpMessageHeaderAccessor}.
     */
    StompHeaderAccessor(Message<?> message) {
        super(message);
        updateStompHeadersFromSimpMessageHeaders();
    }

    /**
     * A constructor for heartbeat frames (no STOMP command, no headers).
     */
    StompHeaderAccessor() {
        super(SimpMessageType.HEARTBEAT, null);
    }


    /**
     * Derive the common SiMP processing headers (destination, content type,
     * subscription id) from the native STOMP headers, depending on the frame's
     * command. For CONNECT/STOMP frames the passcode is also protected.
     */
    void updateSimpMessageHeadersFromStompHeaders() {
        if (getNativeHeaders() == null) {
            return;
        }
        String value = getFirstNativeHeader(STOMP_DESTINATION_HEADER);
        if (value != null) {
            super.setDestination(value);
        }
        value = getFirstNativeHeader(STOMP_CONTENT_TYPE_HEADER);
        if (value != null) {
            super.setContentType(MimeTypeUtils.parseMimeType(value));
        }
        StompCommand command = getCommand();
        if (StompCommand.MESSAGE.equals(command)) {
            value = getFirstNativeHeader(STOMP_SUBSCRIPTION_HEADER);
            if (value != null) {
                super.setSubscriptionId(value);
            }
        }
        else if (StompCommand.SUBSCRIBE.equals(command) || StompCommand.UNSUBSCRIBE.equals(command)) {
            // SUBSCRIBE/UNSUBSCRIBE frames carry the subscription id in "id".
            value = getFirstNativeHeader(STOMP_ID_HEADER);
            if (value != null) {
                super.setSubscriptionId(value);
            }
        }
        else if (StompCommand.CONNECT.equals(command) || StompCommand.STOMP.equals(command)) {
            protectPasscode();
        }
    }

    /**
     * The inverse of {@link #updateSimpMessageHeadersFromStompHeaders()}:
     * mirror the common SiMP processing headers into native STOMP headers.
     */
    void updateStompHeadersFromSimpMessageHeaders() {
        String destination = getDestination();
        if (destination != null) {
            setNativeHeader(STOMP_DESTINATION_HEADER, destination);
        }
        MimeType contentType = getContentType();
        if (contentType != null) {
            setNativeHeader(STOMP_CONTENT_TYPE_HEADER, contentType.toString());
        }
        trySetStompHeaderForSubscriptionId();
    }


    @Override
    protected MessageHeaderAccessor createAccessor(Message<?> message) {
        return wrap(message);
    }

    // Redeclared for visibility within simp.stomp
    @Override
    @Nullable
    protected Map<String, List<String>> getNativeHeaders() {
        return super.getNativeHeaders();
    }

    /**
     * Ensure a MESSAGE-type message carries a client-side STOMP command:
     * defaults a missing command to SEND, rejects anything other than SEND.
     */
    public StompCommand updateStompCommandAsClientMessage() {
        SimpMessageType messageType = getMessageType();
        if (messageType != SimpMessageType.MESSAGE) {
            throw new IllegalStateException("Unexpected message type " + messageType);
        }
        StompCommand command = getCommand();
        if (command == null) {
            command = StompCommand.SEND;
            setHeader(COMMAND_HEADER, command);
        }
        else if (!command.equals(StompCommand.SEND)) {
            throw new IllegalStateException("Unexpected STOMP command " + command);
        }
        return command;
    }

    /**
     * Ensure a MESSAGE-type message carries the server-side MESSAGE command
     * (converting SEND or a missing command), sets the "subscription" header
     * and generates a "message-id" when absent.
     */
    public void updateStompCommandAsServerMessage() {
        SimpMessageType messageType = getMessageType();
        if (messageType != SimpMessageType.MESSAGE) {
            throw new IllegalStateException("Unexpected message type " + messageType);
        }
        StompCommand command = getCommand();
        if ((command == null) || StompCommand.SEND.equals(command)) {
            setHeader(COMMAND_HEADER, StompCommand.MESSAGE);
        }
        else if (!StompCommand.MESSAGE.equals(command)) {
            throw new IllegalStateException("Unexpected STOMP command " + command);
        }
        trySetStompHeaderForSubscriptionId();
        if (getMessageId() == null) {
            // Unique per session: "<sessionId>-<counter>".
            String messageId = getSessionId() + '-' + messageIdCounter.getAndIncrement();
            setNativeHeader(STOMP_MESSAGE_ID_HEADER, messageId);
        }
    }

    /**
     * Return the STOMP command, or {@code null} if not yet set.
     */
    @Nullable
    public StompCommand getCommand() {
        return (StompCommand) getHeader(COMMAND_HEADER);
    }

    /** Whether this message is a SiMP heartbeat (no STOMP frame). */
    public boolean isHeartbeat() {
        return (SimpMessageType.HEARTBEAT == getMessageType());
    }

    /**
     * Parse the "heart-beat" header into a {cx, cy} pair; returns a copy of
     * {0, 0} when the header is absent.
     */
    public long[] getHeartbeat() {
        String rawValue = getFirstNativeHeader(STOMP_HEARTBEAT_HEADER);
        String[] rawValues = StringUtils.split(rawValue, ",");
        if (rawValues == null) {
            return Arrays.copyOf(DEFAULT_HEARTBEAT, 2);
        }
        return new long[] {Long.parseLong(rawValues[0]), Long.parseLong(rawValues[1])};
    }

    /** Set the "accept-version" header (comma-delimited versions). */
    public void setAcceptVersion(String acceptVersion) {
        setNativeHeader(STOMP_ACCEPT_VERSION_HEADER, acceptVersion);
    }

    /** Return the "accept-version" header values, or an empty set when absent. */
    public Set<String> getAcceptVersion() {
        String rawValue = getFirstNativeHeader(STOMP_ACCEPT_VERSION_HEADER);
        return (rawValue != null ? StringUtils.commaDelimitedListToSet(rawValue) : Collections.emptySet());
    }

    /** Set the "host" header. */
    public void setHost(@Nullable String host) {
        setNativeHeader(STOMP_HOST_HEADER, host);
    }

    /** Return the "host" header, or {@code null} if not set. */
    @Nullable
    public String getHost() {
        return getFirstNativeHeader(STOMP_HOST_HEADER);
    }

    /** Set the destination, mirroring it into the native "destination" header. */
    @Override
    public void setDestination(@Nullable String destination) {
        super.setDestination(destination);
        setNativeHeader(STOMP_DESTINATION_HEADER, destination);
    }

    /** Set the content type, mirroring it into the native "content-type" header. */
    @Override
    public void setContentType(MimeType contentType) {
        super.setContentType(contentType);
        setNativeHeader(STOMP_CONTENT_TYPE_HEADER, contentType.toString());
    }

    /** Set the subscription id, mirroring it into the appropriate native header. */
    @Override
    public void setSubscriptionId(@Nullable String subscriptionId) {
        super.setSubscriptionId(subscriptionId);
        trySetStompHeaderForSubscriptionId();
    }

    /**
     * Write the subscription id into the correct native header, which depends
     * on the frame: "subscription" for MESSAGE frames, "id" for
     * SUBSCRIBE/UNSUBSCRIBE message types.
     */
    private void trySetStompHeaderForSubscriptionId() {
        String subscriptionId = getSubscriptionId();
        if (subscriptionId != null) {
            StompCommand command = getCommand();
            if (command != null && StompCommand.MESSAGE.equals(command)) {
                setNativeHeader(STOMP_SUBSCRIPTION_HEADER, subscriptionId);
            }
            else {
                SimpMessageType messageType = getMessageType();
                if (SimpMessageType.SUBSCRIBE.equals(messageType) || SimpMessageType.UNSUBSCRIBE.equals(messageType)) {
                    setNativeHeader(STOMP_ID_HEADER, subscriptionId);
                }
            }
        }
    }

    /** Return the parsed "content-length" header, or {@code null} if not set. */
    @Nullable
    public Integer getContentLength() {
        String header = getFirstNativeHeader(STOMP_CONTENT_LENGTH_HEADER);
        return (header != null ? Integer.valueOf(header) : null);
    }

    /** Set the "content-length" header. */
    public void setContentLength(int contentLength) {
        setNativeHeader(STOMP_CONTENT_LENGTH_HEADER, String.valueOf(contentLength));
    }

    /** Set the "heart-beat" header from the given cx,cy intervals. */
    public void setHeartbeat(long cx, long cy) {
        setNativeHeader(STOMP_HEARTBEAT_HEADER, cx + "," + cy);
    }

    /** Set the "ack" header. */
    public void setAck(@Nullable String ack) {
        setNativeHeader(STOMP_ACK_HEADER, ack);
    }

    /** Return the "ack" header, or {@code null} if not set. */
    @Nullable
    public String getAck() {
        return getFirstNativeHeader(STOMP_ACK_HEADER);
    }

    /** Set the "nack" header. */
    public void setNack(@Nullable String nack) {
        setNativeHeader(STOMP_NACK_HEADER, nack);
    }

    /** Return the "nack" header, or {@code null} if not set. */
    @Nullable
    public String getNack() {
        return getFirstNativeHeader(STOMP_NACK_HEADER);
    }

    /** Set the "login" header. */
    public void setLogin(@Nullable String login) {
        setNativeHeader(STOMP_LOGIN_HEADER, login);
    }

    /** Return the "login" header, or {@code null} if not set. */
    @Nullable
    public String getLogin() {
        return getFirstNativeHeader(STOMP_LOGIN_HEADER);
    }

    /** Set the "passcode" header; the real value is immediately protected. */
    public void setPasscode(@Nullable String passcode) {
        setNativeHeader(STOMP_PASSCODE_HEADER, passcode);
        protectPasscode();
    }

    /**
     * Move the real passcode out of the native headers (into the internal
     * credentials header) and replace it with the literal "PROTECTED" so it
     * cannot leak through header logging/serialization.
     */
    private void protectPasscode() {
        String value = getFirstNativeHeader(STOMP_PASSCODE_HEADER);
        if (value != null && !"PROTECTED".equals(value)) {
            setHeader(CREDENTIALS_HEADER, new StompPasscode(value));
            setNativeHeader(STOMP_PASSCODE_HEADER, "PROTECTED");
        }
    }

    /**
     * Return the passcode header value, or {@code null} if not set.
     */
    @Nullable
    public String getPasscode() {
        StompPasscode credentials = (StompPasscode) getHeader(CREDENTIALS_HEADER);
        return (credentials != null ? credentials.passcode : null);
    }

    /** Set the "receipt-id" header (RECEIPT frames). */
    public void setReceiptId(@Nullable String receiptId) {
        setNativeHeader(STOMP_RECEIPT_ID_HEADER, receiptId);
    }

    /** Return the "receipt-id" header, or {@code null} if not set. */
    @Nullable
    public String getReceiptId() {
        return getFirstNativeHeader(STOMP_RECEIPT_ID_HEADER);
    }

    /** Set the "receipt" header. */
    public void setReceipt(@Nullable String receiptId) {
        setNativeHeader(STOMP_RECEIPT_HEADER, receiptId);
    }

    /** Return the "receipt" header, or {@code null} if not set. */
    @Nullable
    public String getReceipt() {
        return getFirstNativeHeader(STOMP_RECEIPT_HEADER);
    }

    /** Return the "message" header, or {@code null} if not set. */
    @Nullable
    public String getMessage() {
        return getFirstNativeHeader(STOMP_MESSAGE_HEADER);
    }

    /** Set the "message" header. */
    public void setMessage(@Nullable String content) {
        setNativeHeader(STOMP_MESSAGE_HEADER, content);
    }

    /** Return the "message-id" header, or {@code null} if not set. */
    @Nullable
    public String getMessageId() {
        return getFirstNativeHeader(STOMP_MESSAGE_ID_HEADER);
    }

    /** Set the "message-id" header. */
    public void setMessageId(@Nullable String id) {
        setNativeHeader(STOMP_MESSAGE_ID_HEADER, id);
    }

    /** Return the "version" header, or {@code null} if not set. */
    @Nullable
    public String getVersion() {
        return getFirstNativeHeader(STOMP_VERSION_HEADER);
    }

    /** Set the "version" header. */
    public void setVersion(@Nullable String version) {
        setNativeHeader(STOMP_VERSION_HEADER, version);
    }


    // Logging related

    /**
     * Produce a compact, command-specific log line; falls back to
     * {@link #getDetailedLogMessage(Object)} for commands without a short form.
     */
    @Override
    public String getShortLogMessage(Object payload) {
        StompCommand command = getCommand();
        if (StompCommand.SUBSCRIBE.equals(command)) {
            return "SUBSCRIBE " + getDestination() + " id=" + getSubscriptionId() + appendSession();
        }
        else if (StompCommand.UNSUBSCRIBE.equals(command)) {
            return "UNSUBSCRIBE id=" + getSubscriptionId() + appendSession();
        }
        else if (StompCommand.SEND.equals(command)) {
            return "SEND " + getDestination() + appendSession() + appendPayload(payload);
        }
        else if (StompCommand.CONNECT.equals(command)) {
            Principal user = getUser();
            return "CONNECT" + (user != null ? " user=" + user.getName() : "") + appendSession();
        }
        else if (StompCommand.STOMP.equals(command)) {
            Principal user = getUser();
            return "STOMP" + (user != null ? " user=" + user.getName() : "") + appendSession();
        }
        else if (StompCommand.CONNECTED.equals(command)) {
            return "CONNECTED heart-beat=" + Arrays.toString(getHeartbeat()) + appendSession();
        }
        else if (StompCommand.DISCONNECT.equals(command)) {
            String receipt = getReceipt();
            return "DISCONNECT" + (receipt != null ? " receipt=" + receipt : "") + appendSession();
        }
        else {
            return getDetailedLogMessage(payload);
        }
    }

    /**
     * Produce a verbose log line: command name, all native headers, session,
     * user, and (for body-carrying commands) the payload.
     */
    @Override
    public String getDetailedLogMessage(@Nullable Object payload) {
        if (isHeartbeat()) {
            String sessionId = getSessionId();
            return "heart-beat" + (sessionId != null ? " in session " + sessionId : "");
        }
        StompCommand command = getCommand();
        if (command == null) {
            return super.getDetailedLogMessage(payload);
        }
        StringBuilder sb = new StringBuilder();
        sb.append(command.name()).append(' ');
        Map<String, List<String>> nativeHeaders = getNativeHeaders();
        if (nativeHeaders != null) {
            sb.append(nativeHeaders);
        }
        sb.append(appendSession());
        if (getUser() != null) {
            sb.append(", user=").append(getUser().getName());
        }
        if (payload != null && command.isBodyAllowed()) {
            sb.append(appendPayload(payload));
        }
        return sb.toString();
    }

    // Formats the session id suffix used in log messages.
    private String appendSession() {
        return " session=" + getSessionId();
    }

    /**
     * Formats a byte[] payload for logging: decoded with the content-type
     * charset (UTF-8 fallback) and truncated to 80 bytes.
     */
    private String appendPayload(Object payload) {
        if (payload.getClass() != byte[].class) {
            throw new IllegalStateException(
                    "Expected byte array payload but got: " + ClassUtils.getQualifiedName(payload.getClass()));
        }
        byte[] bytes = (byte[]) payload;
        MimeType mimeType = getContentType();
        String contentType = (mimeType != null ? " " + mimeType.toString() : "");
        if (bytes.length == 0 || mimeType == null || !isReadableContentType()) {
            return contentType;
        }
        Charset charset = mimeType.getCharset();
        charset = (charset != null ? charset : StandardCharsets.UTF_8);
        return (bytes.length < 80) ?
                contentType + " payload=" + new String(bytes, charset) :
                contentType + " payload=" + new String(Arrays.copyOf(bytes, 80), charset) + "...(truncated)";
    }


    // Static factory methods and accessors

    /**
     * Create an instance for the given STOMP command.
     */
    public static StompHeaderAccessor create(StompCommand command) {
        return new StompHeaderAccessor(command, null);
    }

    /**
     * Create an instance for the given STOMP command and headers.
     */
    public static StompHeaderAccessor create(StompCommand command, Map<String, List<String>> headers) {
        return new StompHeaderAccessor(command, headers);
    }

    /**
     * Create headers for a heartbeat. While a STOMP heartbeat frame does not
     * have headers, a session id is needed for processing purposes at a minimum.
     */
    public static StompHeaderAccessor createForHeartbeat() {
        return new StompHeaderAccessor();
    }

    /**
     * Create an instance from the payload and headers of the given Message.
     */
    public static StompHeaderAccessor wrap(Message<?> message) {
        return new StompHeaderAccessor(message);
    }

    /**
     * Return the STOMP command from the given headers, or {@code null} if not set.
     */
    @Nullable
    public static StompCommand getCommand(Map<String, Object> headers) {
        return (StompCommand) headers.get(COMMAND_HEADER);
    }

    /**
     * Return the passcode header value, or {@code null} if not set.
     */
    @Nullable
    public static String getPasscode(Map<String, Object> headers) {
        StompPasscode credentials = (StompPasscode) headers.get(CREDENTIALS_HEADER);
        return (credentials != null ? credentials.passcode : null);
    }

    /** Static variant of {@link #getContentLength()} operating on a native-header map. */
    @Nullable
    public static Integer getContentLength(Map<String, List<String>> nativeHeaders) {
        List<String> values = nativeHeaders.get(STOMP_CONTENT_LENGTH_HEADER);
        return (!CollectionUtils.isEmpty(values) ? Integer.valueOf(values.get(0)) : null);
    }


    // Holder that keeps the passcode out of toString()/logs ("[PROTECTED]").
    private static class StompPasscode {

        private final String passcode;

        public StompPasscode(String passcode) {
            this.passcode = passcode;
        }

        @Override
        public String toString() {
            return "[PROTECTED]";
        }
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.curator.framework.imps;

import com.google.common.annotations.VisibleForTesting;
import org.apache.curator.framework.api.CompressionProvider;
import java.io.EOFException;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.zip.*;

public class GzipCompressionProvider implements CompressionProvider
{
    // This class re-implements java.util.zip.GZIPInputStream and GZIPOutputStream functionality to avoid
    // creation many finalized Deflater and Inflater objects on heap (see
    // https://issues.apache.org/jira/browse/CURATOR-487). Even when Curator's minimum supported Java version becomes
    // no less than Java 12, where finalize() methods are removed in Deflater and Inflater classes and instead they
    // are phantom-referenced via Cleaner, it still makes sense to avoid GZIPInputStream and GZIPOutputStream because
    // phantom references are also not entirely free for GC algorithms, and also to allocate less garbage and make
    // less unnecessary data copies.

    /** Largest byte[] length that is safe to request on all JVMs (some reserve a few header words). */
    private static final int MAX_SAFE_JAVA_BYTE_ARRAY_SIZE = Integer.MAX_VALUE - 128;

    /** GZIP header magic number. */
    private static final int GZIP_MAGIC = 0x8b1f;

    /** See {@code java.util.zip.GZIPOutputStream.writeHeader()} */
    private static final byte[] GZIP_HEADER = new byte[] {
            (byte) GZIP_MAGIC,        // Magic number (byte 0)
            (byte) (GZIP_MAGIC >> 8), // Magic number (byte 1)
            Deflater.DEFLATED,        // Compression method (CM)
            0,                        // Flags (FLG)
            0,                        // Modification time MTIME (byte 0)
            0,                        // Modification time MTIME (byte 1)
            0,                        // Modification time MTIME (byte 2)
            0,                        // Modification time MTIME (byte 3)
            0,                        // Extra flags (XFLG)
            0                         // Operating system (OS)
    };

    /** GZip flags, {@link #GZIP_HEADER}'s 4th byte */
    private static final int FHCRC = 1 << 1;
    private static final int FEXTRA = 1 << 2;
    private static final int FNAME = 1 << 3;
    private static final int FCOMMENT = 1 << 4;

    private static final int GZIP_HEADER_SIZE = GZIP_HEADER.length;

    /** 32-bit CRC and uncompressed data size */
    private static final int GZIP_TRAILER_SIZE = Integer.BYTES + Integer.BYTES;

    /** DEFLATE doesn't produce shorter compressed data */
    private static final int MIN_COMPRESSED_DATA_SIZE = 2;

    /**
     * Since Deflaters and Inflaters are acquired and returned to the pools in try-finally blocks that are free of
     * blocking calls themselves, it's not expected that the number of objects in the pools could exceed the number of
     * hardware threads on the machine much. Therefore it's accepted to have simple "ever-growing" (in fact, no) pools
     * of strongly-referenced objects.
     */
    private static final ConcurrentLinkedQueue<Deflater> DEFLATER_POOL = new ConcurrentLinkedQueue<>();
    private static final ConcurrentLinkedQueue<Inflater> INFLATER_POOL = new ConcurrentLinkedQueue<>();

    /** The value verified in GzipCompressionProviderTest.testEmpty() */
    private static final byte[] COMPRESSED_EMPTY_BYTES = new byte[] {
            31, -117, 8, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

    private static Deflater acquireDeflater()
    {
        Deflater deflater = DEFLATER_POOL.poll();
        if ( deflater == null )
        {
            // Using the same settings as in GZIPOutputStream constructor
            deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
        }
        return deflater;
    }

    private static Inflater acquireInflater()
    {
        Inflater inflater = INFLATER_POOL.poll();
        if ( inflater == null )
        {
            // Using the same nowrap setting as GZIPInputStream constructor
            inflater = new Inflater(true);
        }
        return inflater;
    }

    /**
     * GZip-compresses the given data. The {@code path} argument is ignored; it is part of the
     * {@link CompressionProvider} contract only.
     */
    @Override
    public byte[] compress(String path, byte[] data)
    {
        if ( data.length == 0 )
        {
            // clone() because clients could update the array
            return COMPRESSED_EMPTY_BYTES.clone();
        }
        return doCompress(data);
    }

    /**
     * Produces a complete GZip stream (header + DEFLATE data + trailer) for the given non-empty data.
     */
    @VisibleForTesting
    static byte[] doCompress(byte[] data)
    {
        byte[] result = Arrays.copyOf(GZIP_HEADER, conservativeGZippedSizeEstimate(data.length));
        Deflater deflater = acquireDeflater();
        try {
            deflater.setInput(data);
            deflater.finish();
            int offset = GZIP_HEADER_SIZE;
            while ( true )
            {
                int available = result.length - GZIP_TRAILER_SIZE - offset;
                int numCompressedBytes = deflater.deflate(result, offset, available);
                offset += numCompressedBytes;
                if ( deflater.finished() )
                {
                    break;
                }
                // If the result array is already at the maximum safe size and the deflater could not make
                // progress, growing further is impossible; fail the same way decompress() does rather than
                // looping forever.
                if ( result.length == MAX_SAFE_JAVA_BYTE_ARRAY_SIZE && numCompressedBytes == 0 )
                {
                    throw new OutOfMemoryError("Unable to compress that much data into a single byte[] array");
                }
                // Grow by a factor of 1.5, capped at MAX_SAFE_JAVA_BYTE_ARRAY_SIZE. The computation is done in
                // long arithmetic because `result.length + (result.length / 2)` overflows int for lengths above
                // ~1.43 GB, which previously led to a NegativeArraySizeException from Arrays.copyOf(). This
                // mirrors the growth logic in decompress().
                int newResultLength = (int) Math.min((long) result.length + (result.length / 2),
                        MAX_SAFE_JAVA_BYTE_ARRAY_SIZE);
                if ( result.length != newResultLength )
                {
                    result = Arrays.copyOf(result, newResultLength);
                }
            }
            // Write GZip trailer
            CRC32 crc = new CRC32();
            crc.update(data, 0, data.length);
            writeLittleEndianInt(result, offset, (int) crc.getValue());
            writeLittleEndianInt(result, offset + 4, data.length);
            int endOffset = offset + GZIP_TRAILER_SIZE;
            if ( result.length != endOffset )
            {
                result = Arrays.copyOf(result, endOffset);
            }
            return result;
        }
        finally {
            deflater.reset();
            DEFLATER_POOL.add(deflater);
        }
    }

    private static int conservativeGZippedSizeEstimate(int dataSize)
    {
        int conservativeCompressedDataSizeEstimate;
        if ( dataSize < 512 )
        {
            // Assuming DEFLATE doesn't compress small data well
            conservativeCompressedDataSizeEstimate = Math.max(dataSize, MIN_COMPRESSED_DATA_SIZE);
        }
        else
        {
            // Assuming pretty bad 2:1 compression ratio
            conservativeCompressedDataSizeEstimate = Math.max(512, dataSize / 2);
        }
        return GZIP_HEADER_SIZE + conservativeCompressedDataSizeEstimate + GZIP_TRAILER_SIZE;
    }

    private static void writeLittleEndianInt(byte[] b, int offset, int v)
    {
        b[offset] = (byte) v;
        b[offset + 1] = (byte) (v >> 8);
        b[offset + 2] = (byte) (v >> 16);
        b[offset + 3] = (byte) (v >> 24);
    }

    /**
     * Decompresses a single GZip stream, validating the header, CRC32 checksum and uncompressed size from the
     * trailer. The {@code path} argument is ignored.
     *
     * @throws IOException if the data is not in GZip format, is truncated, or is corrupt
     */
    @Override
    public byte[] decompress(String path, byte[] gzippedDataBytes) throws IOException
    {
        if ( Arrays.equals(gzippedDataBytes, COMPRESSED_EMPTY_BYTES) )
        {
            // Allocating a new array instead of creating a static constant because clients may somehow depend on the
            // identity of the returned arrays
            return new byte[0];
        }
        ByteBuffer gzippedData = ByteBuffer.wrap(gzippedDataBytes);
        gzippedData.order(ByteOrder.LITTLE_ENDIAN);
        int headerSize = readGzipHeader(gzippedData);
        if ( gzippedDataBytes.length < headerSize + MIN_COMPRESSED_DATA_SIZE + GZIP_TRAILER_SIZE )
        {
            throw new EOFException("Too short GZipped data");
        }
        int compressedDataSize = gzippedDataBytes.length - headerSize - GZIP_TRAILER_SIZE;
        // Assuming 3:1 compression ratio. Intentionally a more generous estimation than in
        // conservativeGZippedSizeEstimate() to reduce the probability of result array reallocation.
        int initialResultLength = (int) Math.min(compressedDataSize * 3L, MAX_SAFE_JAVA_BYTE_ARRAY_SIZE);
        byte[] result = new byte[initialResultLength];
        Inflater inflater = acquireInflater();
        try {
            inflater.setInput(gzippedDataBytes, headerSize, compressedDataSize);
            CRC32 crc = new CRC32();
            int offset = 0;
            while (true)
            {
                int numDecompressedBytes;
                try {
                    numDecompressedBytes = inflater.inflate(result, offset, result.length - offset);
                }
                catch (DataFormatException e)
                {
                    String s = e.getMessage();
                    throw new ZipException(s != null ? s : "Invalid ZLIB data format");
                }
                crc.update(result, offset, numDecompressedBytes);
                offset += numDecompressedBytes;
                if ( inflater.finished() || inflater.needsDictionary() )
                {
                    break;
                }
                // Just calling inflater.needsInput() doesn't work as expected, apparently it doesn't uphold it's own
                // contract and could have needsInput() == true if numDecompressedBytes != 0 and that just means that
                // there is not enough space in the result array
                else if ( numDecompressedBytes == 0 && inflater.needsInput() )
                {
                    throw new ZipException("Corrupt GZipped data");
                }
                // Inflater's contract doesn't say whether it's able to be finished() without returning 0 from inflate()
                // call, so the additional `numDecompressedBytes == 0` condition ensures that we did another cycle and
                // definitely need to inflate some more bytes.
                if ( result.length == MAX_SAFE_JAVA_BYTE_ARRAY_SIZE && numDecompressedBytes == 0 )
                {
                    throw new OutOfMemoryError("Unable to uncompress that much data into a single byte[] array");
                }
                int newResultLength = (int) Math.min((long) result.length + (result.length / 2),
                        MAX_SAFE_JAVA_BYTE_ARRAY_SIZE);
                if ( result.length != newResultLength )
                {
                    result = Arrays.copyOf(result, newResultLength);
                }
            }
            if ( inflater.getRemaining() != 0 )
            {
                throw new ZipException("Expected just one GZip block, without garbage in the end");
            }
            int checksum = gzippedData.getInt(gzippedDataBytes.length - GZIP_TRAILER_SIZE);
            int numUncompressedBytes = gzippedData.getInt(gzippedDataBytes.length - Integer.BYTES);
            if ( checksum != (int) crc.getValue() || numUncompressedBytes != offset )
            {
                throw new ZipException("Corrupt GZIP trailer");
            }
            if ( result.length != offset )
            {
                result = Arrays.copyOf(result, offset);
            }
            return result;
        }
        finally {
            inflater.reset();
            INFLATER_POOL.add(inflater);
        }
    }

    /**
     * Returns the header size
     */
    private static int readGzipHeader(ByteBuffer gzippedData) throws IOException
    {
        try {
            return doReadHeader(gzippedData);
        }
        catch (BufferUnderflowException e)
        {
            throw new EOFException();
        }
    }

    private static int doReadHeader(ByteBuffer gzippedData) throws IOException
    {
        // getChar() reads a little-endian 16-bit value here because the buffer's order is LITTLE_ENDIAN
        if ( gzippedData.getChar() != GZIP_MAGIC )
        {
            throw new ZipException("Not in GZip format");
        }
        if ( gzippedData.get() != Deflater.DEFLATED )
        {
            throw new ZipException("Unsupported compression method");
        }
        int flags = gzippedData.get();
        // Skip MTIME, XFL, and OS fields
        skip(gzippedData, Integer.BYTES + Byte.BYTES + Byte.BYTES);
        if ( (flags & FEXTRA) != 0 )
        {
            int extraBytes = gzippedData.getChar();
            skip(gzippedData, extraBytes);
        }
        if ( (flags & FNAME) != 0 )
        {
            skipZeroTerminatedString(gzippedData);
        }
        if ( (flags & FCOMMENT) != 0 )
        {
            skipZeroTerminatedString(gzippedData);
        }
        if ( (flags & FHCRC) != 0 )
        {
            CRC32 crc = new CRC32();
            crc.update(gzippedData.array(), 0, gzippedData.position());
            if ( gzippedData.getChar() != (char) crc.getValue() )
            {
                throw new ZipException("Corrupt GZIP header");
            }
        }
        return gzippedData.position();
    }

    private static void skip(ByteBuffer gzippedData, int skipBytes) throws IOException
    {
        try {
            gzippedData.position(gzippedData.position() + skipBytes);
        }
        catch (IllegalArgumentException e)
        {
            throw new EOFException();
        }
    }

    private static void skipZeroTerminatedString(ByteBuffer gzippedData)
    {
        while (gzippedData.get() != 0)
        {
            // loop
        }
    }
}
package io.cattle.platform.inator.wrapper;

import com.netflix.config.DynamicLongProperty;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.inator.factory.InatorServices;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.object.util.TransitioningUtils;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.util.DateUtils;
import org.apache.cloudstack.managed.context.NoException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.format.DateTimeParseException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * Wraps an {@link Instance} record and exposes the lifecycle operations (create/activate/deactivate/remove)
 * plus restart-backoff logic used by the reconciliation machinery.
 */
public class InstanceWrapper implements BasicStateWrapper {

    // Maximum exponent used for the exponential restart backoff (2^n seconds).
    // Declared final: these are shared constants and must never be reassigned.
    private static final DynamicLongProperty MAX_BACKOFF = ArchaiusUtil.getLong("instance.restart.max.backoff.exponent");
    private static final Logger log = LoggerFactory.getLogger(InstanceWrapper.class);

    // States in which an instance must be stopped before it can be removed.
    private static final Set<String> TO_STOP_STATES = CollectionUtils.set(
            InstanceConstants.STATE_STARTING,
            InstanceConstants.STATE_RUNNING);

    Instance instance;
    InatorServices svc;

    public InstanceWrapper(Instance instance, InatorServices svc) {
        this.instance = instance;
        this.svc = svc;
    }

    /**
     * Drives removal of the instance one step at a time.
     *
     * @return true when the instance is fully removed, false when more reconcile passes are needed
     */
    @Override
    public boolean remove() {
        if (instance.getRemoved() != null) {
            return true;
        }
        // Starting/running instances must be stopped first; stopThenRemove schedules both steps.
        if (TO_STOP_STATES.contains(instance.getState())) {
            svc.processManager.stopThenRemove(instance, null);
            return false;
        }
        // Wait out any in-flight state transition before issuing the remove process.
        if (isTransitioning()) {
            return false;
        }
        svc.processManager.remove(instance, null);
        return false;
    }

    @Override
    public void create() {
        Map<String, Object> obj = new HashMap<>();
        obj.put(InstanceConstants.FIELD_CREATE_ONLY, true);
        svc.processManager.create(instance, obj);
    }

    /**
     * Applies exponential backoff (2^retryCount seconds after the last stop, capped by MAX_BACKOFF) before the
     * instance may be restarted. When still inside the backoff window a reconcile is scheduled for the deadline.
     *
     * @return the epoch-millis timestamp after which a restart is allowed, or null when a restart may proceed now
     */
    public Long startBackoff() {
        Long restartCount = DataAccessor.fieldLong(instance, InstanceConstants.FIELD_START_RETRY_COUNT);
        Date lastStop = DataAccessor.fieldDate(instance, InstanceConstants.FIELD_STOPPED);
        if (restartCount != null && lastStop != null && restartCount > 0 &&
                InstanceConstants.STATE_STOPPED.equals(instance.getState()) && shouldRestart()) {
            if (restartCount > MAX_BACKOFF.get()) {
                restartCount = MAX_BACKOFF.get();
            }
            long runAfter = lastStop.getTime() + (long)(Math.pow(2, restartCount)*1000);
            if (runAfter > System.currentTimeMillis()) {
                // Still backing off: trigger a deployment-unit reconcile once the window elapses.
                svc.scheduledExecutorService.schedule((NoException)() -> {
                    svc.triggerDeploymentUnitReconcile(instance.getDeploymentUnitId());
                }, runAfter - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
                return runAfter;
            }
        }
        return null;
    }

    @Override
    public void activate() {
        svc.processManager.start(instance, null);
    }

    @Override
    public void deactivate() {
        svc.processManager.stop(instance, null);
    }

    @Override
    public String getState() {
        return instance.getState();
    }

    @Override
    public String getHealthState() {
        return instance.getHealthState();
    }

    @Override
    public Date getRemoved() {
        return instance.getRemoved();
    }

    @Override
    public ObjectMetaDataManager getMetadataManager() {
        return svc.objectMetadataManager;
    }

    public Long getId() {
        return instance.getId();
    }

    public Instance getInternal() {
        return instance;
    }

    /**
     * Returns the container's start time, preferring the Docker-inspect "StartedAt" value and falling back to
     * the last-start field. Returns null when the value is absent or unparseable.
     */
    public Date getStartTime() {
        Object obj = CollectionUtils.getNestedValue(instance.getData(), "fields", "dockerInspect", "State", "StartedAt");
        if (obj == null) {
            obj = DataAccessor.fieldDate(instance, InstanceConstants.FIELD_LAST_START);
        }
        if (obj instanceof String) {
            try {
                return DateUtils.parse((String) obj);
            } catch (DateTimeParseException e) {
                log.error("Failed to parse date [{}]", obj);
                return null;
            }
        } else if (obj instanceof Date) {
            return (Date) obj;
        }
        return null;
    }

    public String getDisplayName() {
        return String.format("%s (%s)", instance.getKind(),
                svc.idFormatter.formatId(instance.getKind(), instance.getId()));
    }

    public String getErrorMessage() {
        return TransitioningUtils.getTransitioningErrorMessage(instance);
    }

    /**
     * Updates the desired flag and the upgrade-time marker, persisting only when something actually changed.
     */
    public void setDesired(boolean desired) {
        boolean changed = false;
        // Null-safe: a record whose desired flag was never set is treated as "differs" and gets initialized,
        // instead of throwing an NPE from booleanValue().
        Boolean currentDesired = instance.getDesired();
        if (currentDesired == null || currentDesired.booleanValue() != desired) {
            instance.setDesired(desired);
            changed = true;
        }
        if (desired && instance.getUpgradeTime() != null) {
            instance.setUpgradeTime(null);
            changed = true;
        } else if (!desired && instance.getUpgradeTime() == null) {
            instance.setUpgradeTime(new Date());
            changed = true;
        }
        if (changed) {
            svc.objectManager.persist(instance);
        }
    }

    @Override
    public boolean isActive() {
        if (BasicStateWrapper.super.isActive()) {
            return true;
        }
        // Only consider restart policy after it's started once
        if (instance.getFirstRunning() == null) {
            if (isServiceManaged()) {
                return false;
            }
            // For not service managed, never started, stopped and createOnly=true is considered active
            return InstanceConstants.STATE_STOPPED.equals(instance.getState()) &&
                    DataAccessor.fieldBool(instance, InstanceConstants.FIELD_CREATE_ONLY);
        }
        return !shouldRestart();
    }

    // Defaults to true when the restart field is unset.
    protected boolean shouldRestart() {
        Boolean restart = DataAccessor.fieldBoolean(instance, InstanceConstants.FIELD_SHOULD_RESTART);
        return restart == null ? true : restart;
    }

    public List<String> getPorts() {
        return DataAccessor.fieldStringList(instance, InstanceConstants.FIELD_PORTS);
    }

    public boolean isKubernetes() {
        return InstanceConstants.isKubernetes(instance);
    }

    public String getLaunchConfigName() {
        return DataAccessor.fieldString(instance, InstanceConstants.FIELD_LAUNCH_CONFIG_NAME);
    }

    public boolean isServiceManaged() {
        return instance.getServiceId() != null;
    }

    public String getName() {
        return instance.getName();
    }
}
/*
 * Copyright (c) 2009-2012 jMonkeyEngine
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'jMonkeyEngine' nor the names of its contributors
 *   may be used to endorse or promote products derived from this software
 *   without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jme3.bullet.control;

import com.jme3.bullet.PhysicsSpace;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Spatial;
import java.io.IOException;

/**
 * AbstractPhysicsControl manages the lifecycle of a physics object that is
 * attached to a spatial in the SceneGraph.
 *
 * @author normenhansen
 */
public abstract class AbstractPhysicsControl implements PhysicsControl {

    // Scratch quaternion reused by applyPhysicsTransform() to avoid per-frame allocation.
    private final Quaternion tmp_inverseWorldRotation = new Quaternion();
    // The spatial this control is attached to; null when detached.
    protected Spatial spatial;
    protected boolean enabled = true;
    // Tracks whether this control's physics objects are currently registered in `space`.
    protected boolean added = false;
    protected PhysicsSpace space = null;
    // When true, physics transforms are applied to the spatial's local (not world) transform.
    protected boolean applyLocal = false;

    /**
     * Called when the control is added to a new spatial, create any
     * spatial-dependent data here.
     *
     * @param spat The new spatial, guaranteed not to be null
     */
    protected abstract void createSpatialData(Spatial spat);

    /**
     * Called when the control is removed from a spatial, remove any
     * spatial-dependent data here.
     *
     * @param spat The old spatial, guaranteed not to be null
     */
    protected abstract void removeSpatialData(Spatial spat);

    /**
     * Called when the physics object is supposed to move to the spatial
     * position.
     *
     * @param vec the location to apply to the physics object
     */
    protected abstract void setPhysicsLocation(Vector3f vec);

    /**
     * Called when the physics object is supposed to move to the spatial
     * rotation.
     *
     * @param quat the rotation to apply to the physics object
     */
    protected abstract void setPhysicsRotation(Quaternion quat);

    /**
     * Called when the physics object is supposed to add all objects it needs to
     * manage to the physics space.
     *
     * @param space the space to add objects to
     */
    protected abstract void addPhysics(PhysicsSpace space);

    /**
     * Called when the physics object is supposed to remove all objects added to
     * the physics space.
     *
     * @param space the space to remove objects from
     */
    protected abstract void removePhysics(PhysicsSpace space);

    /**
     * @return true when physics transforms are applied to the spatial's local translation
     */
    public boolean isApplyPhysicsLocal() {
        return applyLocal;
    }

    /**
     * When set to true, the physics coordinates will be applied to the local
     * translation of the Spatial
     *
     * @param applyPhysicsLocal true to use local, false to use world coordinates
     */
    public void setApplyPhysicsLocal(boolean applyPhysicsLocal) {
        applyLocal = applyPhysicsLocal;
    }

    /** Returns the spatial's local or world translation depending on {@link #applyLocal}. */
    protected Vector3f getSpatialTranslation() {
        if (applyLocal) {
            return spatial.getLocalTranslation();
        }
        return spatial.getWorldTranslation();
    }

    /** Returns the spatial's local or world rotation depending on {@link #applyLocal}. */
    protected Quaternion getSpatialRotation() {
        if (applyLocal) {
            return spatial.getLocalRotation();
        }
        return spatial.getWorldRotation();
    }

    /**
     * Applies a physics transform to the spatial
     *
     * @param worldLocation the world-space location produced by the physics simulation
     * @param worldRotation the world-space rotation produced by the physics simulation
     */
    protected void applyPhysicsTransform(Vector3f worldLocation, Quaternion worldRotation) {
        if (enabled && spatial != null) {
            // Mutate the spatial's own local vectors in place, then re-set them to trigger updates.
            Vector3f localLocation = spatial.getLocalTranslation();
            Quaternion localRotationQuat = spatial.getLocalRotation();
            if (!applyLocal && spatial.getParent() != null) {
                // Convert the world-space physics transform into the parent's local space:
                // subtract the parent's translation, undo its scale, then rotate by the
                // inverse of the parent's world rotation.
                localLocation.set(worldLocation).subtractLocal(spatial.getParent().getWorldTranslation());
                localLocation.divideLocal(spatial.getParent().getWorldScale());
                tmp_inverseWorldRotation.set(spatial.getParent().getWorldRotation()).inverseLocal().multLocal(localLocation);
                localRotationQuat.set(worldRotation);
                tmp_inverseWorldRotation.set(spatial.getParent().getWorldRotation()).inverseLocal().mult(localRotationQuat, localRotationQuat);
                spatial.setLocalTranslation(localLocation);
                spatial.setLocalRotation(localRotationQuat);
            } else {
                // No parent (or local mode): the physics transform is applied directly.
                spatial.setLocalTranslation(worldLocation);
                spatial.setLocalRotation(worldRotation);
            }
        }
    }

    /**
     * Attaches this control to a new spatial (or detaches it when null), tearing down data
     * for the previous spatial and syncing the physics object to the new spatial's transform.
     */
    public void setSpatial(Spatial spatial) {
        if (this.spatial != null && this.spatial != spatial) {
            removeSpatialData(this.spatial);
        } else if (this.spatial == spatial) {
            // Same spatial (including null == null): nothing to do.
            return;
        }
        this.spatial = spatial;
        if (spatial == null) {
            return;
        }
        createSpatialData(this.spatial);
        // Move the physics object to wherever the new spatial currently is.
        setPhysicsLocation(getSpatialTranslation());
        setPhysicsRotation(getSpatialRotation());
    }

    /**
     * Enables or disables the control; when attached to a physics space this also
     * adds/removes the managed physics objects accordingly.
     */
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
        if (space != null) {
            if (enabled && !added) {
                if (spatial != null) {
                    // Re-sync the physics object before re-adding it to the space.
                    setPhysicsLocation(getSpatialTranslation());
                    setPhysicsRotation(getSpatialRotation());
                }
                addPhysics(space);
                added = true;
            } else if (!enabled && added) {
                removePhysics(space);
                added = false;
            }
        }
    }

    public boolean isEnabled() {
        return enabled;
    }

    // No per-frame logic by default; subclasses may override.
    public void update(float tpf) {
    }

    // No rendering by default; subclasses may override.
    public void render(RenderManager rm, ViewPort vp) {
    }

    /**
     * Moves this control's physics objects into the given space (removing them from the
     * previous one first), or removes them entirely when the space is null.
     */
    public void setPhysicsSpace(PhysicsSpace space) {
        if (space == null) {
            if (this.space != null) {
                removePhysics(this.space);
                added = false;
            }
        } else {
            if (this.space == space) {
                return;
            } else if (this.space != null) {
                removePhysics(this.space);
            }
            addPhysics(space);
            added = true;
        }
        this.space = space;
    }

    public PhysicsSpace getPhysicsSpace() {
        return space;
    }

    // Serializes enabled/applyLocal flags and the spatial reference (jME savable format).
    @Override
    public void write(JmeExporter ex) throws IOException {
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(enabled, "enabled", true);
        oc.write(applyLocal, "applyLocalPhysics", false);
        oc.write(spatial, "spatial", null);
    }

    // Restores the state written by write(); defaults mirror the field initializers.
    @Override
    public void read(JmeImporter im) throws IOException {
        InputCapsule ic = im.getCapsule(this);
        enabled = ic.readBoolean("enabled", true);
        spatial = (Spatial) ic.readSavable("spatial", null);
        applyLocal = ic.readBoolean("applyLocalPhysics", false);
    }
}
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, Alan Harder * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson; import hudson.FilePath.TarCompression; import hudson.model.TaskListener; import hudson.remoting.VirtualChannel; import hudson.util.NullStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.output.NullOutputStream; import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.Chmod; import org.jvnet.hudson.test.Bug; import org.mockito.Mockito; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; import java.net.URLStreamHandler; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import org.jvnet.hudson.test.Issue; import static org.mockito.Mockito.*; /** * @author Kohsuke Kawaguchi */ public class FilePathTest extends ChannelTestCase { public void testCopyTo() throws Exception { File tmp = File.createTempFile("testCopyTo",""); FilePath f = new FilePath(french,tmp.getPath()); f.copyTo(new NullStream()); assertTrue("target does not exist", tmp.exists()); assertTrue("could not delete target " + tmp.getPath(), tmp.delete()); } /** * An attempt to reproduce the file descriptor leak. * If this operation leaks a file descriptor, 2500 should be enough, I think. */ // TODO: this test is much too slow to be a traditional unit test. Should be extracted into some stress test // which is no part of the default test harness? 
public void testNoFileLeakInCopyTo() throws Exception { for (int j=0; j<2500; j++) { File tmp = File.createTempFile("testCopyFrom",""); FilePath f = new FilePath(tmp); File tmp2 = File.createTempFile("testCopyTo",""); FilePath f2 = new FilePath(british,tmp2.getPath()); f.copyTo(f2); f.delete(); f2.delete(); } } /** * As we moved the I/O handling to another thread, there's a race condition in * {@link FilePath#copyTo(OutputStream)} &mdash; this method can return before * all the writes are delivered to {@link OutputStream}. * * <p> * To reproduce that problem, we use a large number of threads, so that we can * maximize the chance of out-of-order execution, and make sure we are * seeing the right byte count at the end. * * Also see JENKINS-7897 */ @Bug(7871) public void testNoRaceConditionInCopyTo() throws Exception { final File tmp = File.createTempFile("testNoRaceConditionInCopyTo",""); try { int fileSize = 90000; givenSomeContentInFile(tmp, fileSize); List<Future<Integer>> results = whenFileIsCopied100TimesConcurrently(tmp); // THEN copied count was always equal the expected size for (Future<Integer> f : results) assertEquals(fileSize,f.get().intValue()); } finally { tmp.delete(); } } private void givenSomeContentInFile(File file, int size) throws IOException { FileOutputStream os = new FileOutputStream(file); byte[] buf = new byte[size]; for (int i=0; i<buf.length; i++) buf[i] = (byte)(i%256); os.write(buf); os.close(); } private List<Future<Integer>> whenFileIsCopied100TimesConcurrently(final File file) throws InterruptedException { List<Callable<Integer>> r = new ArrayList<Callable<Integer>>(); for (int i=0; i<100; i++) { r.add(new Callable<Integer>() { public Integer call() throws Exception { class Sink extends OutputStream { private Exception closed; private volatile int count; private void checkNotClosed() throws IOException { if (closed != null) throw new IOException(closed); } @Override public void write(int b) throws IOException { count++; 
checkNotClosed(); } @Override public void write(byte[] b) throws IOException { count+=b.length; checkNotClosed(); } @Override public void write(byte[] b, int off, int len) throws IOException { count+=len; checkNotClosed(); } @Override public void close() throws IOException { closed = new Exception(); //if (size!=count) // fail(); } } FilePath f = new FilePath(french, file.getPath()); Sink sink = new Sink(); f.copyTo(sink); return sink.count; } }); } ExecutorService es = Executors.newFixedThreadPool(100); try { return es.invokeAll(r); } finally { es.shutdown(); } } public void testRepeatCopyRecursiveTo() throws Exception { // local->local copy used to return 0 if all files were "up to date" // should return number of files processed, whether or not they were copied or already current File tmp = Util.createTempDir(), src = new File(tmp, "src"), dst = new File(tmp, "dst"); try { assertTrue(src.mkdir()); assertTrue(dst.mkdir()); File.createTempFile("foo", ".tmp", src); FilePath fp = new FilePath(src); assertEquals(1, fp.copyRecursiveTo(new FilePath(dst))); // copy again should still report 1 assertEquals(1, fp.copyRecursiveTo(new FilePath(dst))); } finally { Util.deleteRecursive(tmp); } } @Bug(9540) public void testErrorMessageInRemoteCopyRecursive() throws Exception { File tmp = Util.createTempDir(); try { File src = new File(tmp, "src"); File dst = new File(tmp, "dst"); FilePath from = new FilePath(src); FilePath to = new FilePath(british, dst.getAbsolutePath()); for (int i = 0; i < 10000; i++) { // TODO is there a simpler way to force the TarOutputStream to be flushed and the reader to start? // Have not found a way to make the failure guaranteed. 
OutputStream os = from.child("content" + i).write(); try { for (int j = 0; j < 1024; j++) { os.write('.'); } } finally { os.close(); } } FilePath toF = to.child("content0"); toF.write().close(); toF.chmod(0400); try { from.copyRecursiveTo(to); // on Windows this may just succeed; OK, test did not prove anything then } catch (IOException x) { if (Functions.printThrowable(x).contains("content0")) { // Fine, error message talks about permission denied. } else { throw x; } } finally { toF.chmod(700); } } finally { Util.deleteRecursive(tmp); } } public void testArchiveBug4039() throws Exception { File tmp = Util.createTempDir(); try { FilePath d = new FilePath(french,tmp.getPath()); d.child("test").touch(0); d.zip(new NullOutputStream()); d.zip(new NullOutputStream(),"**/*"); } finally { Util.deleteRecursive(tmp); } } public void testNormalization() throws Exception { compare("abc/def\\ghi","abc/def\\ghi"); // allow mixed separators {// basic '.' trimming compare("./abc/def","abc/def"); compare("abc/./def","abc/def"); compare("abc/def/.","abc/def"); compare(".\\abc\\def","abc\\def"); compare("abc\\.\\def","abc\\def"); compare("abc\\def\\.","abc\\def"); } compare("abc/../def","def"); compare("abc/def/../../ghi","ghi"); compare("abc/./def/../././../ghi","ghi"); // interleaving . and .. compare("../abc/def","../abc/def"); // uncollapsible .. 
// (Continuation of a normalization test whose opening lies outside this chunk;
// each compare() call asserts FilePath's path-normalization result.)
compare("abc/def/..","abc");
        compare("c:\\abc\\..","c:\\");      // we want c:\\, not c:
        compare("c:\\abc\\def\\..","c:\\abc");
        compare("/abc/../","/");
        compare("abc/..",".");
        compare(".",".");

        // @Bug(5951)
        compare("C:\\Hudson\\jobs\\foo\\workspace/../../otherjob/workspace/build.xml",
                "C:\\Hudson\\jobs/otherjob/workspace/build.xml");

        // Other cases that failed before
        compare("../../abc/def","../../abc/def");
        compare("..\\..\\abc\\def","..\\..\\abc\\def");
        compare("/abc//../def","/def");
        compare("c:\\abc\\\\..\\def","c:\\def");
        compare("/../abc/def","/abc/def");
        compare("c:\\..\\abc\\def","c:\\abc\\def");
        compare("abc/def/","abc/def");
        compare("abc\\def\\","abc\\def");

        // The new code can collapse extra separator chars
        compare("abc//def/\\//\\ghi","abc/def/ghi");
        compare("\\\\host\\\\abc\\\\\\def","\\\\host\\abc\\def"); // don't collapse for \\ prefix
        compare("\\\\\\foo","\\\\foo");
        compare("//foo","/foo");

        // Other edge cases
        compare("abc/def/../../../ghi","../ghi");
        compare("\\abc\\def\\..\\..\\..\\ghi\\","\\ghi");
    }

    /** Asserts that {@code original} normalizes to {@code answer} via {@link FilePath#getRemote()}. */
    private void compare(String original, String answer) {
        assertEquals(answer,new FilePath((VirtualChannel)null,original).getRemote());
    }

    // @Bug(6494)
    /** Walks {@link FilePath#getParent()} for Unix, relative, and Windows drive paths up to the root. */
    public void testGetParent() throws Exception {
        FilePath fp = new FilePath((VirtualChannel)null, "/abc/def");
        assertEquals("/abc", (fp = fp.getParent()).getRemote());
        assertEquals("/", (fp = fp.getParent()).getRemote());
        assertNull(fp.getParent());

        fp = new FilePath((VirtualChannel)null, "abc/def\\ghi");
        assertEquals("abc/def", (fp = fp.getParent()).getRemote());
        assertEquals("abc", (fp = fp.getParent()).getRemote());
        assertNull(fp.getParent());

        fp = new FilePath((VirtualChannel)null, "C:\\abc\\def");
        assertEquals("C:\\abc", (fp = fp.getParent()).getRemote());
        assertEquals("C:\\", (fp = fp.getParent()).getRemote());
        assertNull(fp.getParent());
    }

    /** Touches the file {@code base/path[0]/path[1]/...} (creating directories) and returns it as a FilePath. */
    private FilePath createFilePath(final File base, final String... path) throws IOException {
        File building = base;
        for (final String component : path) {
            building = new File(building, component);
        }
        FileUtils.touch(building);
        return new FilePath(building);
    }

    /** {@code list("**")} should find every file under the base directory. */
    public void testList() throws Exception {
        File baseDir = Util.createTempDir();
        try {
            final Set<FilePath> expected = new HashSet<FilePath>();
            expected.add(createFilePath(baseDir, "top", "sub", "app.log"));
            expected.add(createFilePath(baseDir, "top", "sub", "trace.log"));
            expected.add(createFilePath(baseDir, "top", "db", "db.log"));
            expected.add(createFilePath(baseDir, "top", "db", "trace.log"));
            final FilePath[] result = new FilePath(baseDir).list("**");
            assertEquals(expected, new HashSet<FilePath>(Arrays.asList(result)));
        } finally {
            Util.deleteRecursive(baseDir);
        }
    }

    /** An exclude pattern should filter matching files out of the listing. */
    public void testListWithExcludes() throws Exception {
        File baseDir = Util.createTempDir();
        try {
            final Set<FilePath> expected = new HashSet<FilePath>();
            expected.add(createFilePath(baseDir, "top", "sub", "app.log"));
            createFilePath(baseDir, "top", "sub", "trace.log");  // created but excluded below
            expected.add(createFilePath(baseDir, "top", "db", "db.log"));
            createFilePath(baseDir, "top", "db", "trace.log");   // created but excluded below
            final FilePath[] result = new FilePath(baseDir).list("**", "**/trace.log");
            assertEquals(expected, new HashSet<FilePath>(Arrays.asList(result)));
        } finally {
            Util.deleteRecursive(baseDir);
        }
    }

    /** Ant-style default excludes (backup files, CVS, .git, ...) apply unless explicitly disabled. */
    public void testListWithDefaultExcludes() throws Exception {
        File baseDir = Util.createTempDir();
        try {
            final Set<FilePath> expected = new HashSet<FilePath>();
            expected.add(createFilePath(baseDir, "top", "sub", "backup~"));
            expected.add(createFilePath(baseDir, "top", "CVS", "somefile,v"));
            expected.add(createFilePath(baseDir, "top", ".git", "config"));
            // none of the files are included by default (default includes true)
            assertEquals(0, new FilePath(baseDir).list("**", "").length);
            final FilePath[] result = new FilePath(baseDir).list("**", "", false);
            assertEquals(expected, new HashSet<FilePath>(Arrays.asList(result)));
        } finally {
            Util.deleteRecursive(baseDir);
        }
    }

    @Bug(11073)
    /** isUnix() should be decided by the path's separator style, also for paths built via child(). */
    public void testIsUnix() {
        VirtualChannel dummy = Mockito.mock(VirtualChannel.class);
        FilePath winPath = new FilePath(dummy,
                " c:\\app\\hudson\\workspace\\3.8-jelly-db\\jdk/jdk1.6.0_21/label/sqlserver/profile/sqlserver\\acceptance-tests\\distribution.zip");
        assertFalse(winPath.isUnix());

        FilePath base = new FilePath(dummy, "c:\\app\\hudson\\workspace\\3.8-jelly-db");
        FilePath middle = new FilePath(base, "jdk/jdk1.6.0_21/label/sqlserver/profile/sqlserver");
        FilePath full = new FilePath(middle, "acceptance-tests\\distribution.zip");
        assertFalse(full.isUnix());

        FilePath unixPath = new FilePath(dummy, "/home/test");
        assertTrue(unixPath.isUnix());
    }

    /**
     * Tests that permissions are kept when using {@link FilePath#copyToWithPermission(FilePath)}.
     * Also tries to check that a problem with setting the last-modified date on Windows doesn't fail the whole copy
     * - well at least when running this test on a Windows OS. See JENKINS-11073
     */
    public void testCopyToWithPermission() throws IOException, InterruptedException {
        File tmp = Util.createTempDir();
        try {
            File child = new File(tmp,"child");
            FilePath childP = new FilePath(child);
            childP.touch(4711);

            Chmod chmodTask = new Chmod();
            chmodTask.setProject(new Project());
            chmodTask.setFile(child);
            chmodTask.setPerm("0400");
            chmodTask.execute();

            // NOTE(review): "british" is a fixture declared earlier in this class (outside this
            // chunk) — presumably a channel/locale used to exercise the remote code path; confirm.
            FilePath copy = new FilePath(british,tmp.getPath()).child("copy");
            childP.copyToWithPermission(copy);

            assertEquals(childP.mode(),copy.mode());
            if (!Functions.isWindows()) {
                assertEquals(childP.lastModified(),copy.lastModified());
            }

            // JENKINS-11073:
            // Windows seems to have random failures when setting the timestamp on newly generated
            // files. So test that:
            for (int i=0; i<100; i++) {
                copy = new FilePath(british,tmp.getPath()).child("copy"+i);
                childP.copyToWithPermission(copy);
            }
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    /** A symlink should survive a tar/untar round trip (link target readable via readLink()). */
    public void testSymlinkInTar() throws Exception {
        if (Functions.isWindows())  return; // can't test on Windows

        FilePath tmp = new FilePath(Util.createTempDir());
        try {
            FilePath in = tmp.child("in");
            in.mkdirs();
            in.child("c").touch(0);
            in.child("b").symlinkTo("c", TaskListener.NULL);

            FilePath tar = tmp.child("test.tar");
            in.tar(tar.write(), "**/*");

            FilePath dst = in.child("dst");
            tar.untar(dst, TarCompression.NONE);

            assertEquals("c",dst.child("b").readLink());
        } finally {
            tmp.deleteRecursive();
        }
    }

    @Bug(13649)
    /** Relative child paths should be rewritten to the base path's separator style, segment by segment. */
    public void testMultiSegmentRelativePaths() throws Exception {
        VirtualChannel d = Mockito.mock(VirtualChannel.class);
        FilePath winPath = new FilePath(d, "c:\\app\\jenkins\\workspace");
        FilePath nixPath = new FilePath(d, "/opt/jenkins/workspace");

        assertEquals("c:\\app\\jenkins\\workspace\\foo\\bar\\manchu", new FilePath(winPath, "foo/bar/manchu").getRemote());
        assertEquals("c:\\app\\jenkins\\workspace\\foo\\bar\\manchu", new FilePath(winPath, "foo\\bar/manchu").getRemote());
        assertEquals("c:\\app\\jenkins\\workspace\\foo\\bar\\manchu", new FilePath(winPath, "foo\\bar\\manchu").getRemote());
        assertEquals("/opt/jenkins/workspace/foo/bar/manchu", new FilePath(nixPath, "foo\\bar\\manchu").getRemote());
        assertEquals("/opt/jenkins/workspace/foo/bar/manchu", new FilePath(nixPath, "foo/bar\\manchu").getRemote());
        assertEquals("/opt/jenkins/workspace/foo/bar/manchu", new FilePath(nixPath, "foo/bar/manchu").getRemote());
    }

    /** validateAntFileMask: null = mask matches; otherwise the returned message explains why not. */
    public void testValidateAntFileMask() throws Exception {
        File tmp = Util.createTempDir();
        try {
            // NOTE(review): "french" is a fixture declared earlier in this class (outside this chunk).
            FilePath d = new FilePath(french, tmp.getPath());
            d.child("d1/d2/d3").mkdirs();
            d.child("d1/d2/d3/f.txt").touch(0);
            d.child("d1/d2/d3/f.html").touch(0);
            d.child("d1/d2/f.txt").touch(0);

            assertValidateAntFileMask(null, d, "**/*.txt");
            assertValidateAntFileMask(null, d, "d1/d2/d3/f.txt");
            assertValidateAntFileMask(null, d, "**/*.html");
            assertValidateAntFileMask(Messages.FilePath_validateAntFileMask_portionMatchButPreviousNotMatchAndSuggest("**/*.js", "**", "**/*.js"), d, "**/*.js");
            assertValidateAntFileMask(Messages.FilePath_validateAntFileMask_doesntMatchAnything("index.htm"), d, "index.htm");
            assertValidateAntFileMask(Messages.FilePath_validateAntFileMask_doesntMatchAndSuggest("f.html", "d1/d2/d3/f.html"), d, "f.html");
            // TODO lots more to test, e.g. multiple patterns separated by commas; ought to have full code coverage for this method
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    /** Helper: asserts validateAntFileMask's message (null means the mask validated cleanly). */
    @SuppressWarnings("deprecation")
    private static void assertValidateAntFileMask(String expected, FilePath d, String fileMasks) throws Exception {
        assertEquals(expected, d.validateAntFileMask(fileMasks));
    }

    @Issue("JENKINS-7214")
    @SuppressWarnings("deprecation")
    /** With a bounded scan, an expensive non-matching mask must abort via InterruptedException. */
    public void testValidateAntFileMaskBounded() throws Exception {
        File tmp = Util.createTempDir();
        try {
            FilePath d = new FilePath(french, tmp.getPath());
            FilePath d2 = d.child("d1/d2");
            d2.mkdirs();
            for (int i = 0; i < 100; i++) {
                FilePath d3 = d2.child("d" + i);
                d3.mkdirs();
                d3.child("f.txt").touch(0);
            }
            assertEquals(null, d.validateAntFileMask("d1/d2/**/f.txt"));
            assertEquals(null, d.validateAntFileMask("d1/d2/**/f.txt", 10));
            assertEquals(Messages.FilePath_validateAntFileMask_portionMatchButPreviousNotMatchAndSuggest("**/*.js", "**", "**/*.js"), d.validateAntFileMask("**/*.js", 1000));
            try {
                d.validateAntFileMask("**/*.js", 10);
                fail();
            } catch (InterruptedException x) {
                // good
            }
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    @Bug(15418)
    /** Builds a path longer than the 260-char Windows MAX_PATH and checks it can still be deleted. */
    public void testDeleteLongPathOnWindows() throws Exception {
        File tmp = Util.createTempDir();
        try {
            FilePath d = new FilePath(french, tmp.getPath());

            // construct a very long path
            StringBuilder sb = new StringBuilder();
            while(sb.length() + tmp.getPath().length() < 260 - "very/".length()) {
                sb.append("very/");
            }
            sb.append("pivot/very/very/long/path");

            FilePath longPath = d.child(sb.toString());
            longPath.mkdirs();
            FilePath childInLongPath = longPath.child("file.txt");
            childInLongPath.touch(0);

            File firstDirectory = new File(tmp.getAbsolutePath() + "/very");
            Util.deleteRecursive(firstDirectory);

            assertFalse("Could not delete directory!", firstDirectory.exists());
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    @Bug(16215)
    /** installIfNecessaryFrom must send If-Modified-Since and skip the download on HTTP 304. */
    public void testInstallIfNecessaryAvoidsExcessiveDownloadsByUsingIfModifiedSince() throws Exception {
        final File tmp = Util.createTempDir();
        try {
            final FilePath d = new FilePath(tmp);
            d.child(".timestamp").touch(123000);

            final HttpURLConnection con = mock(HttpURLConnection.class);
            final URL url = someUrlToZipFile(con);

            when(con.getResponseCode())
                .thenReturn(HttpURLConnection.HTTP_NOT_MODIFIED);

            assertFalse(d.installIfNecessaryFrom(url, null, null));

            verify(con).setIfModifiedSince(123000);
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    @Bug(16215)
    /** installIfNecessaryFrom must download and unpack the archive on HTTP 200. */
    public void testInstallIfNecessaryPerformsInstallation() throws Exception {
        final File tmp = Util.createTempDir();
        try {
            final FilePath d = new FilePath(tmp);

            final HttpURLConnection con = mock(HttpURLConnection.class);
            final URL url = someUrlToZipFile(con);

            when(con.getResponseCode())
                .thenReturn(HttpURLConnection.HTTP_OK);

            when(con.getInputStream())
                .thenReturn(someZippedContent());

            assertTrue(d.installIfNecessaryFrom(url, null, null));
        } finally {
            Util.deleteRecursive(tmp);
        }
    }

    /** Builds a URL whose connection is always the given (mocked) one, so no network is touched. */
    private URL someUrlToZipFile(final URLConnection con) throws IOException {
        final URLStreamHandler urlHandler = new URLStreamHandler() {
            @Override
            protected URLConnection openConnection(URL u) throws IOException {
                return con;
            }
        };
        return new URL("http", "some-host", 0, "/some-path.zip", urlHandler);
    }

    /** Returns a minimal in-memory zip stream containing a single entry "abc". */
    private InputStream someZippedContent() throws IOException {
        final ByteArrayOutputStream buf = new ByteArrayOutputStream();
        final ZipOutputStream zip = new ZipOutputStream(buf);

        zip.putNextEntry(new ZipEntry("abc"));
        zip.write("abc".getBytes());
        zip.close();

        return new ByteArrayInputStream(buf.toByteArray());
    }

    @Bug(16846)
    /** moveAllChildrenTo must cope with a child directory whose name collides with its parent's. */
    public void testMoveAllChildrenTo() throws IOException, InterruptedException {
        final File tmp = Util.createTempDir();
        try {
            final String dirname = "sub";
            final File top = new File(tmp, "test");
            final File sub = new File(top, dirname);
            final File subsub = new File(sub, dirname);

            subsub.mkdirs();

            final File subFile1 = new File( sub.getAbsolutePath() + "/file1.txt" );
            subFile1.createNewFile();
            final File subFile2 = new File( subsub.getAbsolutePath() + "/file2.txt" );
            subFile2.createNewFile();

            final FilePath src = new FilePath(sub);
            final FilePath dst = new FilePath(top);

            // test conflict subdir
            src.moveAllChildrenTo(dst);
        } finally {
            Util.deleteRecursive(tmp);
        }
    }
}
/* Derby - Class org.apache.derbyTesting.functionTests.tests.upgradeTests.Changes10_9 Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.upgradeTests; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.List; import javax.sql.DataSource; import junit.framework.Test; import org.apache.derbyTesting.functionTests.tests.upgradeTests.helpers.DisposableIndexStatistics; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.IndexStatsUtil; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.JDBCDataSource; import org.apache.derbyTesting.junit.SupportFilesSetup; import org.apache.derbyTesting.junit.TestConfiguration; /** * Upgrade test cases for 10.9. 
*/ public class Changes10_9 extends UpgradeChange { /////////////////////////////////////////////////////////////////////////////////// // // CONSTANTS // /////////////////////////////////////////////////////////////////////////////////// private static final String UPGRADE_REQUIRED = "XCL47"; private static final String INVALID_PROVIDER_CHANGE = "XCY05"; /////////////////////////////////////////////////////////////////////////////////// // // STATE // /////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////// // // CONSTRUCTOR // /////////////////////////////////////////////////////////////////////////////////// public Changes10_9(String name) { super(name); initPattern(); } /////////////////////////////////////////////////////////////////////////////////// // // JUnit BEHAVIOR // /////////////////////////////////////////////////////////////////////////////////// private static final String[] SUPPORT_FILES_SOURCE = { "functionTests/tests/lang/dcl_java.jar", "functionTests/tests/lang/dcl_emc1.jar", "functionTests/tests/lang/dcl_emc2.jar", }; /** * Return the suite of tests to test the changes made in 10.7. * @param phase an integer that indicates the current phase in * the upgrade test. * @return the test suite created. */ public static Test suite(int phase) { BaseTestSuite suite = new BaseTestSuite("Upgrade test for 10.9"); suite.addTestSuite(Changes10_9.class); return new SupportFilesSetup( (Test)suite, SUPPORT_FILES_SOURCE); } /////////////////////////////////////////////////////////////////////////////////// // // TESTS // /////////////////////////////////////////////////////////////////////////////////// /** * Make sure that the drop statistics procedure only appears after * hard-upgrade. 
*/ public void testDropStatisticsProc() throws Exception { Statement s = createStatement(); switch ( getPhase() ) { case PH_CREATE: // create with old version s.execute("CREATE TABLE dropStatsT1 (c11 int, c12 int) "); vetProcs(s, "call syscs_util.syscs_drop_statistics( 'APP', 'DROPSTATST1', null )", false); break; case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade vetProcs(s, "call syscs_util.syscs_drop_statistics( 'APP', 'DROPSTATST1', null )", false); break; case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade vetProcs(s, "call syscs_util.syscs_drop_statistics( 'APP', 'DROPSTATST1', null )", false); break; case PH_HARD_UPGRADE: // boot with new version and hard-upgrade vetProcs(s, "call syscs_util.syscs_drop_statistics( 'APP', 'DROPSTATST1', null )", true); s.execute("DROP TABLE dropStatsT1"); break; } s.close(); } /** * Make sure that the catalogs and procedures for NATIVE authentication * only appear after hard-upgrade. */ public void testNativeAuthentication() throws Exception { Statement s = createStatement(); switch ( getPhase() ) { case PH_CREATE: // create with old version vetSYSUSERS( s, false ); vetNativeProcs( s, false ); break; case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade vetSYSUSERS( s, false ); vetNativeProcs( s, false ); break; case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade vetSYSUSERS( s, false ); vetNativeProcs( s, false ); break; case PH_HARD_UPGRADE: // boot with new version and hard-upgrade vetSYSUSERS( s, true ); vetNativeProcs( s, true ); break; } s.close(); } private void vetProcs( Statement s, String procCall, boolean shouldExist ) throws Exception { try { s.execute( procCall ); if ( !shouldExist ) { fail( "syscs_util.syscs_create_user should not exist." 
); } } catch (SQLException se ) { if ( shouldExist ) { assertSQLState( "4251K", se ); } else { assertSQLState( "42Y03", se ); } } } private void vetSYSUSERS( Statement s, boolean shouldExist ) throws Exception { ResultSet rs = s.executeQuery( "select count(*) from sys.systables where tablename = 'SYSUSERS'" ); rs.next(); int expectedValue = shouldExist ? 1 : 0; assertEquals( expectedValue, rs.getInt( 1 ) ); rs.close(); } private void vetNativeProcs( Statement s, boolean shouldExist ) throws Exception { // make sure that an authentication algorithm has been set String defaultDigestAlgorithm = pushAuthenticationAlgorithm( s ); vetProcs(s, "call syscs_util.syscs_create_user( 'FRED', 'fredpassword' )", shouldExist); // restore the authentication algorithm if we changed it popAuthenticationAlgorithm( s, defaultDigestAlgorithm ); } private String pushAuthenticationAlgorithm( Statement s ) throws Exception { // make sure that an authentication algorithm has been set. // otherwise, we won't be able to create NATIVE users. 
String defaultDigestAlgorithm = getDatabaseProperty( s, "derby.authentication.builtin.algorithm" ); if ( defaultDigestAlgorithm == null ) { setDatabaseProperty( s, "derby.authentication.builtin.algorithm", "SHA-1" ); } return defaultDigestAlgorithm; } private void popAuthenticationAlgorithm( Statement s, String defaultDigestAlgorithm ) throws Exception { // restore the authentication algorithm if we changed it if ( defaultDigestAlgorithm == null ) { setDatabaseProperty( s, "derby.authentication.builtin.algorithm", null ); } } private void setDatabaseProperty( Statement s, String key, String value ) throws Exception { if ( value == null ) { value = "cast ( null as varchar( 32672 ) )"; } else { value = "'" + value + "'"; } String command = "call syscs_util.syscs_set_database_property( '" + key + "', " + value + " )"; s.execute( command ); } private String getDatabaseProperty( Statement s, String key ) throws Exception { ResultSet rs = s.executeQuery( "values( syscs_util.syscs_get_database_property( '" + key + "' ) )" ); try { rs.next(); return rs.getString( 1 ); } finally { rs.close(); } } /** * Make sure that NATIVE LOCAL authentication can't be turned on * before hard-upgrade. */ public void testNativeLocalAuthentication() throws Exception { Statement s = createStatement(); switch ( getPhase() ) { case PH_CREATE: // create with old version case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade // // It's possible (although very unlikely) that someone could set the // authentication provider to be NATIVE::LOCAL in an old database // just before upgrading. If they do this, they will get an error at // soft-upgrade time and they will have to back off to the old // derby version in order to unset the authentication provider. 
// setDatabaseProperty( s, "derby.authentication.provider", "NATIVE::LOCAL" ); setDatabaseProperty( s, "derby.authentication.provider", null ); break; case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade setDatabaseProperty( s, "derby.authentication.provider", "com.acme.AcmeAuthenticator" ); assertStatementError ( UPGRADE_REQUIRED, s, "call syscs_util.syscs_set_database_property( 'derby.authentication.provider', 'NATIVE::LOCAL' )" ); setDatabaseProperty( s, "derby.authentication.provider", null ); break; case PH_HARD_UPGRADE: // boot with new version and hard-upgrade // // Can't actually turn on NATIVE LOCAL authentication in the upgrade tests because, once turned on, // you can't turn it off and that would mess up later tests. // break; } s.close(); } /** * Make sure builtin authentication doesn't use a hash scheme that's not * supported by the old version until the database has been hard upgraded. * See DERBY-4483 and DERBY-5539. */ public void testBuiltinAuthenticationWithConfigurableHash() throws SQLException { // This test needs to enable authentication, which is not supported // in the default database for the upgrade tests, so roll our own. DataSource ds = JDBCDataSource.getDataSourceLogical("BUILTIN_10_9"); // Add create=true or upgrade=true, as appropriate, since we don't // get this for free when we don't use the default database. if (getPhase() == PH_CREATE) { JDBCDataSource.setBeanProperty(ds, "createDatabase", "create"); } else if (getPhase() == PH_HARD_UPGRADE) { JDBCDataSource.setBeanProperty( ds, "connectionAttributes", "upgrade=true"); } // Connect as database owner, possibly creating or upgrading the // database. Connection c = ds.getConnection("dbo", "the boss"); // Let's first verify that all the users can connect after the changes // in the previous phase. Would fail for instance in post soft upgrade // if soft upgrade saved passwords using the new scheme. 
verifyCanConnect(ds); CallableStatement setProp = c.prepareCall( "call syscs_util.syscs_set_database_property(?, ?)"); if (getPhase() == PH_CREATE) { // The database is being created. Make sure that builtin // authentication is enabled. setProp.setString(1, "derby.connection.requireAuthentication"); setProp.setString(2, "true"); setProp.execute(); setProp.setString(1, "derby.authentication.provider"); setProp.setString(2, "BUILTIN"); setProp.execute(); // Set the length of the random salt to 0 to ensure that the // hashed token doesn't vary between test runs. setProp.setString(1, "derby.authentication.builtin.saltLength"); setProp.setInt(2, 0); setProp.execute(); } // Set (or reset) passwords for all users. setPasswords(setProp); setProp.close(); // We should still be able to connect. verifyCanConnect(ds); // Check that the passwords are stored using the expected scheme (new // configurable hash scheme in hard upgrade, old scheme otherwise). verifyPasswords(c); c.close(); // The framework doesn't know how to shutdown a database using // authentication, so do it manually as database owner here. JDBCDataSource.setBeanProperty(ds, "user", "dbo"); JDBCDataSource.setBeanProperty(ds, "password", "the boss"); JDBCDataSource.shutdownDatabase(ds); } /** * Information about users for the test of builtin authentication with * configurable hash algorithm. Two-dimensional array of strings where * each row contains (1) a user name, (2) a password, (3) the name of a * digest algorithm with which the password should be hashed, (4) the * hashed password when the old scheme is used, (5) the hashed password * when the new, configurable hash scheme is used in databases that * don't support the key-stretching extension (DERBY-5539), and (6) the * hashed password when configurable hash with key stretching is used. 
*/ private static final String[][] USERS = { { "dbo", "the boss", null, "3b6071d99b1d48ab732e75a8de701b6c77632db65898", "3b6071d99b1d48ab732e75a8de701b6c77632db65898", "3b6071d99b1d48ab732e75a8de701b6c77632db65898", }, { "pat", "postman", "MD5", "3b609129e181a7f7527697235c8aead65c461a0257f3", "3b61aaca567ed43d1ba2e6402cbf1a723407:MD5", "3b624f4b0d7f3d2330c1db98a2000c62b5cd::1000:MD5", }, { "sam", "fireman", "SHA-1", "3b609e5173cfa03620061518adc92f2a58c7b15cf04f", "3b6197160362c0122fcd7a63a9da58fd0781140901fb:SHA-1", "3b62a2d88ffac5332219116ab53e29dd3b9e1222e990::1000:SHA-1", }, }; /** * Set the passwords for all users specified in {@code USERS}. * * @param cs a callable statement that sets database properties */ private void setPasswords(CallableStatement cs) throws SQLException { for (int i = 0; i < USERS.length; i++) { // Use the specified algorithm, if possible. (Will be ignored if // the data dictionary doesn't support the new scheme.) cs.setString(1, Changes10_6.HASH_ALGORITHM_PROPERTY); cs.setString(2, USERS[i][2]); cs.execute(); // Set the password. cs.setString(1, "derby.user." + USERS[i][0]); cs.setString(2, USERS[i][1]); cs.execute(); } } /** * Verify that all passwords for the users in {@code USERS} are stored * as expected. Raise an assert failure on mismatch. * * @param c a connection to the database */ private void verifyPasswords(Connection c) throws SQLException { int pwIdx; if (getPhase() == PH_HARD_UPGRADE) { // Expect configurable hash scheme with key stretching in fully // upgraded databases. pwIdx = 5; } else if (oldAtLeast(10, 6)) { // Databases whose dictionary is at least version 10.6 support // configurable hash without key stretching. pwIdx = 4; } else { // Older databases only support the old scheme based on SHA-1. pwIdx = 3; } PreparedStatement ps = c.prepareStatement( "values syscs_util.syscs_get_database_property(?)"); for (int i = 0; i < USERS.length; i++) { String expectedToken = USERS[i][pwIdx]; ps.setString(1, "derby.user." 
+ USERS[i][0]); JDBC.assertSingleValueResultSet(ps.executeQuery(), expectedToken); } ps.close(); } /** * Verify that all users specified in {@code USERS} can connect to the * database. * * @param ds a data source for connecting to the database * @throws SQLException if one of the users cannot connect to the database */ private void verifyCanConnect(DataSource ds) throws SQLException { for (int i = 0; i < USERS.length; i++) { Connection c = ds.getConnection(USERS[i][0], USERS[i][1]); c.close(); } } /** * For 10.9 and later storage of jar files changed. DERBY-5357. */ public void testJarStorage() throws Exception { Statement s = createStatement(); switch (getPhase()) { case PH_CREATE: // create with old version createSchema("EMC"); createSchema("FOO"); s.executeUpdate( "create procedure EMC.ADDCONTACT(id INT, e_mail VARCHAR(30)) " + "MODIFIES SQL DATA " + "external name " + "'org.apache.derbyTesting.databaseclassloader.emc.addContact'" + " language java parameter style java"); s.executeUpdate( "create table EMC.CONTACTS " + " (id int, e_mail varchar(30))"); installJar("dcl_emc1.jar", "EMC.MAIL_APP"); installJar("dcl_java.jar", "EMC.MY_JAVA"); installJar("dcl_emc2.jar", "FOO.BAR"); setDBClasspath("EMC.MAIL_APP"); tryCall(); setDBClasspath(null); break; case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade // DERBY-6505: Take a backup of the database. Backing up the // jar storage takes a different path in soft upgrade, so make // sure that path is exercised. String backupdir = SupportFilesSetup.getReadWriteFileName("d6505-backup"); PreparedStatement backupStmt = prepareStatement("call syscs_util.syscs_backup_database(?)"); backupStmt.setString(1, backupdir); backupStmt.execute(); // fallthrough... run the same tests as in post soft upgrade. 
case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade setDBClasspath("EMC.MAIL_APP"); tryCall(); setDBClasspath(null); // if we can do this, it hasn't moved already: replaceJar("dcl_emc1.jar", "EMC.MAIL_APP"); setDBClasspath("EMC.MAIL_APP"); tryCall(); setDBClasspath(null); break; case PH_HARD_UPGRADE: // boot with new version and hard-upgrade setDBClasspath("EMC.MAIL_APP"); tryCall(); setDBClasspath(null); installJar("dcl_emc1.jar", "FOO.\"BAR/..\\../\""); verifyNewLocations(4); removeJar("EMC.MAIL_APP"); installJar("dcl_emc1.jar", "EMC.MAIL_APP"); setDBClasspath("EMC.MAIL_APP"); tryCall(); setDBClasspath(null); // finally, check that all the rest are also here replaceJar("dcl_java.jar", "EMC.MY_JAVA"); replaceJar("dcl_emc2.jar", "FOO.BAR"); replaceJar("dcl_emc1.jar", "FOO.\"BAR/..\\../\""); // clean up removeJar("EMC.MY_JAVA"); removeJar("FOO.BAR"); removeJar("FOO.\"BAR/..\\../\""); removeJar("EMC.MAIL_APP"); s.executeUpdate("drop table EMC.CONTACTS"); s.executeUpdate("drop procedure EMC.ADDCONTACT"); s.executeUpdate("drop schema FOO restrict"); s.executeUpdate("drop schema EMC restrict"); break; } s.close(); } private void createSchema(String name) throws SQLException { Statement s = createStatement(); s.executeUpdate("create schema " + name); s.close(); } private void installJar(String resource, String jarName) throws SQLException, MalformedURLException { URL jar = SupportFilesSetup.getReadOnlyURL(resource); CallableStatement cs = prepareCall("CALL SQLJ.INSTALL_JAR(?, ?, 0)"); cs.setString(1, jar.toExternalForm()); cs.setString(2, jarName); cs.executeUpdate(); cs.close(); } private void replaceJar(String resource, String jarName) throws SQLException, MalformedURLException { URL jar = SupportFilesSetup.getReadOnlyURL(resource); CallableStatement cs = prepareCall("CALL SQLJ.REPLACE_JAR(?, ?)"); cs.setString(1, jar.toExternalForm()); cs.setString(2, jarName); cs.executeUpdate(); cs.close(); } private void removeJar(String jarName) 
throws SQLException { CallableStatement cs = prepareCall("CALL SQLJ.REMOVE_JAR(?, 0)"); cs.setString(1, jarName); cs.executeUpdate(); cs.close(); } private void setDBClasspath(String cp) throws SQLException { CallableStatement cs = prepareCall( "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(" + "'derby.database.classpath', ?)"); cs.setString(1, cp); cs.executeUpdate(); cs.close(); } private void tryCall() throws SQLException { if (JDBC.vmSupportsJSR169()) { return; // skip, EMC uses DriverManager } CallableStatement cs = prepareCall("CALL EMC.ADDCONTACT(?, ?)"); cs.setInt(1, 0); cs.setString(2, "now@classpathchange.com"); cs.executeUpdate(); cs.close(); } private void verifyNewLocations(int noOfObjects) throws SQLException { TestConfiguration tc = TestConfiguration.getCurrent(); String dbPath = tc.getPhysicalDatabaseName(tc.getDefaultDatabaseName()); String jarDirName = "system" + File.separator + dbPath + File.separator + "jar"; File jarDir = new File(jarDirName); assertTrue(jarDir.isDirectory()); File[] contents = jarDir.listFiles(); // <db>/jar should now contain this no of files, none of which are // directories assertEquals(noOfObjects, contents.length); // assert that all the old style directories are gone for (int i=0; i < contents.length; i++) { File f = contents[i]; assertTrue(f.isFile()); assertFileNameShape(f.getName()); } } /** * Regexp pattern to match the file name of a jar file stored in the * database (version >= 10.9). */ private Goal[] pattern; /** * Initialize a pattern corresponding to: * <p/> * &lt;Derby uuid string&gt;[.]jar[.]G[0-9]+ * <p/> * where: * <p/> * &lt;Derby uuid string&gt; has the form * hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh * <p/> * where <em>h</em> id a lower case hex digit. 
*/ private void initPattern() { List<Goal> l = new ArrayList<Goal>(100); // The UUID format is determined by // org.apache.derby.impl.services.uuid.BasicUUID#toString for (int i=0; i < 8; i++) { l.add(new CharRange(new char[][]{{'0','9'},{'a','f'}})); } l.add(new SingleChar('-')); for (int j = 0; j < 3; j++) { for (int i=0; i < 4; i++) { l.add(new CharRange(new char[][]{{'0','9'},{'a','f'}})); } l.add(new SingleChar('-')); } for (int i=0; i < 12; i++) { l.add(new CharRange(new char[][]{{'0','9'},{'a','f'}})); } l.add(new SingleChar('.')); l.add(new SingleChar('j')); l.add(new SingleChar('a')); l.add(new SingleChar('r')); l.add(new SingleChar('.')); l.add(new SingleChar('G')); l.add(new CharRange(new char[][]{{'0','9'}}, Goal.REPEAT)); this.pattern = l.toArray(new Goal[l.size()]); } /** * assert that fName has the expected shape of a jar file * in the database (version >= 10.9). */ private void assertFileNameShape(String fName) { assertTrue(matches(fName, pattern)); } /** * Poor man's regexp matcher: can match patterns of type below, where * start "^" and end "$" is implied: must match whole string. * <p/> * reg.exp: ( '[' &lt;fromchar&gt;-&lt;tochar&gt; ] '+'? ']' | * &lt;char&gt; '+'? )* */ private boolean matches(String fName, Goal[] pattern) { int patIdx = 0; for (int i = 0; i < fName.length(); i++) { Goal p = pattern[patIdx]; char c = fName.charAt(i); if (p.matches(c)) { if (!p.isRepeatable()) { patIdx++; } p.setFoundOnce(); continue; } // Goal did not match: if we have a repeatable goal and we already // found one occurence it's ok, to step on to next goal in pattern // and see it that matches. 
patIdx++; if (p.matches(c)) { if (!p.isRepeatable()) { patIdx++; } p.setFoundOnce(); continue; } return false; } return patIdx >= (pattern.length - 1); // exact match } abstract class Goal { public abstract boolean matches(char c); public final static int REPEAT = 0; // optional goal property int option = -1; boolean foundOnce = false; public boolean isRepeatable () { return option == REPEAT; } public void setFoundOnce() { this.foundOnce = true; } public boolean foundOnce () { return this.foundOnce; } } private class CharRange extends Goal { private char[][] ranges; public CharRange(char[][]ranges) { this.ranges = (char[][])ranges.clone(); } public CharRange(char[][]ranges, int option) { this.ranges = (char[][])ranges.clone(); this.option = option; } public boolean matches(char c) { for (int i = 0; i < ranges.length; i++) { if (c >= ranges[i][0] && c <= ranges[i][1]) { return true; } } return false; } } private class SingleChar extends Goal { private char c; private int option = -1; private boolean foundOnce = false; public SingleChar(char c) { this.c = c; } public SingleChar(char c, int option) { this.c = c; this.option = option; } public boolean matches(char c) { return c == this.c; } } /** * Test the changes introduced to fix correctness problems with sequences. 
*/
    public void test_5493() throws Exception
    {
        // Obtaining the connection boots the database for the current
        // upgrade phase as a side effect; the handle itself is not used
        // directly below.
        Connection conn = getConnection();
        Statement s = createStatement();

        switch ( getPhase() )
        {
        case PH_CREATE: // create with old version
            assertNull( getNewFunctionID( s ) );
            break;

        case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade
            assertNull( getNewFunctionID( s ) );
            break;

        case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade
            assertNull( getNewFunctionID( s ) );
            break;

        case PH_HARD_UPGRADE: // boot with new version and hard-upgrade
            assertNotNull( getNewFunctionID( s ) );
            break;
        }

        s.close();
    }

    /**
     * Looks up the alias id of the SYSCS_PEEK_AT_SEQUENCE system function
     * in the SYSALIASES catalog.
     *
     * @param s statement used to query the catalog
     * @return the alias id, or {@code null} if the function is not registered
     */
    private String getNewFunctionID( Statement s ) throws Exception
    {
        ResultSet rs = null;

        try {
            rs = s.executeQuery ( "select aliasid from sys.sysaliases where alias = 'SYSCS_PEEK_AT_SEQUENCE'" );

            if ( !rs.next() ) { return null; }
            else { return rs.getString( 1 ); }
        }
        finally
        {
            // Always release the result set, even if the query fails.
            if ( rs != null ) { rs.close(); }
        }
    }

    /**
     * Verifies that an orphaned statistics entry can be dropped by running the
     * {@code SYSCS_DROP_STATISTICS} system procedure.
     * <p>
     * Relevant JIRAs:
     * <ul> <li>DERBY-4115: Provide a way to drop statistics information</li>
     *      <li>DERBY-5681: When a foreign key constraint on a table is dropped,
     *          the associated statistics row for the conglomerate</li>
     * </ul>
     * <p>
     * DERBY-5702 (Creating a foreign key constraint does not automatically
     * create a statistics row if the foreign key constraint will share a
     * backing index created for a primary key) is causing a problem for
     * us to test the hanging statistics row with 10.4 and prior releases.
     * The following test relies on having hanging statistics rows which should
     * have been dropped when the constraint owning them was dropped. The test
     * then goes ahead and uses the new drop statistics procedure to drop
     * the hanging statistics rows. But because of DERBY-5702, when a
     * constraint is added which will reuse an existing backing index,
     * no statistics row is created for that constraint unless a user were
     * to, say, use an update statistics stored procedure to create the
     * statistics for that constraint. And later when that constraint is
     * dropped, we will find that because of DERBY-5681, the statistics
     * row never gets dropped. But the update statistics stored procedure was
     * not introduced until 10.5 and because of that, we can't really
     * test for hanging indexes created through constraints sharing the same
     * backing index prior to 10.5.
     */
    public void testDropOrphanedStatistics()
            throws SQLException
    {
        // Update statistics procedure SYSCS_UPDATE_STATISTICS is not available
        // prior to 10.5 and hence we can't cause the hanging statistics to
        // appear in order to test the drop statistics after hard upgrade
        if (!oldAtLeast(10, 5))
            return;

        // Helper object to obtain information about index statistics.
        IndexStatsUtil stats = new IndexStatsUtil(openDefaultConnection());
        Statement s = createStatement();
        // The expected initial number of statistics entries in TEST_TAB_2.
        // With the DERBY-5681 bug present in the old version, the dropped
        // foreign key leaves an extra orphaned row behind.
        final int expected =
                DisposableIndexStatistics.hasDerby5681Bug(getOldVersion()) ?
                2 : 1;
        switch (getPhase())
        {
        case PH_CREATE:
            s.executeUpdate("CREATE TABLE TEST_TAB_1 (c11 int not null,"+
                    "c12 int not null, c13 int)");
            s.executeUpdate("INSERT INTO TEST_TAB_1 VALUES(1,1,1),(2,2,2)");
            s.executeUpdate("ALTER TABLE TEST_TAB_1 "+
                    "ADD CONSTRAINT TEST_TAB_1_PK_1 "+
                    "PRIMARY KEY (c11)");
            //The statistics for primary key constraint has been added
            stats.assertTableStats("TEST_TAB_1",1);
            s.executeUpdate("CREATE TABLE TEST_TAB_2 (c21 int not null)");
            s.executeUpdate("INSERT INTO TEST_TAB_2 VALUES(1),(2)");
            s.executeUpdate("ALTER TABLE TEST_TAB_2 "+
                    "ADD CONSTRAINT TEST_TAB_2_PK_1 "+
                    "PRIMARY KEY (c21)");
            stats.assertTableStats("TEST_TAB_2",1);
            //DERBY-5702 Add a foreign key constraint and now we should find 2 rows
            // of statistics for TEST_TAB_2 - 1 for primary key and other for
            // foreign key constraint
            s.executeUpdate("ALTER TABLE TEST_TAB_2 "+
                    "ADD CONSTRAINT TEST_TAB_2_FK_1 "+
                    "FOREIGN KEY(c21) REFERENCES TEST_TAB_1(c11)");
            //DERBY-5702 Like primary key earlier, adding foreign key constraint
            // didn't automatically add a statistics row for it. Have to run update
            // statistics manually to get a row added for its stat
            stats.assertTableStats("TEST_TAB_2",1);
            //Need to do a compress table to create the statistics for foreign
            // key constraint. Update statistics procedure is only available
            // in 10.5 and upwards and hence can't use that procedure here
            // since we are testing older releases too.
            s.execute("CALL SYSCS_UTIL.SYSCS_UPDATE_STATISTICS('APP','TEST_TAB_2', null)");
            //s.execute("CALL SYSCS_UTIL.SYSCS_COMPRESS_TABLE('APP','TEST_TAB_2',1)");
            stats.assertTableStats("TEST_TAB_2",2);
            s.executeUpdate("ALTER TABLE TEST_TAB_2 "+
                    "DROP CONSTRAINT TEST_TAB_2_FK_1");
            //Dropping the foreign key constraint does not remove its
            // statistics row because of DERBY-5681.
            stats.assertTableStats("TEST_TAB_2", expected);
            // SYSCS_DROP_STATISTICS does not exist yet in the old version.
            assertStatementError("42Y03", s,
                    "CALL SYSCS_UTIL.SYSCS_DROP_STATISTICS('APP','TEST_TAB_2', null)");
            break;

        case PH_SOFT_UPGRADE:
        case PH_POST_SOFT_UPGRADE:
            // The procedure is not available during/after soft upgrade either.
            assertStatementError("42Y03", s,
                    "CALL SYSCS_UTIL.SYSCS_DROP_STATISTICS('APP','TEST_TAB_2', null)");
            break;

        case PH_HARD_UPGRADE:
            stats.assertTableStats("TEST_TAB_2", expected);
            // After hard upgrade the procedure exists and removes the rows.
            s.execute("CALL SYSCS_UTIL.SYSCS_DROP_STATISTICS('APP','TEST_TAB_2', null)");
            stats.assertNoStatsTable("TEST_TAB_2");
            s.execute("CALL SYSCS_UTIL.SYSCS_UPDATE_STATISTICS('APP','TEST_TAB_2', null)");
            stats.assertNoStatsTable("TEST_TAB_2");
            break;

        case PH_POST_HARD_UPGRADE:
            //Make sure that the new procedure is still available
            s.execute("CALL SYSCS_UTIL.SYSCS_DROP_STATISTICS('APP','TEST_TAB_2', null)");
            s.executeUpdate("DROP TABLE TEST_TAB_1");
            s.executeUpdate("DROP TABLE TEST_TAB_2");
            break;
        }
    }

    /**
     * Verifies the behavior of the update statistics code when faced with
     * "disposable statistics entries".
     * <p>
     * A disposable statistics entry is a row in SYS.SYSSTATISTICS that has
     * been orphaned (see DERBY-5681) or is no longer needed by the
     * Derby optimizer (due to internal changes/improvements).
     * <p>
     * This test expects different things based on the phase:
     * <dl> <dt>create</dt>
     *      <dd>- run statements that will cause disposable statistics
     *          entries to be created</dd>
     *      <dt>soft upgrade</dt>
     *      <dd>- run the new update statistics code, expecting it to leave the
     *          disposable statistics intact</dd>
     *      <dt>downgrade</dt>
     *      <dd>- verify that the relevant statistics are present</dd>
     *      <dt>hard upgrade</dt>
     *      <dd>- run the new update statistics code, expecting it to get rid
     *          of the disposable statistics</dd>
     * </dl>
     */
    public void testDisposableStatisticsExplicit()
            throws SQLException {
        // Don't run this test with versions prior to 10.5, since the
        // required SYSCS_UPDATE_STATISTICS don't exist in older versions.
        if (!oldAtLeast(10, 5)) {
            return;
        }

        final String TBL = "ISTAT_DISPOSABLE_STATS";
        String updateStatsSQL = "call syscs_util.syscs_update_statistics(" +
                "'APP', ?, null)";
        DisposableIndexStatistics dis = new DisposableIndexStatistics(
                getOldVersion(), getConnection(), TBL);

        switch (getPhase()) {
            // create with old version
            case PH_CREATE:
            {
                dis.createAndPopulateTables();
                // The expected number of statistics entries depends on the
                // version of Derby used to create the database. Some older
                // versions of Derby contained a bug and lacked optimizations,
                // causing the number of statistics entries to increase.

                // Just after creation and before any update statistics expect
                // all stats to exist.
                dis.assertStatsCount(false, false);
                break;
            }
            // boot with new version and soft-upgrade
            case PH_SOFT_UPGRADE:
            {
                PreparedStatement ps = prepareStatement(updateStatsSQL);
                String[] tables = dis.getTableNames();

                // Update statistics on all relevant tables.
                for (int i=0; i < tables.length; i++) {
                    ps.setString(1, tables[i]);
                    ps.executeUpdate();
                }

                // After soft upgrade and update statistics expect the
                // orphaned index entry to be deleted, but the "unneeded
                // disposable entries" are only deleted after hard upgrade.
                dis.assertStatsCount(true, false);
                break;
            }
            // soft-downgrade: boot with old version after soft-upgrade
            case PH_POST_SOFT_UPGRADE:
            {
                // expect no change in entries on downgrade, should be same
                // as they were in soft upgrade.
                dis.assertStatsCount(true, false);
                break;
            }
            // boot with new version and hard-upgrade
            case PH_HARD_UPGRADE:
            {
                // expect no change in entries on upgrade before update
                // statistics.
                dis.assertStatsCount(true, false);

                PreparedStatement ps = prepareStatement(updateStatsSQL);
                String[] tables = dis.getTableNames();
                for (int i=0; i < tables.length; i++) {
                    ps.setString(1, tables[i]);
                    ps.executeUpdate();
                }

                // Confirm that we disposed of the statistics that were added
                // due to a bug or simply not needed by Derby.
                try {
                    dis.assertStatsCount(true, true);
                } finally {
                    // Clean up the helper tables whether or not the
                    // assertion succeeded.
                    for (int i=0; i < tables.length; i++) {
                        dropTable(tables[i]);
                    }
                }
                commit();
                break;
            }
        }
    }
}
/*
 * #%L
 * =====================================================
 *   _____                _     ____  _   _       _   _
 *  |_   _|_ __ _   _ ___| |_  / __ \| | | | ___ | | | |
 *    | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
 *    | | | |  | |_| \__ \ |_| | (_| |  _  |\__ \|  _  |
 *    |_| |_|   \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
 *                              \____/
 *
 * =====================================================
 *
 * Hochschule Hannover
 * (University of Applied Sciences and Arts, Hannover)
 * Faculty IV, Dept. of Computer Science
 * Ricklinger Stadtweg 118, 30459 Hannover, Germany
 *
 * Email: trust@f4-i.fh-hannover.de
 * Website: http://trust.f4.hs-hannover.de/
 *
 * This file is part of irongui, version 0.4.8,
 * implemented by the Trust@HsH research group at the Hochschule Hannover.
 * %%
 * Copyright (C) 2010 - 2015 Trust@HsH
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package de.hshannover.f4.trust.irongui.view;

import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

import javax.swing.Box;
import javax.swing.DefaultListModel;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JSeparator;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.border.EtchedBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;

import de.hshannover.f4.trust.irongui.communication.Connection;
import de.hshannover.f4.trust.irongui.util.ResourceHelper;
import de.hshannover.f4.trust.irongui.view.component.MetadataPanel;
import de.hshannover.f4.trust.irongui.view.component.NavigationPanel;
import de.hshannover.f4.trust.irongui.view.component.ToolbarPanel;

/**
 * Main application window of irongui: builds the menu bar, the
 * toolbar/navigation/tab/metadata layout and wires all UI events to the
 * {@link ViewController}.
 */
@SuppressWarnings("serial")
public class MainFrame extends JFrame {

	// Controller all UI events are delegated to.
	ViewController mViewController;
	// Bottom panel showing metadata details.
	MetadataPanel mMetadataPanel;
	// Status bar container at the bottom of the frame.
	private JPanel mStatusPanel;
	// Left-hand panel holding the connection list and navigation buttons.
	NavigationPanel mNavigationPanel;
	// Toolbar at the top of the frame.
	ToolbarPanel mToolbarPanel;
	// One tab per selected connection.
	JTabbedPane mTabbedPane;
	// Status bar label.
	JLabel mStatus;

	private JMenuBar mMenuBar;
	private JMenu mMnConnection;
	private JMenu mMnEdit;
	private JMenuItem mMntmManageConnections;
	private JSeparator mSeparator;
	private JSeparator mSeparator1;
	// Public: the "Connect to" submenu is populated from outside this class.
	public JMenu mMenuConnectTo;
	private JMenuItem mMntmQuitProgram;
	private JMenuItem mMntmPublisherColor;
	private JMenu mMnAbout;
	private JMenuItem mMntmIrongui;

	/**
	 * Creates the main window.
	 *
	 * @param vc the controller receiving all UI events; the menu listeners
	 *           capture {@code mViewController}, which is assigned at the end
	 *           of this constructor — safe because listeners only fire later.
	 */
	public MainFrame(ViewController vc) {

		// --- menu bar -------------------------------------------------------
		mMenuBar = new JMenuBar();
		setJMenuBar(mMenuBar);

		mMnConnection = new JMenu("Connection");
		mMenuBar.add(mMnConnection);

		mMntmManageConnections = new JMenuItem("Manage connections...");
		mMntmManageConnections.setMnemonic(KeyEvent.VK_M);
		mMntmManageConnections.setIcon(ResourceHelper
				.getImage("computer_add_16x16.png"));
		mMnConnection.add(mMntmManageConnections);
		mMntmManageConnections.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController.showConnectionDialog();
			}
		});

		mSeparator = new JSeparator();
		mMnConnection.add(mSeparator);

		mMenuConnectTo = new JMenu("Connect to");
		mMenuConnectTo.setIcon(ResourceHelper.getImage("control_play_blue.png"));
		mMnConnection.add(mMenuConnectTo);

		mSeparator1 = new JSeparator();
		mMnConnection.add(mSeparator1);

		mMntmQuitProgram = new JMenuItem("Exit");
		mMntmQuitProgram.setIcon(ResourceHelper.getImage("cancel.png"));
		mMntmQuitProgram.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				dispose();
				System.exit(0);
			}
		});
		mMnConnection.add(mMntmQuitProgram);

		mMnEdit = new JMenu("Edit");
		mMenuBar.add(mMnEdit);

		mMntmPublisherColor = new JMenuItem("Publisher color...");
		mMntmPublisherColor.setIcon(ResourceHelper
				.getImage("color_management_16x16.png"));
		mMntmPublisherColor.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController.showPublisherDialog();
			}
		});
		mMnEdit.add(mMntmPublisherColor);

		// Push the About menu to the right-hand edge of the menu bar.
		mMenuBar.add(Box.createHorizontalGlue());

		mMnAbout = new JMenu("About");
		mMenuBar.add(mMnAbout);

		mMntmIrongui = new JMenuItem("Irongui ...");
		mMntmIrongui.setIcon(ResourceHelper.getImage("information.png"));
		mMntmIrongui.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent arg0) {
				// FIX: version string said 0.4.2 although this build is 0.4.8
				// (see the file header) — keep it in sync.
				JOptionPane
						.showMessageDialog(
								MainFrame.this,
								"irongui 0.4.8\n"
										+ "Keep in mind that irongui is an experimental MAPC.\n"
										+ "You might encounter bugs, primarily when metadata\n"
										+ "is missing or metadata that was deleted is still\n"
										+ "rendered by irongui. Work around: restart irongui\n"
										+ "and start over again ;)\n", "About",
								JOptionPane.INFORMATION_MESSAGE,
								ResourceHelper.getAppIconImage());
			}
		});
		mMnAbout.add(mMntmIrongui);

		mViewController = vc;
		init();
	}

	/**
	 * Builds the component layout and registers all listeners that forward
	 * UI events to the {@link ViewController}.
	 */
	private void init() {
		// --- layout ---------------------------------------------------------
		Container mainPanel = this.getContentPane();
		mainPanel.setLayout(new BorderLayout());

		mToolbarPanel = new ToolbarPanel();
		getContentPane().add(mToolbarPanel, BorderLayout.NORTH);

		// Vertical split: (navigation | tabs) on top, metadata below.
		JSplitPane splitPane1 = new JSplitPane();
		splitPane1.setOneTouchExpandable(true);
		splitPane1.setOrientation(JSplitPane.VERTICAL_SPLIT);
		getContentPane().add(splitPane1, BorderLayout.CENTER);

		JSplitPane splitPane2 = new JSplitPane();
		splitPane2.setOneTouchExpandable(true);
		splitPane1.setLeftComponent(splitPane2);

		mNavigationPanel = new NavigationPanel();
		splitPane2.setLeftComponent(mNavigationPanel);

		mTabbedPane = new JTabbedPane(JTabbedPane.TOP);
		mTabbedPane.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
		mTabbedPane.setFocusable(false);
		splitPane2.setRightComponent(mTabbedPane);

		mMetadataPanel = new MetadataPanel();
		splitPane1.setRightComponent(mMetadataPanel);
		splitPane1.setDividerLocation(450);

		mStatusPanel = new JPanel();
		mStatusPanel
				.setBorder(new EtchedBorder(EtchedBorder.LOWERED, null, null));
		FlowLayout flowLayout = (FlowLayout) mStatusPanel.getLayout();
		flowLayout.setAlignment(FlowLayout.RIGHT);
		getContentPane().add(mStatusPanel, BorderLayout.SOUTH);
		mStatusPanel.setPreferredSize(new Dimension(getWidth(), 24));

		mStatus = new JLabel("irongui started successfully.");
		mStatusPanel.add(mStatus);

		/*********************************************************************
		 * ActionListener wiring
		 *********************************************************************/

		// Connection button in the toolbar
		mToolbarPanel.mButtonConnections.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController.showConnectionDialog();
			}
		});

		// Quick subscribe
		mNavigationPanel.mButtonSubscribe.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController
						.showQuickSubscribe(mNavigationPanel.mButtonSubscribe);
			}
		});

		// Tab selected
		mTabbedPane.addChangeListener(new ChangeListener() {
			@Override
			public void stateChanged(ChangeEvent arg0) {
				mViewController.tabChanged();
			}
		});

		// A single connection in the Navigator panel is double-clicked
		mNavigationPanel.mPanelList.addMouseListener(new MouseAdapter() {
			@Override
			public void mouseClicked(MouseEvent e) {
				if (e.getClickCount() == 2) {
					DefaultListModel model =
							(DefaultListModel) mNavigationPanel.mPanelList
									.getModel();
					Connection con = (Connection) model
							.get(mNavigationPanel.mPanelList.getSelectedIndex());
					mViewController.selectConnection(con);
				}
			}
		});

		// Selection change (single click / keyboard) — no new tab is created
		mNavigationPanel.mPanelList
				.addListSelectionListener(new ListSelectionListener() {
					@Override
					public void valueChanged(ListSelectionEvent e) {
						if (mNavigationPanel.mPanelList.getSelectedIndex() != -1) {
							DefaultListModel model =
									(DefaultListModel) mNavigationPanel.mPanelList
											.getModel();
							Connection con = (Connection) model
									.get(mNavigationPanel.mPanelList
											.getSelectedIndex());
							mViewController
									.selectConnectionWithoutCreatingTab(con);
						}
					}
				});

		// Connect/disconnect (play button)
		mNavigationPanel.mButtonPlay.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				SwingUtilities.invokeLater(new Runnable() {
					public void run() {
						final int index = mNavigationPanel.mPanelList
								.getSelectedIndex();
						if (index > -1) {
							DefaultListModel model =
									(DefaultListModel) mNavigationPanel.mPanelList
											.getModel();
							Connection con = (Connection) model.get(index);
							mViewController.selectConnection(con);
							mViewController.toggleConnection();
						} else {
							mNavigationPanel.mButtonPlay.setSelected(false);
							JOptionPane
									.showMessageDialog(MainFrame.this,
											"Please select a connection on the left side first.");
						}
					}
				});
			}
		});

		// Tree button
		mNavigationPanel.mButtonTree.addItemListener(new ItemListener() {
			@Override
			public void itemStateChanged(ItemEvent e) {
				int state = e.getStateChange();
				if (state == ItemEvent.SELECTED) {
					mViewController.treeButtonClicked();
				}
			}
		});

		// Connection button in the navigation panel
		mNavigationPanel.mButtonConnections.addItemListener(new ItemListener() {
			@Override
			public void itemStateChanged(ItemEvent e) {
				int state = e.getStateChange();
				if (state == ItemEvent.SELECTED) {
					mViewController.conButtonClicked();
				}
			}
		});

		// Publisher button
		mToolbarPanel.mButtonPublisher.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController.showPublisherDialog();
			}
		});

		// Stop animation
		mToolbarPanel.mButtonAnimation.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				mViewController.toggleAnimation();
			}
		});
	}

	/**
	 * Standalone UI preview. NOTE(review): passes a {@code null} controller,
	 * so any action that reaches the ViewController will NPE — only intended
	 * for layout testing.
	 */
	public static void main(String[] args) {
		try {
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
		} catch (Throwable e) {
			e.printStackTrace();
		}
		MainFrame f = new MainFrame(null);
		f.setSize(1024, 768);
		f.setLocationRelativeTo(null);
		f.setVisible(true);
	}
}
/*
 * Copyright 2006 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.compiler.rule.builder;

import java.util.Optional;

import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.compiler.Dialect;
import org.drools.compiler.compiler.DialectCompiletimeRegistry;
import org.drools.compiler.compiler.RuleBuildError;
import org.drools.compiler.lang.descr.QueryDescr;
import org.drools.compiler.lang.descr.RuleDescr;
import org.drools.core.base.TypeResolver;
import org.drools.core.beliefsystem.abductive.Abductive;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.rule.AbductiveQuery;
import org.drools.core.rule.EntryPointId;
import org.drools.core.rule.Pattern;
import org.drools.core.rule.QueryImpl;
import org.drools.core.spi.DeclarationScopeResolver;
import org.kie.api.runtime.rule.RuleUnit;

/**
 * A context for the current build
 */
public class RuleBuildContext extends PackageBuildContext {

    // current rule
    private final RuleImpl rule;

    // current Rule descriptor
    private final RuleDescr ruleDescr;

    // available declarationResolver
    private DeclarationScopeResolver declarationResolver;

    // a simple counter for patterns
    private int patternId = -1;

    private final DroolsCompilerComponentFactory compilerFactory;

    private boolean needStreamMode = false;

    private Pattern prefixPattern;

    private boolean inXpath;

    /**
     * Default constructor.
     *
     * Builds the RuleImpl (a query, an abductive query, or a plain rule,
     * depending on the descriptor), initializes the inherited
     * PackageBuildContext state, and prepares the dialect and declaration
     * resolver.
     */
    public RuleBuildContext(final KnowledgeBuilderImpl kBuilder,
                            final RuleDescr ruleDescr,
                            final DialectCompiletimeRegistry dialectCompiletimeRegistry,
                            final InternalKnowledgePackage pkg,
                            final Dialect defaultDialect) {
        this.ruleDescr = ruleDescr;

        if ( ruleDescr instanceof QueryDescr ) {
            // Queries may be abductive; the @Abductive annotation selects the
            // specialized implementation.
            Abductive abductive = ruleDescr.getTypedAnnotation( Abductive.class );
            if ( abductive == null ) {
                this.rule = new QueryImpl( ruleDescr.getName() );
            } else {
                this.rule = new AbductiveQuery( ruleDescr.getName(), abductive.mode() );
            }
        } else {
            this.rule = ruleDescr.toRule();
        }
        this.rule.setPackage(pkg.getName());
        this.rule.setDialect(ruleDescr.getDialect());
        this.rule.setLoadOrder( ruleDescr.getLoadOrder() );

        init(kBuilder, pkg, ruleDescr, dialectCompiletimeRegistry, defaultDialect, this.rule);

        // If the descriptor did not name a dialect, fall back to the one
        // resolved by init().
        if (this.rule.getDialect() == null) {
            this.rule.setDialect(getDialect().getId());
        }

        if (ruleDescr.getUnit() != null) {
            // Inner-class naming: '.' in the unit target becomes '$'.
            rule.setRuleUnitClassName( pkg.getName() + "." + ruleDescr.getUnit().getTarget().replace( '.', '$' ) );
        }

        Dialect dialect = getDialect();
        if (dialect != null ) {
            dialect.init( ruleDescr );
        }

        this.compilerFactory = kBuilder.getBuilderConfiguration().getComponentFactory();
        this.declarationResolver = new DeclarationScopeResolver( kBuilder.getGlobals(), getPkg() );
    }

    /**
     * Returns the current Rule being built
     */
    public RuleImpl getRule() {
        return this.rule;
    }

    /**
     * Returns the current RuleDescriptor
     */
    public RuleDescr getRuleDescr() {
        return this.ruleDescr;
    }

    /**
     * Returns the available declarationResolver instance
     */
    public DeclarationScopeResolver getDeclarationResolver() {
        return this.declarationResolver;
    }

    /**
     * Sets the available declarationResolver instance
     */
    public void setDeclarationResolver(final DeclarationScopeResolver declarationResolver) {
        this.declarationResolver = declarationResolver;
    }

    /** Returns the next pattern id (pre-incremented counter). */
    public int getNextPatternId() {
        return ++this.patternId;
    }

    public DroolsCompilerComponentFactory getCompilerFactory() {
        return compilerFactory;
    }

    public boolean needsStreamMode() {
        return needStreamMode;
    }

    public void setNeedStreamMode() {
        this.needStreamMode = true;
    }

    public void setPrefixPattern(Pattern prefixPattern) {
        this.prefixPattern = prefixPattern;
    }

    public Pattern getPrefixPattern() {
        return prefixPattern;
    }

    public boolean isInXpath() {
        return inXpath;
    }

    public void setInXpath( boolean inXpath ) {
        this.inXpath = inXpath;
    }

    public void initRule() {
        initRuleUnitClassName();
        declarationResolver.setRule( rule );
    }

    @Override
    public Class< ? > resolveVarType(String identifier) {
        return getDeclarationResolver().resolveVarType( identifier );
    }

    /**
     * Resolves and validates the rule-unit class name. If the rule does not
     * declare one explicitly, a candidate name is inferred from the source
     * resource's file name; an inferred name is only applied when the class
     * exists and implements {@link RuleUnit}, while an explicitly declared
     * name that cannot be resolved is reported as a build error.
     */
    private void initRuleUnitClassName() {
        String ruleUnitClassName = rule.getRuleUnitClassName();
        boolean nameInferredFromResource = false;

        if ( ruleUnitClassName == null && rule.getResource() != null && rule.getResource().getSourcePath() != null ) {
            String drlPath = rule.getResource().getSourcePath();
            int lastSep = drlPath.lastIndexOf( '/' );
            if (lastSep >= 0) {
                drlPath = drlPath.substring( lastSep+1 );
            }
            // FIX: guard against a file name without an extension — the
            // original substring(0, lastIndexOf('.')) threw
            // StringIndexOutOfBoundsException when no '.' was present.
            int lastDot = drlPath.lastIndexOf( '.' );
            String baseName = lastDot >= 0 ? drlPath.substring( 0, lastDot ) : drlPath;
            ruleUnitClassName = rule.getPackage() + "." + baseName.replace( '/', '.' );
            nameInferredFromResource = true;
        }

        if (ruleUnitClassName != null) {
            TypeResolver typeResolver = getPkg().getTypeResolver();
            boolean unitFound = false;
            try {
                unitFound = RuleUnit.class.isAssignableFrom( Class.forName(ruleUnitClassName, true, typeResolver.getClassLoader()) );
                if (unitFound && nameInferredFromResource) {
                    rule.setRuleUnitClassName( ruleUnitClassName );
                }
            } catch (ClassNotFoundException e) {
                // ignore: an inferred name is only a guess, and an explicit
                // name is reported below via addError.
            }
            if (!unitFound && !nameInferredFromResource) {
                addError( new RuleBuildError( rule, getParentDescr(), null,
                                              ruleUnitClassName + " is not a valid RuleUnit class name" ) );
            }
        }
    }

    public Optional<EntryPointId> getEntryPointId(String name) {
        return getPkg().getRuleUnitRegistry().getRuleUnitFor( getRule() ).flatMap( ruDescr -> ruDescr.getEntryPointId(name) );
    }
}
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.soytree;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.template.soy.base.internal.IdGenerator;
import com.google.template.soy.basetree.AbstractNodeVisitor;
import com.google.template.soy.basetree.CopyState;
import com.google.template.soy.basetree.Node;
import com.google.template.soy.basetree.NodeVisitor;
import com.google.template.soy.basetree.ParentNode;
import com.google.template.soy.exprtree.ExprNode;
import com.google.template.soy.exprtree.ExprRootNode;
import com.google.template.soy.soytree.SoyNode.ExprHolderNode;
import com.google.template.soy.soytree.SoyNode.ParentSoyNode;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;

/**
 * Shared utilities for the 'soytree' package.
 *
 * <p>Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 */
public final class SoyTreeUtils {

  private static final Joiner COMMA_JOINER = Joiner.on(", ");

  private SoyTreeUtils() {}

  /** Returns true if the given {@code node} contains any children of the given types. */
  @SafeVarargs
  public static boolean hasNodesOfType(Node node, final Class<? extends Node>... types) {
    class Visitor implements NodeVisitor<Node, VisitDirective> {
      boolean found;

      @Override
      public VisitDirective exec(Node node) {
        // Use the bounded wildcard type (the original used a raw Class here).
        for (Class<? extends Node> type : types) {
          if (type.isInstance(node)) {
            found = true;
            return VisitDirective.ABORT;
          }
        }
        return VisitDirective.CONTINUE;
      }
    }
    Visitor v = new Visitor();
    visitAllNodes(node, v);
    return v.found;
  }

  /** Returns true if the given {@code node} contains any children that are HTML nodes. */
  public static boolean hasHtmlNodes(Node node) {
    return hasNodesOfType(
        node,
        HtmlOpenTagNode.class,
        HtmlCloseTagNode.class,
        HtmlCommentNode.class,
        HtmlAttributeNode.class,
        HtmlAttributeValueNode.class);
  }

  /** An enum that allows a {@link #visitAllNodes} visitor to control how the AST is traversed. */
  public enum VisitDirective {
    /**
     * This means that the children of the current node should not be visited, but traversal should
     * continue.
     */
    SKIP_CHILDREN,
    /** This means that the whole visit operation should be abruptly halted. */
    ABORT,
    /** This means that traversal should continue as normal. */
    CONTINUE;
  }

  /**
   * Runs the visitor on all nodes (including {@link ExprNode expr nodes}) reachable from the given
   * node. The order of visiting is breadth first.
   *
   * <p>If the visitor returns {@link VisitDirective#ABORT} we will short circuit visiting.
   */
  public static void visitAllNodes(Node node, NodeVisitor<? super Node, VisitDirective> visitor) {
    ArrayDeque<Node> queue = new ArrayDeque<>();
    queue.add(node);
    Node current;
    while ((current = queue.poll()) != null) {
      switch (visitor.exec(current)) {
        case ABORT:
          return;
        case CONTINUE:
          if (current instanceof ParentNode<?>) {
            queue.addAll(((ParentNode<?>) current).getChildren());
          }
          if (current instanceof ExprHolderNode) {
            queue.addAll(((ExprHolderNode) current).getExprList());
          }
          continue;
        case SKIP_CHILDREN:
          continue;
        default:
          throw new AssertionError();
      }
    }
  }

  /**
   * Retrieves all nodes in a tree that are an instance of a particular class.
   *
   * @param <T> The type of node to retrieve.
   * @param rootSoyNode The parse tree to search.
   * @param classObject The class whose instances to search for, including subclasses.
   * @return The nodes in the order they appear.
   */
  public static <T extends Node> ImmutableList<T> getAllNodesOfType(
      Node rootSoyNode, final Class<T> classObject) {
    final ImmutableList.Builder<T> matchedNodesBuilder = ImmutableList.builder();
    // optimization to avoid navigating into expr trees if we can't possibly match anything
    final boolean exploreExpressions = ExprNode.class.isAssignableFrom(classObject);
    visitAllNodes(
        rootSoyNode,
        new NodeVisitor<Node, VisitDirective>() {
          @Override
          public VisitDirective exec(Node node) {
            if (classObject.isInstance(node)) {
              matchedNodesBuilder.add(classObject.cast(node));
            }
            if (!exploreExpressions && node instanceof ExprNode) {
              return VisitDirective.SKIP_CHILDREN;
            }
            return VisitDirective.CONTINUE;
          }
        });
    return matchedNodesBuilder.build();
  }

  /**
   * Given a Soy node, returns a {@code StringBuilder} that can be used to pretty print the AST
   * structure.
   *
   * <p>For example, for the following soy source <code><pre>
   * {for i in range(5)}
   *   {if $i % 2 == 0}
   *     foo
   *   {/if}
   * {/for}
   * </pre></code> This method prints the AST string as follow: <code><pre>
   * FOR_NODE
   *   IF_NODE
   *     IF_COND_NODE
   *       PRINT_NODE
   * </pre></code>
   *
   * @param node The root of the AST.
   * @param indent The indentation for each level.
   * @param sb The StringBuilder instance used for recursion.
   * @return The StringBuilder instance.
   */
  public static StringBuilder buildAstString(ParentSoyNode<?> node, int indent, StringBuilder sb) {
    for (SoyNode child : node.getChildren()) {
      sb.append(Strings.repeat("  ", indent)).append(child.getKind()).append('\n');
      if (child instanceof ParentSoyNode) {
        buildAstString((ParentSoyNode<?>) child, indent + 1, sb);
      }
    }
    return sb;
  }

  /** Similar to {@link #buildAstString}, but also print the source string for debug usages. */
  public static StringBuilder buildAstStringWithPreview(
      ParentSoyNode<?> node, int indent, StringBuilder sb) {
    for (SoyNode child : node.getChildren()) {
      sb.append(Strings.repeat("  ", indent))
          .append(child.getKind())
          .append(": ")
          .append(child.toSourceString())
          .append('\n');
      if (child instanceof ParentSoyNode) {
        // FIX: the original recursed into buildAstString() here, so every
        // node below the top level lost its source preview. Recurse into
        // this method instead.
        buildAstStringWithPreview((ParentSoyNode<?>) child, indent + 1, sb);
      }
    }
    return sb;
  }

  // -----------------------------------------------------------------------------------------------
  // Utils for executing an ExprNode visitor on all expressions in a Soy tree.

  /**
   * Given a Soy node and a visitor for expression trees, traverses the subtree of the node and
   * executes the visitor on all expressions held by nodes in the subtree.
   *
   * <p>Only processes expressions in V2 syntax. Ignores all expressions in V1 syntax.
   *
   * @param <R> The ExprNode visitor's return type.
   * @param node The root of the subtree to visit all expressions in.
   * @param exprNodeVisitor The visitor to execute on all expressions.
   */
  public static <R> void execOnAllV2Exprs(
      SoyNode node, final AbstractNodeVisitor<ExprNode, R> exprNodeVisitor) {
    visitAllNodes(
        node,
        new NodeVisitor<Node, VisitDirective>() {
          @Override
          public VisitDirective exec(Node node) {
            if (node instanceof ExprHolderNode) {
              for (ExprRootNode expr : ((ExprHolderNode) node).getExprList()) {
                exprNodeVisitor.exec(expr);
              }
            } else if (node instanceof ExprNode) {
              // Expressions are handled via their holder above.
              return VisitDirective.SKIP_CHILDREN;
            }
            return VisitDirective.CONTINUE;
          }
        });
  }

  // -----------------------------------------------------------------------------------------------
  // Utils for cloning.

  /**
   * Clones the given node and then generates and sets new ids on all the cloned nodes (by default,
   * SoyNode.copy(copyState) creates cloned nodes with the same ids as the original nodes).
   *
   * <p>This function will use the original Soy tree's node id generator to generate the new node
   * ids for the cloned nodes. Thus, the original node to be cloned must be part of a full Soy tree.
   * However, this does not mean that the cloned node will become part of the original tree (unless
   * it is manually attached later). The cloned node will be an independent subtree with parent set
   * to null.
   *
   * @param <T> The type of the node being cloned.
   * @param origNode The original node to be cloned. This node must be part of a full Soy tree,
   *     because the generator for the new node ids will be retrieved from the root (SoyFileSetNode)
   *     of the tree.
   * @param nodeIdGen The ID generator used for the tree.
   * @return The cloned node, with all new ids for its subtree.
   */
  public static <T extends SoyNode> T cloneWithNewIds(T origNode, IdGenerator nodeIdGen) {

    // Clone the node.
    @SuppressWarnings("unchecked")
    T clone = (T) origNode.copy(new CopyState());

    // Generate new ids.
    (new GenNewIdsVisitor(nodeIdGen)).exec(clone);

    return clone;
  }

  /**
   * Clones the given list of nodes and then generates and sets new ids on all the cloned nodes (by
   * default, SoyNode.copy(copyState) creates cloned nodes with the same ids as the original nodes).
   *
   * <p>This function will use the original Soy tree's node id generator to generate the new node
   * ids for the cloned nodes. Thus, the original nodes to be cloned must be part of a full Soy
   * tree. However, this does not mean that the cloned nodes will become part of the original tree
   * (unless they are manually attached later). The cloned nodes will be independent subtrees with
   * parents set to null.
   *
   * @param <T> The type of the nodes being cloned.
   * @param origNodes The original nodes to be cloned. These nodes must be part of a full Soy tree,
   *     because the generator for the new node ids will be retrieved from the root (SoyFileSetNode)
   *     of the tree.
   * @param nodeIdGen The ID generator used for the tree.
   * @return The cloned nodes, with all new ids for their subtrees.
   */
  public static <T extends SoyNode> List<T> cloneListWithNewIds(
      List<T> origNodes, IdGenerator nodeIdGen) {

    Preconditions.checkNotNull(origNodes);
    List<T> clones = new ArrayList<>(origNodes.size());
    for (T origNode : origNodes) {
      @SuppressWarnings("unchecked")
      T clone = (T) origNode.copy(new CopyState());
      (new GenNewIdsVisitor(nodeIdGen)).exec(clone);
      clones.add(clone);
    }

    return clones;
  }

  /** Private helper for cloneWithNewIds() to set new ids on a cloned subtree. */
  private static class GenNewIdsVisitor extends AbstractNodeVisitor<SoyNode, Void> {

    /** The generator for new node ids. */
    private IdGenerator nodeIdGen;

    /** @param nodeIdGen The generator for new node ids. */
    public GenNewIdsVisitor(IdGenerator nodeIdGen) {
      this.nodeIdGen = nodeIdGen;
    }

    @Override
    protected void visit(SoyNode node) {
      node.setId(nodeIdGen.genId());
      if (node instanceof ParentSoyNode<?>) {
        visitChildren((ParentSoyNode<?>) node);
      }
    }
  }

  // -----------------------------------------------------------------------------------------------
  // Miscellaneous.

  /** Returns true if {@code node} is a descendant of {@code ancestor}. */
  public static boolean isDescendantOf(SoyNode node, SoyNode ancestor) {
    for (; node != null; node = node.getParent()) {
      if (ancestor == node) {
        return true;
      }
    }
    return false;
  }

  public static String toSourceString(List<? extends Node> nodes) {
    List<String> strings = new ArrayList<String>(nodes.size());
    for (Node node : nodes) {
      strings.add(node.toSourceString());
    }
    return COMMA_JOINER.join(strings);
  }
}
package org.embulk.input.http;

import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader;
import org.embulk.config.ConfigDiff;
import org.embulk.config.ConfigSource;
import org.embulk.config.TaskReport;
import org.embulk.config.TaskSource;
import org.embulk.input.http.HttpRequestBuilder.GetHttpRequestBuilder;
import org.embulk.input.http.HttpRequestBuilder.PostHttpRequestBuilder;
import org.embulk.spi.Exec;
import org.embulk.spi.FileInputPlugin;
import org.embulk.spi.TransactionalFileInput;
import org.embulk.util.config.Config;
import org.embulk.util.config.ConfigDefault;
import org.embulk.util.config.ConfigMapper;
import org.embulk.util.config.ConfigMapperFactory;
import org.embulk.util.config.Task;
import org.embulk.util.config.TaskMapper;
import org.embulk.util.file.InputStreamFileInput;
import org.embulk.util.retryhelper.RetryExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import static java.lang.String.format;

/**
 * Embulk file-input plugin that fetches data over HTTP (GET or POST),
 * optionally expanding a pager / params configuration into multiple tasks.
 */
public class HttpFileInputPlugin implements FileInputPlugin {
    private static final Logger LOGGER = LoggerFactory.getLogger(HttpFileInputPlugin.class);

    private static final ConfigMapperFactory CONFIG_MAPPER_FACTORY =
            ConfigMapperFactory.builder().addDefaultModules().build();

    /** Request builders keyed by HTTP method. */
    private static final Map<HttpMethod, HttpRequestBuilder> HTTP_REQUEST_BUILDERS;

    static {
        // Plain static initializer instead of double-brace initialization
        // (which creates a needless anonymous HashMap subclass).
        Map<HttpMethod, HttpRequestBuilder> builders = new HashMap<>();
        builders.put(HttpMethod.GET, new GetHttpRequestBuilder());
        builders.put(HttpMethod.POST, new PostHttpRequestBuilder());
        HTTP_REQUEST_BUILDERS = Collections.unmodifiableMap(builders);
    }

    @Override
    public ConfigDiff transaction(ConfigSource config, FileInputPlugin.Control control) {
        final ConfigMapper configMapper = CONFIG_MAPPER_FACTORY.createConfigMapper();
        final PluginTask task = configMapper.map(config, PluginTask.class);

        // params take precedence over a bare pager; with neither, a single task runs.
        final List<List<QueryOption.Query>> queries;
        if (task.getParams().isPresent()) {
            queries = task.getParams().get().generateQueries(task.getPager().orElse(null));
        } else if (task.getPager().isPresent()) {
            queries = task.getPager().get().expand();
        } else {
            queries = Collections.emptyList();
        }
        task.setQueries(queries);
        // Locale.ENGLISH: "get"/"post" must map to the enum regardless of default locale
        // (e.g. Turkish dotless-i would break valueOf with the default locale).
        task.setHttpMethod(HttpMethod.valueOf(task.getMethod().toUpperCase(Locale.ENGLISH)));

        // One task per expanded query list; at least one task even with no queries.
        return resume(task.toTaskSource(), queries.isEmpty() ? 1 : queries.size(), control);
    }

    @Override
    public ConfigDiff resume(TaskSource taskSource, int taskCount, FileInputPlugin.Control control) {
        control.run(taskSource, taskCount);
        return CONFIG_MAPPER_FACTORY.newConfigDiff();
    }

    @Override
    public void cleanup(TaskSource taskSource, int taskCount, List<TaskReport> successTaskReports) {}

    @Override
    public TransactionalFileInput open(TaskSource taskSource, int taskIndex) {
        final TaskMapper taskMapper = CONFIG_MAPPER_FACTORY.createTaskMapper();
        PluginTask task = taskMapper.map(taskSource, PluginTask.class);

        HttpRequestBase request;
        try {
            request = httpRequestFrom(task, taskIndex);
        } catch (URISyntaxException | UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }

        HttpClientBuilder builder =
                HttpClientBuilder.create()
                        .disableAutomaticRetries()
                        .setDefaultRequestConfig(requestConfigFrom(task))
                        .setDefaultHeaders(requestHeadersFrom(task));

        if (task.getBasicAuth().isPresent()) {
            builder.setDefaultCredentialsProvider(
                    makeCredentialsProvider(task.getBasicAuth().get(), request));
        }

        LOGGER.info(
                format(
                        Locale.ENGLISH,
                        "%s \"%s\"",
                        task.getMethod().toUpperCase(Locale.ENGLISH),
                        request.getURI().toString()));

        long startTimeMills = System.currentTimeMillis();
        try {
            // NOTE(review): the client built here is never explicitly closed; its lifetime
            // appears tied to the response stream consumed by PluginFileInput — confirm
            // against RetryableHandler before changing.
            InputStream stream =
                    retryExecutorFrom(task)
                            .runInterruptible(new RetryableHandler(builder.build(), request))
                            .getEntity()
                            .getContent();
            return new PluginFileInput(
                    task, task.getInputDirect() ? stream : copyToFile(stream), startTimeMills);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Spools the whole response to a temp file and returns a stream over that file.
     * The source stream is always closed, even if the copy fails.
     */
    private InputStream copyToFile(InputStream input) throws IOException {
        File tmpfile = Files.createTempFile("embulk-input-http.", ".tmp").toFile();
        tmpfile.deleteOnExit();
        // try-with-resources closes both the source and the destination deterministically.
        try (InputStream in = input;
                FileOutputStream output = new FileOutputStream(tmpfile)) {
            LOGGER.info(format(Locale.ENGLISH, "Writing response to %s", tmpfile));
            IOUtils.copy(in, output);
        }
        return new FileInputStream(tmpfile);
    }

    /** Builds a Basic-auth credentials provider scoped to the request's host and port. */
    private CredentialsProvider makeCredentialsProvider(
            BasicAuthOption basicAuth, HttpRequestBase request) {
        final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        final AuthScope authScope =
                new AuthScope(request.getURI().getHost(), request.getURI().getPort());
        credentialsProvider.setCredentials(
                authScope,
                new UsernamePasswordCredentials(basicAuth.getUser(), basicAuth.getPassword()));
        return credentialsProvider;
    }

    /** Picks the builder for the task's HTTP method and binds this task's query slice. */
    private static HttpRequestBase httpRequestFrom(PluginTask task, int taskIndex)
            throws URISyntaxException, UnsupportedEncodingException {
        HttpRequestBuilder builder =
                Optional.ofNullable(HTTP_REQUEST_BUILDERS.get(task.getHttpMethod()))
                        .orElseThrow(
                                () ->
                                        new IllegalArgumentException(
                                                String.format(
                                                        "Unsupported http method %s",
                                                        task.getMethod())));
        return builder.build(
                task, (task.getQueries().isEmpty()) ? null : task.getQueries().get(taskIndex));
    }

    /** Default request headers, overridable by user-defined {@code request_headers}. */
    private static List<Header> requestHeadersFrom(PluginTask task) {
        // Plain map population instead of a double-brace anonymous class
        // (the anonymous class would also capture `task` needlessly).
        Map<String, String> map = new HashMap<>();
        map.put("Accept", "*/*");
        map.put("Accept-Charset", task.getCharset());
        map.put("Accept-Encoding", "gzip, deflate");
        map.put("Accept-Language", "en-us,en;q=0.5");
        map.put("User-Agent", task.getUserAgent());

        // Overwrite default headers by user defined headers
        task.getRequestHeaders().forEach(map::put);

        return Collections.unmodifiableList(
                map.entrySet().stream()
                        .map(e -> new BasicHeader(e.getKey(), e.getValue()))
                        .collect(Collectors.toList()));
    }

    private static RequestConfig requestConfigFrom(PluginTask task) {
        return RequestConfig.custom()
                .setCircularRedirectsAllowed(true)
                .setMaxRedirects(10)
                .setRedirectsEnabled(true)
                .setConnectTimeout(task.getOpenTimeout())
                .setSocketTimeout(task.getReadTimeout())
                .build();
    }

    private static RetryExecutor retryExecutorFrom(PluginTask task) {
        return RetryExecutor.builder()
                .withRetryLimit(task.getMaxRetries())
                .withInitialRetryWaitMillis(task.getRetryInterval())
                .withMaxRetryWaitMillis(30 * 60 * 1000) // TODO be configurable
                .build();
    }

    public enum HttpMethod {
        POST,
        GET
    }

    public interface PluginTask extends Task {
        @Config("url")
        String getUrl();

        @Config("charset")
        @ConfigDefault("\"utf-8\"")
        String getCharset();

        @Config("method")
        @ConfigDefault("\"get\"")
        String getMethod();

        @Config("user_agent")
        @ConfigDefault("\"Embulk::Input::HttpFileInputPlugin\"")
        String getUserAgent();

        @Config("open_timeout")
        @ConfigDefault("2000")
        int getOpenTimeout();

        @Config("read_timeout")
        @ConfigDefault("10000")
        int getReadTimeout();

        @Config("max_retries")
        @ConfigDefault("5")
        int getMaxRetries();

        @Config("retry_interval")
        @ConfigDefault("10000")
        int getRetryInterval();

        @Config("request_interval")
        @ConfigDefault("0")
        int getRequestInterval();

        // NOTE(review): @ConfigDefault("null") on a primitive boolean looks suspicious —
        // confirm how the config mapper treats it before relying on the default.
        @Config("interval_includes_response_time")
        @ConfigDefault("null")
        boolean getIntervalIncludesResponseTime();

        @Config("input_direct")
        @ConfigDefault("true")
        boolean getInputDirect();

        @Config("params")
        @ConfigDefault("null")
        Optional<ParamsOption> getParams();

        @Config("request_body")
        @ConfigDefault("null")
        Optional<String> getRequestBody();

        @Config("basic_auth")
        @ConfigDefault("null")
        Optional<BasicAuthOption> getBasicAuth();

        @Config("pager")
        @ConfigDefault("null")
        Optional<PagerOption> getPager();

        @Config("request_headers")
        @ConfigDefault("{}")
        Map<String, String> getRequestHeaders();

        List<List<QueryOption.Query>> getQueries();

        void setQueries(List<List<QueryOption.Query>> queries);

        HttpMethod getHttpMethod();

        void setHttpMethod(HttpMethod httpMethod);
    }

    public static class PluginFileInput extends InputStreamFileInput
            implements TransactionalFileInput {
        private static final Logger LOGGER = LoggerFactory.getLogger(HttpFileInputPlugin.class);

        private final long startTimeMills;
        private final PluginTask task;

        public PluginFileInput(PluginTask task, InputStream stream, long startTimeMills) {
            super(Exec.getBufferAllocator(), new SingleFileProvider(stream));
            this.startTimeMills = startTimeMills;
            this.task = task;
        }

        public TaskReport commit() {
            return CONFIG_MAPPER_FACTORY.newTaskReport();
        }

        @Override
        public void close() {
            super.close();
            sleepByInterval();
        }

        @Override
        public void abort() {}

        /**
         * Honors {@code request_interval}, optionally subtracting the elapsed
         * request/response time when {@code interval_includes_response_time} is set.
         */
        private void sleepByInterval() {
            final long interval =
                    task.getIntervalIncludesResponseTime()
                            ? task.getRequestInterval()
                                    - (System.currentTimeMillis() - startTimeMills)
                            : task.getRequestInterval();
            if (interval > 0) {
                LOGGER.info(String.format("Waiting %d milli sec ...", interval));
                try {
                    Thread.sleep(interval);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can still observe the interruption.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        }

        private static class SingleFileProvider implements InputStreamFileInput.Provider {
            private final InputStream stream;
            private boolean opened = false;

            public SingleFileProvider(InputStream stream) {
                this.stream = stream;
            }

            @Override
            public InputStream openNext() {
                if (opened) {
                    return null;
                }
                opened = true;
                return stream;
            }

            @Override
            public void close() throws IOException {
                // Once opened, the stream is owned (and closed) by the consumer;
                // only close it here if it was never handed out.
                if (!opened) {
                    stream.close();
                }
            }
        }
    }
}
/* * Copyright 2017 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.kafka.spout; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; import org.apache.storm.kafka.spout.config.builder.SingleTopicKafkaSpoutConfiguration; import org.apache.storm.spout.SpoutOutputCollector; import org.apache.storm.task.TopologyContext; import org.apache.storm.utils.Time; import org.apache.storm.utils.Time.SimulatedTime; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.InOrder; import static org.apache.storm.kafka.spout.config.builder.SingleTopicKafkaSpoutConfiguration.createKafkaSpoutConfigBuilder; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.anyLong; import static 
org.mockito.ArgumentMatchers.anyObject;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import org.apache.kafka.clients.consumer.internals.PartitionAssignor.Subscription;
import org.apache.storm.kafka.spout.subscription.ManualPartitioner;
import org.apache.storm.kafka.spout.subscription.TopicFilter;

/** Tests for the emit behavior of {@link KafkaSpout#nextTuple()}. */
public class KafkaSpoutEmitTest {

    private final long offsetCommitPeriodMs = 2_000;
    private final TopologyContext contextMock = mock(TopologyContext.class);
    private final SpoutOutputCollector collectorMock = mock(SpoutOutputCollector.class);
    private final Map<String, Object> conf = new HashMap<>();
    private final TopicPartition partition = new TopicPartition(SingleTopicKafkaSpoutConfiguration.TOPIC, 1);
    private KafkaConsumer<String, String> consumerMock;
    private KafkaSpoutConfig<String, String> spoutConfig;

    @Before
    public void setUp() {
        spoutConfig =
            createKafkaSpoutConfigBuilder(mock(TopicFilter.class), mock(ManualPartitioner.class), -1)
                .setOffsetCommitPeriodMs(offsetCommitPeriodMs)
                .build();
        consumerMock = mock(KafkaConsumer.class);
    }

    @Test
    public void testNextTupleEmitsAtMostOneTuple() {
        //The spout should emit at most one message per call to nextTuple
        //This is necessary for Storm to be able to throttle the spout according to maxSpoutPending
        KafkaSpout<String, String> spout = SpoutWithMockedConsumerSetupHelper.setupSpout(spoutConfig, conf, contextMock, collectorMock, consumerMock, partition);
        Map<TopicPartition, List<ConsumerRecord<String, String>>> records = new HashMap<>();
        records.put(partition, SpoutWithMockedConsumerSetupHelper.createRecords(partition, 0, 10));

        when(consumerMock.poll(anyLong()))
            .thenReturn(new ConsumerRecords<>(records));

        spout.nextTuple();

        verify(collectorMock, times(1)).emit(anyString(), anyList(), any(KafkaSpoutMessageId.class));
    }

    @Test
    public void testNextTupleEmitsFailedMessagesEvenWhenMaxUncommittedOffsetsIsExceeded() throws IOException {
        //The spout must reemit failed messages waiting for retry even if it is not allowed to poll for new messages due to maxUncommittedOffsets being exceeded

        //Emit maxUncommittedOffsets messages, and fail all of them. Then ensure that the spout will retry them when the retry backoff has passed
        try (SimulatedTime simulatedTime = new SimulatedTime()) {
            KafkaSpout<String, String> spout = SpoutWithMockedConsumerSetupHelper.setupSpout(spoutConfig, conf, contextMock, collectorMock, consumerMock, partition);
            Map<TopicPartition, List<ConsumerRecord<String, String>>> records = new HashMap<>();
            int numRecords = spoutConfig.getMaxUncommittedOffsets();
            //This is cheating a bit since maxPollRecords would normally spread this across multiple polls
            records.put(partition, SpoutWithMockedConsumerSetupHelper.createRecords(partition, 0, numRecords));

            when(consumerMock.poll(anyLong()))
                .thenReturn(new ConsumerRecords<>(records));

            for (int i = 0; i < numRecords; i++) {
                spout.nextTuple();
            }

            ArgumentCaptor<KafkaSpoutMessageId> messageIds = ArgumentCaptor.forClass(KafkaSpoutMessageId.class);
            verify(collectorMock, times(numRecords)).emit(anyString(), anyList(), messageIds.capture());

            for (KafkaSpoutMessageId messageId : messageIds.getAllValues()) {
                spout.fail(messageId);
            }

            reset(collectorMock);

            Time.advanceTime(50);
            //No backoff for test retry service, just check that messages will retry immediately
            for (int i = 0; i < numRecords; i++) {
                spout.nextTuple();
            }

            ArgumentCaptor<KafkaSpoutMessageId> retryMessageIds = ArgumentCaptor.forClass(KafkaSpoutMessageId.class);
            verify(collectorMock, times(numRecords)).emit(anyString(), anyList(), retryMessageIds.capture());

            //Verify that the poll started at the earliest retriable tuple offset
            List<Long> failedOffsets = new ArrayList<>();
            for (KafkaSpoutMessageId msgId : messageIds.getAllValues()) {
                failedOffsets.add(msgId.offset());
            }
            InOrder inOrder = inOrder(consumerMock);
            inOrder.verify(consumerMock).seek(partition, failedOffsets.get(0));
            inOrder.verify(consumerMock).poll(anyLong());
        }
    }

    @Test
    public void testSpoutWillSkipPartitionsAtTheMaxUncommittedOffsetsLimit() {
        //This verifies that partitions can't prevent each other from retrying tuples due to the maxUncommittedOffsets limit.
        try (SimulatedTime simulatedTime = new SimulatedTime()) {
            TopicPartition partitionTwo = new TopicPartition(SingleTopicKafkaSpoutConfiguration.TOPIC, 2);
            KafkaSpout<String, String> spout = SpoutWithMockedConsumerSetupHelper.setupSpout(spoutConfig, conf, contextMock, collectorMock, consumerMock, partition, partitionTwo);
            Map<TopicPartition, List<ConsumerRecord<String, String>>> records = new HashMap<>();
            //This is cheating a bit since maxPollRecords would normally spread this across multiple polls
            records.put(partition, SpoutWithMockedConsumerSetupHelper.createRecords(partition, 0, spoutConfig.getMaxUncommittedOffsets()));
            records.put(partitionTwo, SpoutWithMockedConsumerSetupHelper.createRecords(partitionTwo, 0, spoutConfig.getMaxUncommittedOffsets() + 1));
            int numMessages = spoutConfig.getMaxUncommittedOffsets()*2 + 1;

            when(consumerMock.poll(anyLong()))
                .thenReturn(new ConsumerRecords<>(records));

            for (int i = 0; i < numMessages; i++) {
                spout.nextTuple();
            }

            ArgumentCaptor<KafkaSpoutMessageId> messageIds = ArgumentCaptor.forClass(KafkaSpoutMessageId.class);
            verify(collectorMock, times(numMessages)).emit(anyString(), anyList(), messageIds.capture());

            //Now fail a tuple on partition one and verify that it is allowed to retry, because the failed tuple is below the maxUncommittedOffsets limit
            Optional<KafkaSpoutMessageId> failedMessageIdPartitionOne = messageIds.getAllValues().stream()
                .filter(messageId -> messageId.partition() == partition.partition())
                .findAny();

            spout.fail(failedMessageIdPartitionOne.get());

            //Also fail the last tuple from partition two. Since the failed tuple is beyond the maxUncommittedOffsets limit, it should not be retried until earlier messages are acked.
            Optional<KafkaSpoutMessageId> failedMessagePartitionTwo = messageIds.getAllValues().stream()
                .filter(messageId -> messageId.partition() == partitionTwo.partition())
                .max((msgId, msgId2) -> (int)(msgId.offset() - msgId2.offset()));

            spout.fail(failedMessagePartitionTwo.get());

            reset(collectorMock);

            Time.advanceTime(50);
            when(consumerMock.poll(anyLong()))
                .thenReturn(new ConsumerRecords<>(Collections.singletonMap(partition, SpoutWithMockedConsumerSetupHelper.createRecords(partition, failedMessageIdPartitionOne.get().offset(), 1))));

            spout.nextTuple();

            // any() instead of the deprecated anyObject() — identical matcher semantics in Mockito 2.
            verify(collectorMock, times(1)).emit(any(), any(), any());

            InOrder inOrder = inOrder(consumerMock);
            inOrder.verify(consumerMock).seek(partition, failedMessageIdPartitionOne.get().offset());
            //Should not seek on the paused partition
            inOrder.verify(consumerMock, never()).seek(eq(partitionTwo), anyLong());
            inOrder.verify(consumerMock).pause(Collections.singleton(partitionTwo));
            inOrder.verify(consumerMock).poll(anyLong());
            inOrder.verify(consumerMock).resume(Collections.singleton(partitionTwo));

            reset(collectorMock);

            //Now also check that no more tuples are polled for, since both partitions are at their limits
            spout.nextTuple();

            verify(collectorMock, never()).emit(any(), any(), any());
        }
    }
}
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.lang.compiler.model.graph;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;

import org.junit.Test;

import com.asakusafw.lang.compiler.model.graph.Operator.OperatorKind;

/**
 * Test for {@link Operators}.
 */
public class OperatorsTest {

    // matches only MARKER operators; used by the findNearest*/collectUntil* tests below
    private static final Predicate<Operator> MARKERS = operator -> operator.getOperatorKind() == OperatorKind.MARKER;

    /**
     * {@link Operators#getInputs(java.util.Collection)} collects every input port of the given operators.
     */
    @Test
    public void getInputs() {
        MockOperators mock = new MockOperators()
            .operator("a", "i0,i1", "o0,o1")
            .operator("b", "i0,i1", "o0,o1");
        Set<OperatorInput> expected = new HashSet<>();
        expected.add(mock.getInput("a.i0"));
        expected.add(mock.getInput("a.i1"));
        expected.add(mock.getInput("b.i0"));
        expected.add(mock.getInput("b.i1"));
        assertThat(
                Operators.getInputs(mock.getAsSet("a", "b")),
                is(expected));
    }

    /**
     * {@link Operators#getOutputs(java.util.Collection)} collects every output port of the given operators.
     */
    @Test
    public void getOutputs() {
        MockOperators mock = new MockOperators()
            .operator("a", "i0,i1", "o0,o1")
            .operator("b", "i0,i1", "o0,o1");
        Set<OperatorOutput> expected = new HashSet<>();
        expected.add(mock.getOutput("a.o0"));
        expected.add(mock.getOutput("a.o1"));
        expected.add(mock.getOutput("b.o0"));
        expected.add(mock.getOutput("b.o1"));
        assertThat(
                Operators.getOutputs(mock.getAsSet("a", "b")),
                is(expected));
    }

    /**
     * direct successors of an operator.
     */
    @Test
    public void getSuccessors() {
        MockOperators mock = new MockOperators()
            .operator("a", "in", "o0,o1,o2")
            .operator("b", "in", "out")
            .operator("c", "in", "out")
            .operator("d", "in", "out")
            .operator("e", "in", "out")
            .connect("a.o1", "b.in")
            .connect("a.o2", "c.in")
            .connect("a.o2", "d.in")
            .connect("b.out", "e.in");
        assertThat(Operators.hasSuccessors(mock.get("a")), is(true));
        // "e" has no downstream connections
        assertThat(Operators.hasSuccessors(mock.get("e")), is(false));
        assertThat(Operators.getSuccessors(mock.get("a")), is(mock.getAsSet("b", "c", "d")));
    }

    /**
     * direct predecessors of an operator.
     */
    @Test
    public void getPredecessors() {
        MockOperators mock = new MockOperators()
            .operator("a", "i0,i1,i2", "out")
            .operator("b", "in", "out")
            .operator("c", "in", "out")
            .operator("d", "in", "out")
            .operator("e", "in", "out")
            .connect("b.out", "a.i1")
            .connect("c.out", "a.i2")
            .connect("d.out", "a.i2")
            .connect("e.out", "b.in");
        assertThat(Operators.hasPredecessors(mock.get("a")), is(true));
        // "e" has no upstream connections
        assertThat(Operators.hasPredecessors(mock.get("e")), is(false));
        assertThat(Operators.getPredecessors(mock.get("a")), is(mock.getAsSet("b", "c", "d")));
    }

    /**
     * transitive successors (everything reachable downstream, excluding the start).
     */
    @Test
    public void getTransitiveSuccessors() {
        MockOperators mock = new MockOperators()
            .operator("a0")
            .operator("a1")
            .operator("b0").connect("a0", "b0").connect("a1", "b0")
            .operator("b1")
            .operator("c0").connect("b0", "c0").connect("b1", "c0")
            .operator("d0").connect("c0", "d0")
            .operator("d1").connect("c0", "d1")
            .operator("e0").connect("d0", "e0")
            .operator("e1").connect("d0", "e1").connect("d1", "e1");
        assertThat(
                Operators.getTransitiveSuccessors(mock.get("a0").getOutputs()),
                is(mock.getAsSet("b0", "c0", "d0", "d1", "e0", "e1")));
    }

    /**
     * transitive predecessors (everything reachable upstream, excluding the start).
     */
    @Test
    public void getTransitivePredecessors() {
        MockOperators mock = new MockOperators()
            .operator("a0")
            .operator("a1")
            .operator("b0").connect("b0", "a0").connect("b0", "a1")
            .operator("b1")
            .operator("c0").connect("c0", "b0").connect("c0", "b1")
            .operator("d0").connect("d0", "c0")
            .operator("d1").connect("d1", "c0")
            .operator("e0").connect("e0", "d0")
            .operator("e1").connect("e1", "d0").connect("e1", "d1");
        assertThat(
                Operators.getTransitivePredecessors(mock.get("a0").getInputs()),
                is(mock.getAsSet("b0", "c0", "d0", "d1", "e0", "e1")));
    }

    /**
     * transitively connected operators in either direction (here: the whole graph).
     */
    @Test
    public void getTransitiveConnected() {
        MockOperators mock = new MockOperators()
            .operator("a0")
            .operator("a1")
            .operator("b0").connect("a0", "b0").connect("a1", "b0")
            .operator("b1")
            .operator("c0").connect("b0", "c0").connect("b1", "c0")
            .operator("d0").connect("c0", "d0")
            .operator("d1").connect("c0", "d1")
            .operator("e0").connect("d0", "e0")
            .operator("e1").connect("d0", "e1").connect("d1", "e1");
        assertThat(
                Operators.getTransitiveConnected(mock.getAsSet("a0")),
                is(mock.all()));
    }

    /**
     * find nearest successors matching a predicate: traversal stops at the first marker.
     */
    @Test
    public void findNearestReachableSuccessors() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("m0", "a")
            .operator("b").connect("a", "b")
            .marker("m1").connect("b", "m1")
            .operator("c").connect("m1", "c")
            .marker("m2").connect("c", "m2");
        // m2 is hidden behind m1, so only m1 is "nearest"
        assertThat(
                Operators.findNearestReachableSuccessors(mock.get("a").getOutputs(), MARKERS),
                is(mock.getAsSet("m1")));
    }

    /**
     * find nearest successors: branches may reach different markers (m2 via e, m4 via a.o1).
     */
    @Test
    public void findNearestReachableSuccessors_complex() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a", "in", "o0,o1").connect("m0", "a")
            .operator("b").connect("a.o0", "b")
            .operator("c").connect("b", "c")
            .marker("m1").connect("c", "m1")
            .operator("d").connect("m1", "d")
            .operator("e").connect("d", "e").connect("c", "e")
            .marker("m2").connect("e", "m2")
            .marker("m3").connect("d", "m3")
            .marker("m4").connect("d", "m4").connect("a.o1", "m4");
        assertThat(
                Operators.findNearestReachableSuccessors(mock.get("a").getOutputs(), MARKERS),
                is(mock.getAsSet("m1", "m2", "m4")));
    }

    /**
     * find nearest predecessors matching a predicate: traversal stops at the first marker.
     */
    @Test
    public void findNearestReachablePredecessors() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("a", "m0")
            .operator("b").connect("b", "a")
            .marker("m1").connect("m1", "b")
            .operator("c").connect("c", "m1")
            .marker("m2").connect("m2", "c");
        assertThat(
                Operators.findNearestReachablePredecessors(mock.get("a").getInputs(), MARKERS),
                is(mock.getAsSet("m1")));
    }

    /**
     * find nearest predecessors: mirror of the complex successor case.
     */
    @Test
    public void findNearestReachablePredecessors_complex() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a", "i0,i1", "out").connect("a", "m0")
            .operator("b").connect("b", "a.i0")
            .operator("c").connect("c", "b")
            .marker("m1").connect("m1", "c")
            .operator("d").connect("d", "m1")
            .operator("e").connect("e", "d").connect("e", "c")
            .marker("m2").connect("m2", "e")
            .marker("m3").connect("m3", "d")
            .marker("m4").connect("m4", "d").connect("m4", "a.i1");
        assertThat(
                Operators.findNearestReachablePredecessors(mock.get("a").getInputs(), MARKERS),
                is(mock.getAsSet("m1", "m2", "m4")));
    }

    /**
     * collect everything up to (excluding) the nearest matching successors.
     */
    @Test
    public void collectUntilNearestReachableSuccessors_exclusive() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("m0", "a")
            .operator("b").connect("a", "b")
            .marker("m1").connect("b", "m1")
            .operator("c").connect("m1", "c")
            .marker("m2").connect("c", "m2");
        assertThat(
                Operators.collectUntilNearestReachableSuccessors(mock.get("a").getOutputs(), MARKERS, false),
                is(mock.getAsSet("b")));
    }

    /**
     * collect everything up to (including) the nearest matching successors.
     */
    @Test
    public void collectUntilNearestReachableSuccessors_inclusive() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("m0", "a")
            .operator("b").connect("a", "b")
            .marker("m1").connect("b", "m1")
            .operator("c").connect("m1", "c")
            .marker("m2").connect("c", "m2");
        assertThat(
                Operators.collectUntilNearestReachableSuccessors(mock.get("a").getOutputs(), MARKERS, true),
                is(mock.getAsSet("b", "m1")));
    }

    /**
     * collect until nearest successors on a branching graph (exclusive).
     */
    @Test
    public void collectUntilNearestReachableSuccessors_complex() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a", "in", "o0,o1").connect("m0", "a")
            .operator("b").connect("a.o0", "b")
            .operator("c").connect("b", "c")
            .marker("m1").connect("c", "m1")
            .operator("d").connect("m1", "d")
            .operator("e").connect("d", "e").connect("c", "e")
            .marker("m2").connect("e", "m2")
            .marker("m3").connect("d", "m3")
            .marker("m4").connect("d", "m4").connect("a.o1", "m4");
        assertThat(
                Operators.collectUntilNearestReachableSuccessors(mock.get("a").getOutputs(), MARKERS, false),
                is(mock.getAsSet("b", "c", "e")));
    }

    /**
     * collect everything up to (excluding) the nearest matching predecessors.
     */
    @Test
    public void collectUntilNearestReachablePredecessors_exclusive() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("a", "m0")
            .operator("b").connect("b", "a")
            .marker("m1").connect("m1", "b")
            .operator("c").connect("c", "m1")
            .marker("m2").connect("m2", "c");
        assertThat(
                Operators.collectUntilNearestReachablePredecessors(mock.get("a").getInputs(), MARKERS, false),
                is(mock.getAsSet("b")));
    }

    /**
     * collect everything up to (including) the nearest matching predecessors.
     */
    @Test
    public void collectUntilNearestReachablePredecessors_inclusive() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a").connect("a", "m0")
            .operator("b").connect("b", "a")
            .marker("m1").connect("m1", "b")
            .operator("c").connect("c", "m1")
            .marker("m2").connect("m2", "c");
        assertThat(
                Operators.collectUntilNearestReachablePredecessors(mock.get("a").getInputs(), MARKERS, true),
                is(mock.getAsSet("b", "m1")));
    }

    /**
     * collect until nearest predecessors on a branching graph (exclusive).
     */
    @Test
    public void collectUntilNearestReachablePredecessors_complex() {
        MockOperators mock = new MockOperators()
            .operator("m0")
            .operator("a", "i0,i1", "out").connect("a", "m0")
            .operator("b").connect("b", "a.i0")
            .operator("c").connect("c", "b")
            .marker("m1").connect("m1", "c")
            .operator("d").connect("d", "m1")
            .operator("e").connect("e", "d").connect("e", "c")
            .marker("m2").connect("m2", "e")
            .marker("m3").connect("m3", "d")
            .marker("m4").connect("m4", "d").connect("m4", "a.i1");
        assertThat(
                Operators.collectUntilNearestReachablePredecessors(mock.get("a").getInputs(), MARKERS, false),
                is(mock.getAsSet("b", "c", "e")));
    }

    /**
     * connectAll: one upstream fanned out to many downstreams.
     */
    @Test
    public void connectAll_many_downstreams() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("c");
        Operators.connectAll(
                mock.getOutput("a"),
                Arrays.asList(mock.getInput("b"), mock.getInput("c")));
        mock.assertConnected("a", "b")
            .assertConnected("a", "c");
    }

    /**
     * connectAll: many upstreams joined into one downstream.
     */
    @Test
    public void connectAll_many_upstreams() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("c");
        Operators.connectAll(
                Arrays.asList(mock.getOutput("a"), mock.getOutput("b")),
                mock.getInput("c"));
        mock.assertConnected("a", "c")
            .assertConnected("b", "c");
    }

    /**
     * connectAll: cartesian product of upstreams x downstreams.
     */
    @Test
    public void connectAll_product() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("c")
            .operator("d");
        Operators.connectAll(
                Arrays.asList(mock.getOutput("a"), mock.getOutput("b")),
                Arrays.asList(mock.getInput("c"), mock.getInput("d")));
        mock.assertConnected("a", "c")
            .assertConnected("b", "c")
            .assertConnected("a", "d")
            .assertConnected("b", "d");
    }

    /**
     * insert an operator after an output port: "x" takes over all of a's downstream edges.
     */
    @Test
    public void insert_output() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b").connect("a", "b")
            .operator("c").connect("a", "c")
            .operator("x");
        Operators.insert(mock.get("x"), mock.getOutput("a"));
        mock.assertConnected("a", "x")
            .assertConnected("a", "b", false)
            .assertConnected("a", "c", false)
            .assertConnected("x", "b")
            .assertConnected("x", "c");
    }

    /**
     * insert an operator before an input port: "x" takes over all of c's upstream edges.
     */
    @Test
    public void insert_input() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("c").connect("a", "c").connect("b", "c")
            .operator("x");
        Operators.insert(mock.get("x"), mock.getInput("c"));
        mock.assertConnected("x", "c")
            .assertConnected("a", "c", false)
            .assertConnected("b", "c", false)
            .assertConnected("a", "x")
            .assertConnected("b", "x");
    }

    /**
     * insert an operator on a single connection: only the a-&gt;c edge is rerouted through "x".
     */
    @Test
    public void insert_connection() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("c").connect("a", "c").connect("b", "c")
            .operator("d").connect("a", "d").connect("b", "d")
            .operator("x");
        Operators.insert(mock.get("x"), mock.getOutput("a"), mock.getInput("c"));
        mock.assertConnected("a", "c", false)
            .assertConnected("a", "d")
            .assertConnected("b", "c")
            .assertConnected("b", "d")
            .assertConnected("a", "x")
            .assertConnected("b", "x", false)
            .assertConnected("x", "c")
            .assertConnected("x", "d", false);
    }

    /**
     * remove an operator: its upstream and downstream neighbors get directly bridged.
     */
    @Test
    public void remove() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("x").connect("a", "x").connect("b", "x")
            .operator("c").connect("x", "c")
            .operator("d").connect("x", "d");
        Operators.remove(mock.get("x"));
        mock.assertConnected("a", "x", false)
            .assertConnected("b", "x", false)
            .assertConnected("x", "c", false)
            .assertConnected("x", "d", false)
            .assertConnected("a", "c")
            .assertConnected("a", "d")
            .assertConnected("b", "c")
            .assertConnected("b", "d");
    }

    /**
     * replace an operator: "y" takes over all of x's connections; nothing is bridged.
     */
    @Test
    public void replace() {
        MockOperators mock = new MockOperators()
            .operator("a")
            .operator("b")
            .operator("x").connect("a", "x").connect("b", "x")
            .operator("c").connect("x", "c")
            .operator("d").connect("x", "d")
            .operator("y");
        Operators.replace(mock.get("x"), mock.get("y"));
        mock.assertConnected("a", "x", false)
            .assertConnected("b", "x", false)
            .assertConnected("x", "c", false)
            .assertConnected("x", "d", false)
            .assertConnected("a", "y")
            .assertConnected("b", "y")
            .assertConnected("y", "c")
            .assertConnected("y", "d")
            .assertConnected("a", "c", false)
            .assertConnected("a", "d", false)
            .assertConnected("b", "c", false)
            .assertConnected("b", "d", false);
    }
}
package DAO.Campeonato;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;

import Control.Campeonato.Campeonato_control;
import DAO.BDConexao_dao;
import Model.Campeonato.Campeonato_model;

/**
 * Data-access object for the `bodyboardsys`.`campeonato` table.
 *
 * Fixes over the previous revision:
 * - user-supplied values are bound with PreparedStatement parameters instead of
 *   being concatenated into the SQL (SQL injection);
 * - rollback is performed BEFORE the connection is closed (the old code closed
 *   first, so rollback always threw);
 * - no more `return` inside `finally` blocks (which swallowed pending
 *   exceptions and could make list methods return null);
 * - the 24-hour pattern HH replaces hh (hh without an AM/PM marker produced
 *   ambiguous datetimes);
 * - the INNER JOIN statement in carregarCampeonatoInteiro had a missing space
 *   ("...`campeonato` cINNER JOIN...") and was a syntax error.
 */
public class Campeonato_dao {

    // MySQL DATETIME pattern; SimpleDateFormat is not thread-safe, so a fresh
    // instance is created per call instead of being cached.
    private static final String DATETIME_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /** Logs a SQLException in the style used throughout this DAO. */
    private static void logSqlError(SQLException e) {
        System.out.println("Erro ao conectar com o banco: " + e.getMessage());
        System.err.println("SQLException: " + e.getMessage());
        System.err.println("SQLState: " + e.getSQLState());
        System.err.println("VendorError: " + e.getErrorCode());
    }

    /** Best-effort cleanup of a statement/connection pair; close failures are only logged. */
    private static void fecharRecursos(Connection conn, Statement stmt) {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Inserts a new campeonato row and stores the generated id back on the model.
     *
     * @param campeonatoModel model carrying nome/dataInicio/dataEncerramento; on
     *        success its idcampeonato is set to the generated key
     * @return true when the row was inserted and the key retrieved, false otherwise
     */
    public boolean cadastrarCampeonato(Campeonato_model campeonatoModel) {
        // Parameterized insert: the original concatenated user input straight
        // into the SQL string, which was vulnerable to SQL injection.
        String sql = "INSERT INTO `bodyboardsys`.`campeonato` "
                + "(`idcampeonato`, `nome`, `dataInicio`, `dataEncerramento`, `dataCadastro`) "
                + "VALUES (NULL, ?, ?, ?, NULL)";
        DateFormat dataFormataData = new SimpleDateFormat(DATETIME_PATTERN);
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = BDConexao_dao.conectar();
            conn.setAutoCommit(false);
            ps = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
            ps.setString(1, campeonatoModel.getNome());
            ps.setString(2, dataFormataData.format(campeonatoModel.getDataInicio()));
            ps.setString(3, dataFormataData.format(campeonatoModel.getDataEncerramento()));
            ps.executeUpdate();
            ResultSet rs = ps.getGeneratedKeys();
            if (rs.next()) {
                campeonatoModel.setIdcampeonato(rs.getInt(1));
                conn.commit();
                return true;
            }
            // No key produced: undo the insert. (The old code called rollback()
            // only AFTER conn.close(), so the rollback always failed.)
            conn.rollback();
            return false;
        } catch (SQLException e) {
            logSqlError(e);
            if (conn != null) {
                try {
                    conn.rollback();
                } catch (SQLException rollbackEx) {
                    rollbackEx.printStackTrace();
                }
            }
            return false;
        } finally {
            fecharRecursos(conn, ps);
        }
    }

    /**
     * Lists every campeonato row.
     *
     * @return all rows, or an empty list when the query fails (never null)
     */
    public static ArrayList<Campeonato_model> relatorioCampeonato() {
        Connection conn = null;
        PreparedStatement pstmt = null;
        ArrayList<Campeonato_model> listaCampeonato = new ArrayList<Campeonato_model>();
        try {
            conn = BDConexao_dao.conectar();
            pstmt = conn.prepareStatement("SELECT * FROM `bodyboardsys`.`campeonato`;");
            ResultSet res = pstmt.executeQuery();
            listaCampeonato = Campeonato_control.carregarListaResultSet(res);
        } catch (SQLException e) {
            logSqlError(e);
        } finally {
            fecharRecursos(conn, pstmt);
        }
        return listaCampeonato;
    }

    /**
     * Lists campeonatos filtered by the non-zero/non-empty fields of the model.
     *
     * @param campeonatoModel filter holder: idcampeonato, nome (prefix match) and status are honored
     * @param ordenar optional ORDER BY column (ascending); null for no ordering
     * @return matching rows, or an empty list when the query fails (never null)
     */
    public static ArrayList<Campeonato_model> listar(Campeonato_model campeonatoModel, String ordenar) {
        Connection conn = null;
        PreparedStatement pstmt = null;
        ArrayList<Campeonato_model> listaCampeonato = new ArrayList<Campeonato_model>();
        // WHERE/ORDER BY fragments accumulated through the shared BDConexao_dao helpers.
        String filtro = "";
        String ordem = "";
        try {
            if (ordenar != null) {
                ordem += BDConexao_dao.adicionaOrdem(ordem, ordenar, "ASC");
            }
            if (campeonatoModel.getIdcampeonato() != 0) {
                String valor = " idcampeonato=" + Integer.toString(campeonatoModel.getIdcampeonato());
                filtro += BDConexao_dao.adicionaFiltro(filtro, valor, "");
            }
            // The old code compared with != "", which tests identity, not content.
            // NOTE(review): the name filter is still concatenated into the SQL and
            // remains injectable; parameterizing it requires reworking
            // BDConexao_dao.adicionaFiltro — confirm and fix there.
            if (campeonatoModel.getNome() != null && !campeonatoModel.getNome().equals("")) {
                String valor = " nome like'" + campeonatoModel.getNome() + "%'";
                filtro += BDConexao_dao.adicionaFiltro(filtro, valor, "");
            }
            if (campeonatoModel.getStatus() != 0) {
                String valor = " status=" + Integer.toString(campeonatoModel.getStatus());
                filtro += BDConexao_dao.adicionaFiltro(filtro, valor, "");
            }
            conn = BDConexao_dao.conectar();
            String sql = "SELECT * FROM `bodyboardsys`.`campeonato` " + filtro + " " + ordem + ";";
            pstmt = conn.prepareStatement(sql);
            ResultSet res = pstmt.executeQuery();
            listaCampeonato = Campeonato_control.carregarListaResultSet(res);
        } catch (SQLException e) {
            logSqlError(e);
        } finally {
            fecharRecursos(conn, pstmt);
        }
        return listaCampeonato;
    }

    /**
     * Loads a single campeonato selected by the model's idcampeonato.
     *
     * @param campeonatoModel model whose idcampeonato selects the row (0 = no filter)
     * @return the loaded model, or null on any failure
     */
    public Campeonato_model carregar(Campeonato_model campeonatoModel) {
        String filtro = "";
        if (campeonatoModel.getIdcampeonato() != 0) {
            String valor = " idcampeonato=" + Integer.toString(campeonatoModel.getIdcampeonato());
            filtro = BDConexao_dao.adicionaFiltro(filtro, valor, "");
        }
        String sql = "SELECT * FROM `bodyboardsys`.`campeonato` " + filtro;
        Connection conn = null;
        Statement query = null;
        try {
            conn = BDConexao_dao.conectar();
            query = conn.createStatement();
            ResultSet res = query.executeQuery(sql);
            return Campeonato_control.carregarResultSet(res);
        } catch (SQLException e) {
            logSqlError(e);
            return null;
        } catch (Exception e) {
            // carregarResultSet failures previously yielded null as well.
            return null;
        } finally {
            fecharRecursos(conn, query);
        }
    }

    /**
     * Updates nome/dataInicio/dataEncerramento of the row selected by idcampeonato.
     *
     * @param campeonatoModel model carrying the new values and the target id
     * @return true on success; false after logging and rolling back on failure
     */
    public boolean editar(Campeonato_model campeonatoModel) {
        String filtro = "";
        if (campeonatoModel.getIdcampeonato() != 0) {
            String valor = " idcampeonato=" + Integer.toString(campeonatoModel.getIdcampeonato());
            filtro = BDConexao_dao.adicionaFiltro(filtro, valor, "");
        }
        // SET values are bound as parameters; the WHERE clause still goes through
        // the shared adicionaFiltro helper (the id is numeric, so not injectable).
        String sql = "UPDATE `bodyboardsys`.`campeonato` "
                + "SET nome = ?, dataInicio = ?, dataEncerramento = ? "
                + filtro;
        DateFormat dataFormataData = new SimpleDateFormat(DATETIME_PATTERN);
        Connection conn = null;
        PreparedStatement pstmt = null;
        try {
            conn = BDConexao_dao.conectar();
            conn.setAutoCommit(false);
            pstmt = conn.prepareStatement(sql);
            pstmt.setString(1, campeonatoModel.getNome());
            pstmt.setString(2, dataFormataData.format(campeonatoModel.getDataInicio()));
            pstmt.setString(3, dataFormataData.format(campeonatoModel.getDataEncerramento()));
            pstmt.executeUpdate();
            conn.commit();
            return true;
        } catch (SQLException e) {
            logSqlError(e);
            if (conn != null) {
                try {
                    // Undo the partial update before the connection is released.
                    conn.rollback();
                } catch (SQLException rollbackEx) {
                    logSqlError(rollbackEx);
                }
            }
            return false;
        } finally {
            fecharRecursos(conn, pstmt);
        }
    }

    /**
     * Loads a campeonato together with all its etapas and baterias.
     *
     * @param campeonatoModel model whose idcampeonato selects the campeonato (0 = no filter)
     * @return the fully-populated model, or null on any failure
     */
    public Campeonato_model carregarCampeonatoInteiro(Campeonato_model campeonatoModel) {
        String filtro = "";
        if (campeonatoModel.getIdcampeonato() != 0) {
            String valor = " ce.idcampeonato=" + Integer.toString(campeonatoModel.getIdcampeonato());
            filtro = BDConexao_dao.adicionaFiltro(filtro, valor, "");
        }
        // The old concatenation produced "...`campeonato` cINNER JOIN..." — the
        // missing space after the alias made the statement a syntax error.
        String sql = "SELECT * FROM `bodyboardsys`.`campeonato` c "
                + "INNER JOIN `bodyboardsys`.`campeonatoetapa` ce ON ce.idcampeonato = c.idcampeonato "
                + "INNER JOIN `bodyboardsys`.`bateria` b ON b.idcampeonatoetapa = ce.idcampeonatoetapa "
                + filtro;
        Connection conn = null;
        Statement query = null;
        try {
            conn = BDConexao_dao.conectar();
            query = conn.createStatement();
            ResultSet res = query.executeQuery(sql);
            return Campeonato_control.carregarCampeonatoInteiroResultSet(res);
        } catch (SQLException e) {
            logSqlError(e);
            return null;
        } catch (Exception e) {
            return null;
        } finally {
            fecharRecursos(conn, query);
        }
    }
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.tool.assessment.ui.listener.author;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.faces.event.AbortProcessingException;
import javax.faces.event.ActionEvent;
import javax.faces.event.ActionListener;

import lombok.extern.slf4j.Slf4j;

import org.apache.commons.lang3.StringUtils;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.tool.assessment.api.SamigoApiFactory;
import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentAccessControl;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentAccessControlIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.EvaluationModelIfc;
import org.sakaiproject.tool.assessment.facade.AssessmentFacade;
import org.sakaiproject.tool.assessment.services.assessment.AssessmentService;
import org.sakaiproject.tool.assessment.shared.api.assessment.SecureDeliveryServiceAPI;
import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentSettingsBean;
import org.sakaiproject.tool.assessment.ui.bean.author.AuthorBean;
import org.sakaiproject.tool.assessment.ui.bean.authz.AuthorizationBean;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
import org.sakaiproject.tool.assessment.util.TextFormat;
import org.sakaiproject.util.api.FormattedText;

/**
 * <p>Title: Samigo</p>
 * <p>Description: Sakai Assessment Manager</p>
 *
 * JSF action listener behind the assessment-settings Save button: validates
 * every submitted setting (title, dates, groups, time limit, IP list,
 * submission counts, feedback options, secure-delivery password), reports
 * failures as faces messages, and on success persists the settings and
 * refreshes the author's assessment listings.
 *
 * @author Ed Smiley
 * @version $Id$
 */
@Slf4j
public class SaveAssessmentSettingsListener
    implements ActionListener
{
  //private static final GradebookServiceHelper gbsHelper = IntegrationContextFactory.getInstance().getGradebookServiceHelper();
  //private static final boolean integrated = IntegrationContextFactory.getInstance().isIntegrated();

  public SaveAssessmentSettingsListener()
  {
  }

  /**
   * Validates the submitted settings; when any check fails the user is kept
   * on the settings page with the accumulated error messages, otherwise the
   * settings are saved and the author bean's listings are rebuilt.
   *
   * @param ae the triggering JSF action event (unused)
   * @throws AbortProcessingException per the {@link ActionListener} contract
   */
  public void processAction(ActionEvent ae) throws AbortProcessingException
  {
    FacesContext context = FacesContext.getCurrentInstance();
    AssessmentSettingsBean assessmentSettings = (AssessmentSettingsBean) ContextUtil.
        lookupBean("assessmentSettings");
    boolean error=false;
    String assessmentId=String.valueOf(assessmentSettings.getAssessmentId());
    AssessmentService assessmentService = new AssessmentService();
    SaveAssessmentSettings s = new SaveAssessmentSettings();
    String assessmentName = TextFormat.convertPlaintextToFormattedTextNoHighUnicode(assessmentSettings.getTitle());

    // check if name is empty
    if(assessmentName!=null &&(assessmentName.trim()).equals("")){
      String nameEmpty_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","assessmentName_empty");
      context.addMessage(null,new FacesMessage(nameEmpty_err));
      error=true;
    }

    // check if name is unique among this author's assessments
    if(!assessmentService.assessmentTitleIsUnique(assessmentId,assessmentName,false)){
      String nameUnique_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","assessmentName_error");
      context.addMessage(null,new FacesMessage(nameUnique_err));
      error=true;
    }

    // check if start date is valid
    if(!assessmentSettings.getIsValidStartDate()){
      String startDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_start_date");
      context.addMessage(null,new FacesMessage(startDateErr));
      error=true;
    }
    // check if due date is valid
    if(!assessmentSettings.getIsValidDueDate()){
      String dueDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_due_date");
      context.addMessage(null,new FacesMessage(dueDateErr));
      error=true;
    }
    // check if RetractDate needs to be nulled: lateHandling "2" means late
    // submissions are not accepted, so a retract date is meaningless.
    if ("2".equals(assessmentSettings.getLateHandling())){
      assessmentSettings.setRetractDateString(null);
    }
    // a retract date without a due date is invalid when late submissions are accepted
    if(assessmentSettings.getDueDate() == null && assessmentSettings.getRetractDate() != null &&
        AssessmentAccessControlIfc.ACCEPT_LATE_SUBMISSION.toString().equals(assessmentSettings.getLateHandling())){
      String dueDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages", "due_null_with_retract_date");
      context.addMessage(null,new FacesMessage(dueDateErr));
      error = true;
    }
    // check if late submission date is valid
    if(!assessmentSettings.getIsValidRetractDate()){
      String retractDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_retrack_date");
      context.addMessage(null,new FacesMessage(retractDateErr));
      error=true;
    }
    // check that retract is after due and due is not null
    if (!assessmentSettings.getIsRetractAfterDue()) {
      String retractDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages", "retract_earlier_than_due");
      context.addMessage(null, new FacesMessage(retractDateErr));
      error = true;
    }

    // releasing to selected groups requires at least one group to be chosen
    if (assessmentSettings.getReleaseTo().equals(AssessmentAccessControl.RELEASE_TO_SELECTED_GROUPS)) {
      String[] groupsAuthorized = assessmentSettings.getGroupsAuthorizedToSave(); //getGroupsAuthorized();
      if (groupsAuthorized == null || groupsAuthorized.length == 0) {
        String releaseGroupError = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","choose_one_group");
        context.addMessage(null,new FacesMessage(releaseGroupError));
        error=true;
        assessmentSettings.setNoGroupSelectedError(true);
      }
      else {
        assessmentSettings.setNoGroupSelectedError(false);
      }
    }

    // if timed assessment, does it has value for time
    Object time=assessmentSettings.getValueMap().get("hasTimeAssessment");
    boolean isTime=false;
    try
    {
      if (time != null)
      {
        isTime = ( (Boolean) time).booleanValue();
      }
    }
    catch (Exception ex)
    {
      // keep default
      log.warn("Expecting Boolean hasTimeAssessment, got: " + time);
    }
    // a timed assessment must have a non-zero time limit
    if((isTime) &&((assessmentSettings.getTimeLimit().intValue())==0)){
      String time_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","timeSelect_error");
      context.addMessage(null,new FacesMessage(time_err));
      error=true;
    }

    // validate each non-blank line of the allowed-IP list
    String ipString = assessmentSettings.getIpAddresses().trim().replace(" ", "");
    String[]arraysIp=(ipString.split("\n"));
    boolean ipErr=false;
    for(int a=0;a<arraysIp.length;a++){
      String currentString=arraysIp[a];
      if(!currentString.trim().equals("")){
        // strip the trailing newline remnant on every line except the last
        if(a<(arraysIp.length-1))
          currentString=currentString.substring(0,currentString.length()-1);
        if(!s.isIpValid(currentString)){
          ipErr=true;
          break;
        }
      }
    }
    if(ipErr){
      error=true;
      String ip_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","ip_error");
      context.addMessage(null,new FacesMessage(ip_err));
    }

    // limited submissions must allow at least 1 attempt
    String unlimitedSubmissions = assessmentSettings.getUnlimitedSubmissions();
    if (unlimitedSubmissions != null && unlimitedSubmissions.equals(AssessmentAccessControlIfc.LIMITED_SUBMISSIONS.toString())) {
      try {
        String submissionsAllowed = assessmentSettings.getSubmissionsAllowed().trim();
        int submissionAllowed = Integer.parseInt(submissionsAllowed);
        if (submissionAllowed < 1) {
          throw new RuntimeException();
        }
      }
      catch (RuntimeException e){
        error=true;
        String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","submissions_allowed_error");
        context.addMessage(null,new FacesMessage(submission_err));
      }
    }

    //String unlimitedSubmissions = assessmentSettings.getUnlimitedSubmissions();
    // averaging scores over a single allowed submission makes no sense
    String scoringType=assessmentSettings.getScoringType();
    if ((scoringType).equals(EvaluationModelIfc.AVERAGE_SCORE.toString()) && "0".equals(assessmentSettings.getUnlimitedSubmissions())) {
      try {
        String submissionsAllowed = assessmentSettings.getSubmissionsAllowed().trim();
        int submissionAllowed = Integer.parseInt(submissionsAllowed);
        if (submissionAllowed < 2) {
          throw new RuntimeException();
        }
      }
      catch (RuntimeException e){
        error=true;
        String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","averag_grading_single_submission");
        context.addMessage(null,new FacesMessage(submission_err));
      }
    }

    //check feedback - if at specific time then time should be defined.
    if((assessmentSettings.getFeedbackDelivery()).equals("2")) {
      if (StringUtils.isBlank(assessmentSettings.getFeedbackDateString())) {
        error=true;
        String date_err=ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","date_error");
        context.addMessage(null,new FacesMessage(date_err));
      }
      else if(!assessmentSettings.getIsValidFeedbackDate()){
        String feedbackDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_feedback_date");
        context.addMessage(null,new FacesMessage(feedbackDateErr));
        error=true;
      }

      // the feedback window must not be inverted (start after end)
      if(StringUtils.isNotBlank(assessmentSettings.getFeedbackEndDateString()) && assessmentSettings.getFeedbackDate().after(assessmentSettings.getFeedbackEndDate())){
        String feedbackDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_feedback_ranges");
        context.addMessage(null,new FacesMessage(feedbackDateErr));
        error=true;
      }

      boolean scoreThresholdEnabled = assessmentSettings.getFeedbackScoreThresholdEnabled();
      //Check if the value is empty
      boolean scoreThresholdError = StringUtils.isBlank(assessmentSettings.getFeedbackScoreThreshold());
      //If the threshold value is not empty, check if is a valid percentage (0.0 - 100.0)
      if (!scoreThresholdError) {
        // accept comma as a decimal separator before parsing
        String submittedScoreThreshold = StringUtils.replace(assessmentSettings.getFeedbackScoreThreshold(), ",", ".");
        try {
          Double doubleInput = new Double(submittedScoreThreshold);
          if(doubleInput.compareTo(new Double("0.0")) == -1 || doubleInput.compareTo(new Double("100.0")) == 1){
            throw new Exception();
          }
        } catch(Exception ex) {
          scoreThresholdError = true;
        }
      }
      //If the threshold is enabled and is not valid, display an error.
      if(scoreThresholdEnabled && scoreThresholdError){
        error = true;
        String str_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","feedback_score_threshold_required");
        context.addMessage(null,new FacesMessage(str_err));
      }
    }

    // check secure delivery exit password: restricted to ASCII letters and digits
    SecureDeliveryServiceAPI secureDeliveryService = SamigoApiFactory.getInstance().getSecureDeliveryServiceAPI();
    if ( secureDeliveryService.isSecureDeliveryAvaliable() ) {
      String moduleId = assessmentSettings.getSecureDeliveryModule();
      if ( ! SecureDeliveryServiceAPI.NONE_ID.equals( moduleId ) ) {
        String exitPassword = assessmentSettings.getSecureDeliveryModuleExitPassword();
        if ( exitPassword != null && exitPassword.length() > 0 ) {
          for ( int i = 0; i < exitPassword.length(); i++ ) {
            char c = exitPassword.charAt(i);
            if ( ! (( c >= 'a' && c <= 'z' ) || ( c >= 'A' && c <= 'Z' ) || ( c >= '0' && c <= '9' )) ) {
              error = true;
              String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","exit_password_error");
              context.addMessage(null,new FacesMessage(submission_err));
              break;
            }
          }
        }
      }
    }

    // on any validation failure: stay on the settings page and bail out before saving
    if (error){
      String blockDivs = ContextUtil.lookupParam("assessmentSettingsAction:blockDivs");
      assessmentSettings.setBlockDivs(blockDivs);
      assessmentSettings.setOutcomeSave("editAssessmentSettings");
      return;
    }

    // Set the outcome once Save button is clicked
    AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
    AuthorizationBean authorization = (AuthorizationBean) ContextUtil.lookupBean("authorization");
    assessmentSettings.setOutcomeSave(author.getFromPage());

    s.save(assessmentSettings, false);

    // reset the core listing in case assessment title changes
    List<AssessmentFacade> assessmentList = assessmentService.getBasicInfoOfAllActiveAssessments(
        author.getCoreAssessmentOrderBy(),author.isCoreAscending());
    Iterator iter = assessmentList.iterator();
    while (iter.hasNext()) {
      AssessmentFacade assessmentFacade= (AssessmentFacade) iter.next();
      // titles are stored formatted; the listing shows them as plain text
      assessmentFacade.setTitle(ComponentManager.get(FormattedText.class).convertFormattedTextToPlaintext(assessmentFacade.getTitle()));
    }
    // get the managed bean, author and set the list
    // combined listing: editable working copies plus gradable published assessments
    List allAssessments = new ArrayList<>();
    if (authorization.getEditAnyAssessment() || authorization.getEditOwnAssessment()) {
        allAssessments.addAll(assessmentList);
    }
    if (authorization.getGradeAnyAssessment() || authorization.getGradeOwnAssessment()) {
        allAssessments.addAll(author.getPublishedAssessments());
    }
    author.setAssessments(assessmentList);
    author.setAllAssessments(allAssessments);

    // goto Question Authoring page
    EditAssessmentListener editA= new EditAssessmentListener();
    editA.setPropertiesForAssessment(author);
  }
}
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.yunos.alicontacts.interactions;

import android.app.Activity;
import android.app.DialogFragment;
import android.content.Context;
import android.content.CursorLoader;
import android.content.Intent;
import android.content.Loader;
import android.content.Loader.OnLoadCompleteListener;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.provider.ContactsContract.CommonDataKinds.SipAddress;
import android.provider.ContactsContract.Contacts;
import android.provider.ContactsContract.Data;
import android.provider.ContactsContract.RawContacts;
import android.text.TextUtils;
import android.util.Log;
import android.widget.ListAdapter;
import android.widget.Toast;

import com.google.common.annotations.VisibleForTesting;
import com.yunos.alicontacts.CallUtil;
import com.yunos.alicontacts.Collapser;
import com.yunos.alicontacts.Collapser.Collapsible;
import com.yunos.alicontacts.ContactsUtils;
import com.yunos.alicontacts.R;
import com.yunos.alicontacts.activities.TransactionSafeActivity;

import hwdroid.dialog.AlertDialog;
import hwdroid.dialog.DialogInterface;
import hwdroid.dialog.DialogInterface.OnDismissListener;

import yunos.support.v4.app.FragmentManager;

import java.util.ArrayList;
import java.util.List;

/**
 * Initiates phone calls or a text message. If there are multiple candidates, this class shows a
 * dialog to pick one. Creating one of these interactions should be done through the static
 * factory methods.
 *
 * Note that this class initiates not only usual *phone* calls but also *SIP* calls.
 *
 * TODO: clean up code and documents since it is quite confusing to use "phone numbers" or
 * "phone calls" here while they can be SIP addresses or SIP calls (See also issue 5039627).
 */
public class PhoneNumberInteraction2 implements OnLoadCompleteListener<Cursor>{
    private static final String TAG = PhoneNumberInteraction2.class.getSimpleName();

    // Candidate numbers collected from the cursor in onLoadComplete; also read
    // by the disambiguation dialog's click handler.
    ArrayList<PhoneItem> mPhoneList = new ArrayList<PhoneItem>();
    //private CheckBox mCheckBox;

    @VisibleForTesting
    /* package */ enum InteractionType {
        PHONE_CALL,
        SMS
    }

    /**
     * A model object for capturing a phone number for a given contact.
     */
    @VisibleForTesting
    /* package */static class PhoneItem implements Parcelable, Collapsible<PhoneItem> {
        long id;
        String phoneNumber;
        String accountType;
        String dataSet;
        long type;
        String label;
        /** {@link Phone#CONTENT_ITEM_TYPE} or {@link SipAddress#CONTENT_ITEM_TYPE}. */
        String mimeType;

        public PhoneItem() {
        }

        private PhoneItem(Parcel in) {
            this.id          = in.readLong();
            this.phoneNumber = in.readString();
            this.accountType = in.readString();
            this.dataSet     = in.readString();
            this.type        = in.readLong();
            this.label       = in.readString();
            this.mimeType    = in.readString();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeLong(id);
            dest.writeString(phoneNumber);
            dest.writeString(accountType);
            dest.writeString(dataSet);
            dest.writeLong(type);
            dest.writeString(label);
            dest.writeString(mimeType);
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public boolean collapseWith(PhoneItem phoneItem) {
            if (!shouldCollapseWith(phoneItem)) {
                return false;
            }
            // Just keep the number and id we already have.
            return true;
        }

        @Override
        public boolean shouldCollapseWith(PhoneItem phoneItem) {
            return ContactsUtils.shouldCollapse(Phone.CONTENT_ITEM_TYPE, phoneNumber,
                    Phone.CONTENT_ITEM_TYPE, phoneItem.phoneNumber);
        }

        @Override
        public String toString() {
            return phoneNumber;
        }

        public static final Parcelable.Creator<PhoneItem> CREATOR
                = new Parcelable.Creator<PhoneItem>() {
            @Override
            public PhoneItem createFromParcel(Parcel in) {
                return new PhoneItem(in);
            }

            @Override
            public PhoneItem[] newArray(int size) {
                return new PhoneItem[size];
            }
        };
    }

    /**
     * {@link DialogFragment} used for displaying a dialog with a list of phone numbers of which
     * one will be chosen to make a call or initiate an sms message.
     *
     * It is recommended to use
     * {@link PhoneNumberInteraction#startInteractionForPhoneCall(TransactionSafeActivity, Uri)} or
     * {@link PhoneNumberInteraction#startInteractionForTextMessage(TransactionSafeActivity, Uri)}
     * instead of directly using this class, as those methods handle one or multiple data cases
     * appropriately.
     */
    /* Made public to let the system reach this class */
    public static class PhoneDisambiguationDialogFragment extends DialogFragment {

        private static final String ARG_PHONE_LIST = "phoneList";
        private static final String ARG_INTERACTION_TYPE = "interactionType";
        private static final String ARG_CALL_ORIGIN = "callOrigin";

        private InteractionType mInteractionType;
        private ListAdapter mPhonesAdapter;
        private List<PhoneItem> mPhoneList;
        private String mCallOrigin;

        // NOTE(review): this builds the fragment and sets its arguments but never
        // calls fragment.show(fragmentManager, ...), so the dialog is never
        // displayed through this path; showDisambiguationDialog() below uses a
        // plain AlertDialog instead. Confirm whether this method is dead code.
        public static void show(FragmentManager fragmentManager,
                ArrayList<PhoneItem> phoneList, InteractionType interactionType,
                String callOrigin) {
            PhoneDisambiguationDialogFragment fragment = new PhoneDisambiguationDialogFragment();
            Bundle bundle = new Bundle();
            bundle.putParcelableArrayList(ARG_PHONE_LIST, phoneList);
            bundle.putSerializable(ARG_INTERACTION_TYPE, interactionType);
            bundle.putString(ARG_CALL_ORIGIN, callOrigin);
            fragment.setArguments(bundle);
        }
    }

    private static final String[] PHONE_NUMBER_PROJECTION = new String[] {
            Phone._ID,
            Phone.NUMBER,
            Phone.IS_SUPER_PRIMARY,
            RawContacts.ACCOUNT_TYPE,
            RawContacts.DATA_SET,
            Phone.TYPE,
            Phone.LABEL,
            Phone.MIMETYPE
    };

    private static final String PHONE_NUMBER_SELECTION = Data.MIMETYPE + " IN ('"
            + Phone.CONTENT_ITEM_TYPE + "', "
            + "'" + SipAddress.CONTENT_ITEM_TYPE + "') AND "
            + Data.DATA1 + " NOT NULL";

    private final Context mContext;
    private final OnDismissListener mDismissListener;
    private final InteractionType mInteractionType;

    private final String mCallOrigin;

    private CursorLoader mLoader;

    /**
     * Constructs a new {@link PhoneNumberInteraction}. The constructor takes in a {@link Context}
     * instead of a {@link TransactionSafeActivity} for testing purposes to verify the functionality
     * of this class. However, all factory methods for creating {@link PhoneNumberInteraction}s
     * require a {@link TransactionSafeActivity} (i.e. see {@link #startInteractionForPhoneCall}).
     */
    @VisibleForTesting
    /* package */ PhoneNumberInteraction2(Context context, InteractionType interactionType,
            DialogInterface.OnDismissListener dismissListener) {
        this(context, interactionType, dismissListener, null);
    }

    private PhoneNumberInteraction2(Context context, InteractionType interactionType,
            DialogInterface.OnDismissListener dismissListener, String callOrigin) {
        mContext = context;
        mInteractionType = interactionType;
        mDismissListener = dismissListener;
        mCallOrigin = callOrigin;
    }

    private void performAction(String phoneNumber) {
        PhoneNumberInteraction2.performAction(mContext, phoneNumber, mInteractionType, mCallOrigin);
    }

    /** Fires the actual call or SMS intent for the chosen number. */
    private static void performAction(
            Context context, String phoneNumber, InteractionType interactionType,
            String callOrigin) {
        Intent intent;
        switch (interactionType) {
            case SMS:
                intent = new Intent(
                        Intent.ACTION_SENDTO, Uri.fromParts("sms", phoneNumber, null));
                intent.setClassName(ContactsUtils.MMS_PACKAGE,
                        ContactsUtils.MMS_COMPOSE_ACTIVITY_NAME);
                //intent.setClass(context, ComposeMessageActivity.class);
                break;
            default:
                intent = CallUtil.getCallIntent(context, phoneNumber, callOrigin);
                break;
        }
        context.startActivity(intent);
    }

    /**
     * Initiates the interaction. This may result in a phone call or sms message started
     * or a disambiguation dialog to determine which phone number should be used.
     */
    @VisibleForTesting
    /* package */ void startInteraction(Uri uri) {
        if(uri == null) {
            Log.e(TAG, "PhoneNumberInteraction2::startInteraction ERROR: uri == null!!!!!");
            return;
        }

        if (mLoader != null) {
            mLoader.reset();
        }
        final Uri queryUri;
        final String inputUriAsString = uri.toString();
        if (inputUriAsString.startsWith(Contacts.CONTENT_URI.toString())) {
            if (!inputUriAsString.endsWith(Contacts.Data.CONTENT_DIRECTORY)) {
                // A bare contact Uri: append /data so the phone rows are queryable.
                queryUri = Uri.withAppendedPath(uri, Contacts.Data.CONTENT_DIRECTORY);
            } else {
                queryUri = uri;
            }
        } else if (inputUriAsString.startsWith(Data.CONTENT_URI.toString())) {
            queryUri = uri;
        } else {
            throw new UnsupportedOperationException(
                    "Input Uri must be contact Uri or data Uri (input: \"" + uri + "\")");
        }

        mLoader = new CursorLoader(mContext,
                queryUri,
                PHONE_NUMBER_PROJECTION,
                PHONE_NUMBER_SELECTION,
                null,
                null);
        mLoader.registerListener(0, this);
        mLoader.startLoading();
    }

    @Override
    public void onLoadComplete(Loader<Cursor> loader, Cursor cursor) {
        if (cursor == null || !isSafeToCommitTransactions()) {
            onDismiss();
            return;
        }

        String primaryPhone = null;
        try {
            while (cursor.moveToNext()) {
                if (cursor.getInt(cursor.getColumnIndex(Phone.IS_SUPER_PRIMARY)) != 0) {
                    // Found super primary, call it.
                    primaryPhone = cursor.getString(cursor.getColumnIndex(Phone.NUMBER));
                    break;
                }

                String phoneNumber = cursor.getString(cursor.getColumnIndex(Phone.NUMBER));
                if (TextUtils.isEmpty(phoneNumber)) {
                    continue;
                }
                PhoneItem item = new PhoneItem();
                item.id = cursor.getLong(cursor.getColumnIndex(Data._ID));
                item.phoneNumber = phoneNumber;
                item.accountType =
                        cursor.getString(cursor.getColumnIndex(RawContacts.ACCOUNT_TYPE));
                item.dataSet = cursor.getString(cursor.getColumnIndex(RawContacts.DATA_SET));
                item.type = cursor.getInt(cursor.getColumnIndex(Phone.TYPE));
                item.label = cursor.getString(cursor.getColumnIndex(Phone.LABEL));
                item.mimeType = cursor.getString(cursor.getColumnIndex(Phone.MIMETYPE));

                mPhoneList.add(item);
            }
        } finally {
            cursor.close();
        }

        if (primaryPhone != null) {
            performAction(primaryPhone);
            onDismiss();
            return;
        }

        // Merge duplicate entries (e.g. the same number stored on two raw contacts).
        Collapser.collapseList(mPhoneList);

        if (mPhoneList.isEmpty()) {
            Toast.makeText(mContext, R.string.contact_no_number_to_dial_sms, Toast.LENGTH_SHORT)
                    .show();
            onDismiss();
        } else if (mPhoneList.size() == 1) {
            PhoneItem item = mPhoneList.get(0);
            onDismiss();
            performAction(item.phoneNumber);
        } else {
            // There are multiple candidates. Let the user choose one.
            showDisambiguationDialog(mPhoneList);
        }
    }

    private boolean isSafeToCommitTransactions() {
        return mContext instanceof TransactionSafeActivity ?
                ((TransactionSafeActivity) mContext).isSafeToCommitTransactions() : true;
    }

    private void onDismiss() {
        if (mDismissListener != null) {
            mDismissListener.onDismiss(null);
        }
    }

    /**
     * Start call action using given contact Uri. If there are multiple candidates for the phone
     * call, dialog is automatically shown and the user is asked to choose one.
     *
     * @param activity that is calling this interaction. This must be of type
     * {@link TransactionSafeActivity} because we need to check on the activity state after the
     * phone numbers have been queried for.
     * @param uri contact Uri (built from {@link Contacts#CONTENT_URI}) or data Uri
     * (built from {@link Data#CONTENT_URI}). Contact Uri may show the disambiguation dialog while
     * data Uri won't.
     */
    public static void startInteractionForPhoneCall(Activity activity, Uri uri) {
        (new PhoneNumberInteraction2(activity, InteractionType.PHONE_CALL, null))
                .startInteraction(uri);
    }

    /**
     * @param activity that is calling this interaction. This must be of type
     * {@link TransactionSafeActivity} because we need to check on the activity state after the
     * phone numbers have been queried for.
     * @param callOrigin If non null, {@link DialtactsActivity#EXTRA_CALL_ORIGIN} will be
     * appended to the Intent initiating phone call. See comments in Phone package (PhoneApp)
     * for more detail.
     */
    public static void startInteractionForPhoneCall(TransactionSafeActivity activity, Uri uri,
            String callOrigin) {
        (new PhoneNumberInteraction2(activity, InteractionType.PHONE_CALL, null, callOrigin))
                .startInteraction(uri);
    }

    /**
     * Start text messaging (a.k.a SMS) action using given contact Uri. If there are multiple
     * candidates for the phone call, dialog is automatically shown and the user is asked to choose
     * one.
     *
     * @param activity that is calling this interaction. This must be of type
     * {@link TransactionSafeActivity} because we need to check on the activity state after the
     * phone numbers have been queried for.
     * @param uri contact Uri (built from {@link Contacts#CONTENT_URI}) or data Uri
     * (built from {@link Data#CONTENT_URI}). Contact Uri may show the disambiguation dialog while
     * data Uri won't.
     */
    public static void startInteractionForTextMessage(Activity activity, Uri uri) {
        (new PhoneNumberInteraction2(activity, InteractionType.SMS, null)).startInteraction(uri);
    }

    @VisibleForTesting
    /* package */CursorLoader getLoader() {
        return mLoader;
    }

    /** Shows an AlertDialog listing every candidate number; picking one fires the action. */
    @VisibleForTesting
    /* package */void showDisambiguationDialog(ArrayList<PhoneItem> phoneList) {
        AlertDialog.Builder builder = new AlertDialog.Builder(mContext);
        CharSequence[] items = new CharSequence[phoneList.size()];
        for(int i = 0; i < phoneList.size(); i++) {
            items[i] = phoneList.get(i).phoneNumber;
        }

        builder.setItems(items, new DialogInterface.OnClickListener(){
            @Override
            public void onClick(DialogInterface dialog, int which) {
                final Activity activity = (Activity) mContext;
                if (activity == null) return;
                // BUGFIX: the previous bound check was
                // "mPhoneList.size() > (which + 1)", which excluded the LAST
                // list item — tapping it dismissed the dialog without placing
                // the call/SMS. Valid indices are 0 .. size-1 inclusive.
                if (which >= 0 && which < mPhoneList.size()) {
                    final PhoneItem phoneItem = mPhoneList.get(which);
                    PhoneNumberInteraction2.performAction(activity, phoneItem.phoneNumber,
                            mInteractionType, mCallOrigin);
                } else {
                    dialog.dismiss();
                }
            }});
        builder.setTitle(mInteractionType == InteractionType.SMS ?
                R.string.sms_disambig_title : R.string.call_disambig_title);
        AlertDialog dialog = builder.create();
        dialog.show();
    }
}
package com.me.ui.util; import android.app.Activity; import android.app.KeyguardManager; import android.content.Context; import android.content.pm.ActivityInfo; import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.Point; import android.os.Build; import android.provider.Settings; import android.support.annotation.NonNull; import android.support.annotation.RequiresPermission; import android.util.DisplayMetrics; import android.view.Surface; import android.view.View; import android.view.Window; import android.view.WindowManager; import static android.Manifest.permission.WRITE_SETTINGS; /** * <pre> * author: Blankj * blog : http://blankj.com * time : 2016/08/02 * desc : utils about screen * </pre> */ public final class ScreenUtils { private ScreenUtils() { throw new UnsupportedOperationException("u can't instantiate me..."); } /** * Return the width of screen, in pixel. * * @return the width of screen, in pixel */ public static int getScreenWidth() { WindowManager wm = (WindowManager) Utils.getApp().getSystemService(Context.WINDOW_SERVICE); Point point = new Point(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { //noinspection ConstantConditions wm.getDefaultDisplay().getRealSize(point); } else { //noinspection ConstantConditions wm.getDefaultDisplay().getSize(point); } return point.x; } /** * Return the height of screen, in pixel. * * @return the height of screen, in pixel */ public static int getScreenHeight() { WindowManager wm = (WindowManager) Utils.getApp().getSystemService(Context.WINDOW_SERVICE); Point point = new Point(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { //noinspection ConstantConditions wm.getDefaultDisplay().getRealSize(point); } else { //noinspection ConstantConditions wm.getDefaultDisplay().getSize(point); } return point.y; } /** * Return the density of screen. 
* * @return the density of screen */ public static float getScreenDensity() { return Resources.getSystem().getDisplayMetrics().density; } /** * Return the screen density expressed as dots-per-inch. * * @return the screen density expressed as dots-per-inch */ public static int getScreenDensityDpi() { return Resources.getSystem().getDisplayMetrics().densityDpi; } /** * Set full screen. * * @param activity The activity. */ public static void setFullScreen(@NonNull final Activity activity) { activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); } /** * Set non full screen. * * @param activity The activity. */ public static void setNonFullScreen(@NonNull final Activity activity) { activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); } /** * Toggle full screen. * * @param activity The activity. */ public static void toggleFullScreen(@NonNull final Activity activity) { int fullScreenFlag = WindowManager.LayoutParams.FLAG_FULLSCREEN; Window window = activity.getWindow(); if ((window.getAttributes().flags & fullScreenFlag) == fullScreenFlag) { window.clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS); } else { window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS); } } /** * Return whether screen is full. * * @param activity The activity. * @return {@code true}: yes<br>{@code false}: no */ public static boolean isFullScreen(@NonNull final Activity activity) { int fullScreenFlag = WindowManager.LayoutParams.FLAG_FULLSCREEN; return (activity.getWindow().getAttributes().flags & fullScreenFlag) == fullScreenFlag; } /** * Set the screen to landscape. * * @param activity The activity. */ public static void setLandscape(@NonNull final Activity activity) { activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); } /** * Set the screen to portrait. * * @param activity The activity. 
*/ public static void setPortrait(@NonNull final Activity activity) { activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); } /** * Return whether screen is landscape. * * @return {@code true}: yes<br>{@code false}: no */ public static boolean isLandscape() { return Utils.getApp().getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE; } /** * Return whether screen is portrait. * * @return {@code true}: yes<br>{@code false}: no */ public static boolean isPortrait() { return Utils.getApp().getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT; } /** * Return the rotation of screen. * * @param activity The activity. * @return the rotation of screen */ public static int getScreenRotation(@NonNull final Activity activity) { switch (activity.getWindowManager().getDefaultDisplay().getRotation()) { case Surface.ROTATION_0: return 0; case Surface.ROTATION_90: return 90; case Surface.ROTATION_180: return 180; case Surface.ROTATION_270: return 270; default: return 0; } } /** * Return the bitmap of screen. * * @param activity The activity. * @return the bitmap of screen */ public static Bitmap screenShot(@NonNull final Activity activity) { return screenShot(activity, false); } /** * Return the bitmap of screen. * * @param activity The activity. * @param isDeleteStatusBar True to delete status bar, false otherwise. 
* @return the bitmap of screen */ public static Bitmap screenShot(@NonNull final Activity activity, boolean isDeleteStatusBar) { View decorView = activity.getWindow().getDecorView(); decorView.setDrawingCacheEnabled(true); decorView.setWillNotCacheDrawing(false); Bitmap bmp = decorView.getDrawingCache(); if (bmp == null) return null; DisplayMetrics dm = new DisplayMetrics(); activity.getWindowManager().getDefaultDisplay().getMetrics(dm); Bitmap ret; if (isDeleteStatusBar) { Resources resources = activity.getResources(); int resourceId = resources.getIdentifier("status_bar_height", "dimen", "android"); int statusBarHeight = resources.getDimensionPixelSize(resourceId); ret = Bitmap.createBitmap( bmp, 0, statusBarHeight, dm.widthPixels, dm.heightPixels - statusBarHeight ); } else { ret = Bitmap.createBitmap(bmp, 0, 0, dm.widthPixels, dm.heightPixels); } decorView.destroyDrawingCache(); return ret; } /** * Return whether screen is locked. * * @return {@code true}: yes<br>{@code false}: no */ public static boolean isScreenLock() { KeyguardManager km = (KeyguardManager) Utils.getApp().getSystemService(Context.KEYGUARD_SERVICE); //noinspection ConstantConditions return km.inKeyguardRestrictedInputMode(); } /** * Set the duration of sleep. * <p>Must hold {@code <uses-permission android:name="android.permission.WRITE_SETTINGS" />}</p> * * @param duration The duration. */ @RequiresPermission(WRITE_SETTINGS) public static void setSleepDuration(final int duration) { Settings.System.putInt( Utils.getApp().getContentResolver(), Settings.System.SCREEN_OFF_TIMEOUT, duration ); } /** * Return the duration of sleep. * * @return the duration of sleep. */ public static int getSleepDuration() { try { return Settings.System.getInt( Utils.getApp().getContentResolver(), Settings.System.SCREEN_OFF_TIMEOUT ); } catch (Settings.SettingNotFoundException e) { e.printStackTrace(); return -123; } } /** * Return whether device is tablet. 
* * @return {@code true}: yes<br>{@code false}: no */ public static boolean isTablet() { return (Utils.getApp().getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE; } }
// ==================================================================== // FILE NAME: KilroyGyro.java (Team 339 - Kilroy) // // CREATED ON: sometime during 2017 build season // CREATED BY: Alex Kneipp // MODIFIED ON:2/28/17 and 2/29/17 // MODIFIED BY: Ashley Espeland // ABSTRACT: // deals with all of our shooter code package org.usfirst.frc.team339.Utils; import com.ctre.CANTalon; import org.usfirst.frc.team339.Hardware.Hardware; import org.usfirst.frc.team339.HardwareInterfaces.IRSensor; import org.usfirst.frc.team339.HardwareInterfaces.UltraSonic; import org.usfirst.frc.team339.vision.ImageProcessor; import edu.wpi.first.wpilibj.Spark; // import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.Victor; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; // TODO shooter encoder instead of pot /** * Describes the shooter object for the 2017 game: FIRST Steamworks. It's a * flywheel shooter with an elevator loader. Look at the technical packet from * 2017 for more information. * * @author Alexander H. Kneipp * */ public class Shooter { private CANTalon flywheelController = null; private IRSensor elevatorSensor = null; private Spark elevatorController = null; private double acceptableError = 75; private ImageProcessor visionTargeter = null; private double acceptableGimbalError = .5;// in degrees private CANTalon gimbalMotor = null; private Victor agitatorMotor = null; private UltraSonic distanceSensor = null; // private Timer shooterTimer = new Timer();1 /** * Creates a new shooter object for the 2017 season, SteamWorks * * @param controller * The motor controller which runs the flywheel. * @param ballLoaderSensor * Detects if there's a ball ready to be fired. * @param elevator * The motor controller which loads the loader elevator * @param acceptableFlywheelSpeedError * The error we can handle on the flywheel without losing * accuracy * @param visionTargeting * Our vision processor object, used to target the high boiler. 
* @param acceptableGimbalError * The acceptable angular angle, in degrees, the gimbal turret is * allowed to be off. * @param gimbalMotor * The motor controller the turret is run on * @param agitatorMotor * The motor controller the agitator motor is connected to * @param distanceSensor * TODO * @param gimbalEnc * The potentiometer that reads the bearing of the turret. */ public Shooter (CANTalon controller, IRSensor ballLoaderSensor, Spark elevator, double acceptableFlywheelSpeedError, ImageProcessor visionTargeting, double acceptableGimbalError, CANTalon gimbalMotor, Victor agitatorMotor, UltraSonic distanceSensor) { this.flywheelController = controller; this.elevatorSensor = ballLoaderSensor; this.elevatorController = elevator; this.acceptableError = acceptableFlywheelSpeedError; this.visionTargeter = visionTargeting; this.gimbalMotor = gimbalMotor; this.agitatorMotor = agitatorMotor; this.distanceSensor = distanceSensor; } //// TODO MAKE SURE THIS IS OK!!!! // public Shooter (CANTalon controller, IRSensor ballLoaderSensor, // Spark elevator, double acceptableFlywheelSpeedError, // ImageProcessor visionTargeting, double acceptableGimbalError, // CANTalon gimbalMotor, Spark agitatorMotor) // { // this.flywheelController = controller; // this.elevatorSensor = ballLoaderSensor; // this.elevator = elevator; // this.acceptableError = acceptableFlywheelSpeedError; // this.visionTargeter = visionTargeting; // this.gimbalMotor = gimbalMotor; // this.agitatorMotor = agitatorMotor; // } /** * @param error * The allowed error deadband for our gimbal, in degrees. */ public void setAcceptableGimbalError (double error) { this.acceptableGimbalError = error; } /** * @return The allowed error deadband for our gimbal, in degrees. */ public double getAcceptableGimbalError () { // returns what we consider an acceptable amount of error for the gimbal return this.acceptableGimbalError; } /** * @param error * The acceptable flywheel RPM error at which we will still fire * balls. 
 */
public void setAcceptableFlywheelError (double error)
{
    // sets the acceptable error for the flywheel
    this.acceptableError = error;
}

/**
 * @return The acceptable flywheel RPM error at which we will still fire
 *         balls.
 */
public double getAcceptableFlywheelError ()
{
    // returns what we consider an acceptable amount of error for the flywheel
    return this.acceptableError;
}

// TODO write stops for other motors. TODO check that we did that.
/**
 * Stops the flywheel motor.
 */
public void stopFlywheelMotor ()
{
    // stops the flywheel motor
    this.flywheelController.set(0.0);
}

/**
 * Runs the agitator and elevator towards the shooter.
 */
public void loadBalls ()
{
    // load balls by running the elevator and agitator at their assigned speeds
    this.elevatorController.set(ELEVATOR_SPEED);
    this.agitatorMotor.set(AGITATOR_SPEED);
    //@ANE removed for sanity's sake
    //Hardware.intake.startIntake();
}

/**
 * Stops both the elevator and the agitator.
 */
public void stopLoader ()
{
    // stops loading balls by stopping the elevator and the agitator
    this.elevatorController.set(0.0);
    this.agitatorMotor.set(0.0);
    //@ANE removed for sanity's sake
    // Hardware.intake.stopIntake();
}

/**
 * Runs the elevator backwards and also runs the agitator.
 * NOTE(review): only the elevator direction is reversed here; the agitator
 * keeps its normal (signed) speed.
 */
public void reverseLoader ()
{
    // reverse loads by running the elevator in reverse and the agitator at
    // its normal speed (normal includes its direction)
    this.elevatorController.set(-ELEVATOR_SPEED);
    this.agitatorMotor.set(AGITATOR_SPEED);
}

/**
 * Prepares to fire and fires a ball.
 *
 * @return True if we've fired, false if we haven't yet.
 * @deprecated Use {@link #fire(double)} instead
 */
public boolean fire ()
{
    // delegate with no RPM offset
    return fire(0);
}

/**
 * Prepares to fire and fires a ball.
 *
 * @param rpmOffset
 *            RPM added on top of the base flywheel setpoint
 *            (tuning knob — original doc said only "TODO")
 *
 * @return True if we've fired, false if we haven't yet.
 */
public boolean fire (double rpmOffset)
{
    // System.out.println("RPMOffset in fire: " + rpmOffset);
    readyToFire = prepareToFire(rpmOffset);
    // not spun up / staged yet: report not fired
    if (readyToFire == false)
        {
        return false;
        }
    // this.elevatorController.set(0);
    // clear the latch so the next call goes through prepareToFire again
    readyToFire = false;
    return true;
}

// Latched by fire(): true once prepareToFire reports the flywheel is within
// the acceptable error band.
private boolean readyToFire = false;

/**
 * Prepares to fire a ball by revving up the flywheel motor and sets up a
 * ball to be fired.
 *
 * @return true if we're ready to fire, false otherwise.
 * @deprecated Use {@link #prepareToFire(double)} instead
 */
public boolean prepareToFire ()
{
    // delegate with no RPM offset
    return prepareToFire(0);
}

/**
 * Prepares to fire a ball by revving up the flywheel motor and sets up a
 * ball to be fired.
 *
 * @param rpmOffset
 *            RPM added on top of the (currently hard-coded 1900) setpoint
 *
 * @return true if we're ready to fire, false otherwise.
 */
public boolean prepareToFire (double rpmOffset)
{
    // System.out.println("RPMOffset in prepareToFire: " + rpmOffset);
    // dist is the distance to goal
    // NOTE(review): hard-coded to 1 — the real range measurement is
    // commented out, so the dist > 0 branch below always runs.
    double dist = 1;/*
                     * this.distanceSensor.getDistanceFromNearestBumper()
                     * / 12.0;
                     */
    if (dist > 0)
        {
        // spin the flywheel at a fixed 1900 RPM plus the caller's offset
        // (the physics-based setpoint is commented out)
        this.flywheelController
                .set(/* .5 * this.calculateRPMToMakeGoal(dist) */1900
                        + rpmOffset);
        // publish the measured flywheel speed for the drive team
        SmartDashboard.putNumber("Flywheel speed",
                this.flywheelController.getSpeed());
        // the raw error is divided by 4.0 before comparing against the
        // acceptable RPM error band
        if (Math.abs(this.flywheelController.getError()
                / 4.0) > this.acceptableError)
            {
            // IF we are not in the error range AND the sensor does not read
            // balls, start loading balls
            // if (this.elevatorSensor.isOn() == false)
            // {
            // this.loadBalls();
            // }
            // else
            // IF we are not in the error range AND the sensor DOES read
            // // balls, stop loading balls
            // {
            //
            // NOTE(review): the if/else skeleton around this call is
            // commented out, so stopLoader() now runs unconditionally
            // whenever the flywheel is out of the error band — confirm
            // that is intended.
            this.stopLoader();
            // }
            return false;
            }
        }
    else
        // unreachable while dist is hard-coded to 1
        return false;
    // if (this.elevatorSensor.isOn())
    // {
    // this.stopLoader();
    // }
    // else
    // {
    // this.loadBalls();
    // return false;
    // }
    // IF we are in the error range then load balls and we are ready to fire
    // this.loadBalls();
    // flywheel is within the error band: ready to fire
    return true;
}

/**
 * Turns the turret to the new bearing on the robot.
 *
 * @param newBearing
 *            The new angle, relative to the robot, to turn the turret to.
 *            (- is left, + is to the right)
 * @return SUCCESS if we're at our angle (within our threshold), TOO_FAR if
 *         we can't gimbal that much, or WORKING if we're not done yet.
 */
// TODO slow down as we approach it
public turnReturn turnToBearing (double newBearing)
{
    // still outside the deadband: |target - current| >= acceptable error
    // (the original comment said "less than or equal", contradicting the code)
    if (Math.abs(
            newBearing - this.getBearing()) >= acceptableGimbalError)
        {
        // target is to the left of the current bearing
        if (newBearing - getBearing() < 0)
            {
            // turn left at medium speed
            return this.turnGimbal(-MEDIUM_TURN_SPEED);
            }
        // turn right at medium speed
        return this.turnGimbal(MEDIUM_TURN_SPEED);
        }
    // within the deadband: stop and report success
    this.stopGimbal();
    return turnReturn.SUCCESS;
}

/**
 * Sets the gimbal motor to 0. Call after any turnGimbal function (except
 * turnToBearing)
 */
public void stopGimbal ()
{
    // sets the gimbal to 0
    this.turnGimbal(0.0);
}

/**
 * Turns the gimbal at our slow speed.
 *
 * run stopGimbal afterwards
 *
 * @param direction
 *            Negative 1 or positive 1, positive for right, negative for
 *            left
 * @return see turnGimbal(double)
 */
public turnReturn turnGimbalSlow (int direction)
{
    return this.turnGimbal(direction * SLOW_TURN_SPEED);
}

/**
 * Turns the gimbal at our medium speed.
 *
 * run stopGimbal afterwards
 *
 * @param direction
 *            Negative 1 or positive 1, positive for right, negative for
 *            left
 * @return see turnGimbal(double)
 */
public turnReturn turnGimbalMedium (int direction)
{
    return this.turnGimbal(direction * MEDIUM_TURN_SPEED);
}

/**
 * Turns the turrets as quickly as we can.
 *
 * run stopGimbal afterwards
 *
 * @param direction
 *            Negative 1 or positive 1, positive for right, negative for
 *            left
 * @return see turnGimbal(double)
 */
public turnReturn turnGimbalFast (int direction)
{
    return this.turnGimbal(direction * MAX_TURN_SPEED);
}

/**
 * Turns the gimbal if we're allowed to.
 *
 * @param speed
 *            The speed and direction we turn the gimbal at (+ right, -
 *            left)
 * @return WORKING if we're turning, TOO_FAR if we're at or have passed our
 *         limit.
 */
private turnReturn turnGimbal (double speed)
{
    speed = -speed;
    // ^^ Turret is reversed ^^
    // refuse to push past either soft limit: at/right of the max angle while
    // moving right, or at/left of the min angle while moving left
    if ((this.getBearing() >= MAX_GIMBALING_ANGLE && speed > 0)
            || (this.getBearing() <= MIN_GIMBALING_ANGLE && speed < 0))
        {
        this.gimbalMotor.set(0.0);
        return turnReturn.TOO_FAR;
        }
    // TODO direction
    /*
     * Make sure we never turn faster than the maximum speed.
     * ALSO motor is reversed so... that's why it's like that.
     */
    // clamp a leftward command at -MAX_TURN_SPEED
    if (speed < 0)
        {
        this.gimbalMotor.set(
                Math.max(speed, -MAX_TURN_SPEED));
        }
    // clamp a rightward command at +MAX_TURN_SPEED
    // (the original comment said "higher value", but Math.min picks the lower)
    else if (speed > 0)
        {
        this.gimbalMotor.set(
                Math.min(speed, MAX_TURN_SPEED));
        }
    else
        {
        // speed == 0: stop the gimbal motor
        this.gimbalMotor.set(0.0);
        }
    return turnReturn.WORKING;
}

/**
 * @return The bearing of the shooter relative to the robot, with negative
 *         degrees to the left and positive to the right.
 *         (NOTE(review): the original doc trailed off at "range 0 to MAX_";
 *         confirm the actual range against GIMBAL_ENCODER_FACTOR.)
 */
public double getBearing ()
{
    // scale raw encoder ticks into degrees
    return this.gimbalMotor.getEncPosition()
            * this.GIMBAL_ENCODER_FACTOR;
}

/**
 * Returned from turnToBearing as well as the turn slow, medium, and fast
 * functions. Provides additional information as to the state of the
 * function.
 *
 * @author Alexander H. Kneipp
 */
public static enum turnReturn
    {
    /**
     * We cannot gimbal as far as the user wants us to. Quit state.
     */
    TOO_FAR,
    /**
     * We're still aligning, be patient! Not a quit state.
     */
    WORKING,
    /**
     * We've successfully turned to the target position, good job robot.
     * Quit state.
     */
    SUCCESS
    }

/**
 * Uses the vision processing to align the gimbal to the high boiler.
 *
 * @return SUCCESS when aligned, NO_BLOBS if nothing is visible, WORKING
 *         otherwise. (The original doc said "True/false", which does not
 *         match the enum return type.)
 */
// TODO Radians and degrees
public turnToGoalReturn turnToGoal ()
{
    // take an initial picture the first time through
    if (firstTimeRun == true)
        {
        this.visionTargeter.processImage();
        firstTimeRun = false;
        }
    if (this.visionTargeter.getLargestBlob() != null)
        {
        // turn toward the yaw angle of the largest blob; SUCCESS once the
        // turret reaches it
        if (this.turnToBearing(
                Math.toDegrees(this.visionTargeter
                        .getYawAngleToTarget(this.visionTargeter
                                .getLargestBlob()))) == turnReturn.SUCCESS)
            {
            return turnToGoalReturn.SUCCESS;
            }
        }
    else
        {
        // no blob visible: grab a fresh frame and report
        this.visionTargeter.processImage();
        return turnToGoalReturn.NO_BLOBS;
        }
    return turnToGoalReturn.WORKING;
}

/**
 * Aligns the turret to the goal using the raw image X position of the
 * largest blob instead of the yaw-angle math.
 *
 * @return true once the blob is centered (within the deadband), false while
 *         still correcting or when no blob is visible.
 */
public boolean turnToGoalRaw ()
{
    this.visionTargeter.processImage();
    if (this.visionTargeter.getLargestBlob() != null)
        {
        // already centered: normalized X within the deadband around .5
        if ((Math
                .abs((this.visionTargeter.getLargestBlob().center_mass_x
                        / this.visionTargeter.camera
                                .getHorizontalResolution())
                        - .5) <= TURN_TO_GOAL_RAW_DEADBAND) == true)
            {
            this.stopGimbal();
            return true;
            }
        // blob right of center: nudge left slowly
        if (this.visionTargeter.getLargestBlob().center_mass_x
                / this.visionTargeter.camera
                        .getHorizontalResolution() > centerXLineOfImage)
            {
            this.turnGimbalSlow(-1);
            return false;
            }
        // blob left of center: turn right
        // NOTE(review): the left correction uses turnGimbalSlow but the right
        // correction uses turnGimbalMedium — confirm the asymmetry is
        // intentional (cf. the GIMBAL_LEFT_OFFSET comment below).
        if (this.visionTargeter.getLargestBlob().center_mass_x
                / this.visionTargeter.camera
                        .getHorizontalResolution() < centerXLineOfImage)
            {
            this.turnGimbalMedium(1);
            return false;
            }
        }
    return false;
}

// Normalized image X coordinate treated as "centered" (0.5 = middle of frame).
private double centerXLineOfImage = .5;

// NOTE(review): appears unused in this file.
private boolean isTurningToGoal = false;

// NOTE(review): appears unused in this file.
private turnReturn turningToGoalVal = turnReturn.SUCCESS;

// True until turnToGoal() has processed its first image.
private boolean firstTimeRun = false;

// NOTE(review): appears unused in this file.
private double gimbalTarget = Double.MIN_VALUE;

/**
 *
 * Return values for turnToGoal(), indicates different failure states and
 * working state.
 *
 * @author Alexander H. Kneipp
 *
 */
// TODO decide about whether we quit on one blob.
public static enum turnToGoalReturn
    {
    /**
     * We don't see any blobs. Quit state.
     */
    NO_BLOBS,
    /**
     * We see 1 blob, not enough to align. Quit state.
     */
    NOT_ENOUGH_BLOBS,
    /**
     * We have successfully aligned. Quit state.
     */
    SUCCESS,
    /**
     * Not currently used, but may be used if this often runs "working" for
     * way to long. Quit state.
     */
    TIMEOUT,
    /**
     * Still aligning to target. NOT a quit state.
     */
    WORKING,
    /**
     * We see the target, but we can't gimbal to it. The robot will need to
     * turn. Quit state.
     */
    OUT_OF_GIMBALING_RANGE
    }

/**
 * Uses physics to figure out how fast we need to spin the flywheel. Uses
 * the constant FLYWHEEL_SPEED_CORRECTION_CONSTANT to account for friction,
 * air resistance, and calculation imprecision. Change if you're shooting
 * too far or too close. Increase to increase shooting distance, decrease to
 * decrease.
 *
 * @param distance
 *            The distance, in feet, the shooter is away from the goal
 *            across the floor.
 * @return the RPM to run the flywheel wheel at.
 */
public double calculateRPMToMakeGoal (double distance)
{
    // NOTE(review): feet -> meters should MULTIPLY by 0.3048 (i.e. divide by
    // 3.28084). Multiplying by 3.28084 inflates the distance ~10.7x;
    // FLYWHEEL_SPEED_CORRECTION_CONSTANT may have been tuned around this,
    // and the only live caller has this path commented out — confirm on
    // hardware before changing.
    double distanceMeters = distance * 3.28084;
    // ideal no-loss RPM from projectile motion at MOUNT_ANGLE, converted to
    // wheel RPM via the flywheel radius (4.9 = g/2)
    double perfectRPM = (60.0 / (2 * Math.PI) * (Math.sqrt(
            ((4.9 * (Math.pow(distanceMeters, 2)))
                    / ((this.FLYWHEEL_RADIUS_METERS
                            * this.FLYWHEEL_RADIUS_METERS)
                            * (Math.pow(Math.cos(
                                    Math.toRadians(this.MOUNT_ANGLE)), 2))
                            * (distanceMeters * Math.tan(
                                    Math.toRadians(this.MOUNT_ANGLE))
                                    - this.RELATIVE_GOAL_HEIGHT_METERS))))));
    // apply the empirical correction factor
    return perfectRPM
            + this.FLYWHEEL_SPEED_CORRECTION_CONSTANT * perfectRPM;
}

// ---------------------------------------------------------------------
// variables
// ---------------------------------------------------------------------
private final double MAX_TURN_SPEED = .5;

private final double MEDIUM_TURN_SPEED = .35;

private final double SLOW_TURN_SPEED = .3;

private final double TURN_TO_GOAL_RAW_DEADBAND = .05;

private final double ELEVATOR_SPEED = 1;// .8

private final double AGITATOR_SPEED = -1;

// NOTE(review): appears unused in this file.
private final double GIMBAL_LEFT_OFFSET = .1;// Going left is slower than going
                                             // right for some reason

public final double MAX_GIMBALING_ANGLE = 16;// in degrees

public final double MIN_GIMBALING_ANGLE = -16;// in degrees

private final double MOUNT_ANGLE = 60;// TODO figure out the actual number.

private final double RELATIVE_GOAL_HEIGHT_METERS = 1.93;

private final double FLYWHEEL_RADIUS_METERS = 0.0508;

private final double FLYWHEEL_SPEED_CORRECTION_CONSTANT = -.13578;// TODO tune

/**
 * factor the gimbal encoder must be set to (distance per pulse)
 * for degrees
 */
private final double GIMBAL_ENCODER_FACTOR = .00022448;
}